[WIP] React Native example #100

Draft · wants to merge 4 commits into main
39 changes: 39 additions & 0 deletions evi-react-native-example/EVIExample/.gitignore
@@ -0,0 +1,39 @@
# Learn more https://docs.github.com/en/get-started/getting-started-with-git/ignoring-files

# dependencies
node_modules/

# Expo
.expo/
dist/
web-build/

# Native
*.orig.*
*.jks
*.p8
*.p12
*.key
*.mobileprovision
.env

# Metro
.metro-health-check*

# debug
npm-debug.*
yarn-debug.*
yarn-error.*

# macOS
.DS_Store
*.pem

# local env files
.env*.local

# typescript
*.tsbuildinfo

ios
android
1 change: 1 addition & 0 deletions evi-react-native-example/EVIExample/.npmrc
@@ -0,0 +1 @@
node-linker=hoisted
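# pnpm setting: lay out node_modules as a flat, npm-style tree. React Native's
# Metro bundler and native autolinking generally expect this hoisted layout.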
322 changes: 322 additions & 0 deletions evi-react-native-example/EVIExample/App.tsx
@@ -0,0 +1,322 @@
import React, { useEffect, useState, useRef } from 'react';
import {
View,
Text,
Button,
StyleSheet,
ScrollView,
SafeAreaView,
LayoutAnimation,
} from 'react-native';

// We use Hume's low-level TypeScript SDK for this example.
// The React SDK (@humeai/voice-react) does not support React Native.
import { HumeClient, type Hume } from 'hume'

import * as NativeAudio from './modules/audio';

interface ChatEntry {
role: 'user' | 'assistant';
timestamp: string;
content: string;
}

const hume = new HumeClient({
apiKey: process.env.EXPO_PUBLIC_HUME_API_KEY || ''
})
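// Expo inlines environment variables prefixed with EXPO_PUBLIC_ at build time,
// e.g. from a local .env file.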

// EVI can send audio output messages faster than they can be played back.
// It is important to buffer them in a queue so as not to cut off a clip of
// playing audio with a more recent clip.
class AudioQueue {
private tasks: Array<() => Promise<void>> = []
private currentClip: Promise<void> | null = null;

private advance() {
console.log('Advancing audio queue...')
if (this.tasks.length === 0) {
this.currentClip = null;
return
}
// Catch playback errors so a failed clip doesn't stall the rest of the queue.
this.currentClip = this.tasks.shift()!()
.catch((error) => console.error('Error during audio playback:', error))
.then(() => this.advance())
}

public add(playAudio: () => Promise<void>) {
console.log('Adding to queue...')
this.tasks.push(playAudio)

if (!this.currentClip) {
this.advance()
}
}

public clear() {
console.log('Clearing audio queue...')
this.tasks = []
this.currentClip = null
}
}

const audioQueue = new AudioQueue()
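// Minimal usage sketch (`clipA` and `clipB` are hypothetical base64 audio payloads):
//   audioQueue.add(() => NativeAudio.playAudio(clipA))
//   audioQueue.add(() => NativeAudio.playAudio(clipB)) // plays only after clipA finishes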

const App = () => {
const [isConnected, setIsConnected] = useState(false);
const [isMuted, setIsMuted] = useState(false);
const [chatEntries, setChatEntries] = useState<ChatEntry[]>([]);
const addChatEntry = (entry: ChatEntry) => {
setChatEntries((prev) => [...prev, entry]);
}

// Scroll to the bottom of the chat display when new messages are added
const scrollViewRef = useRef<ScrollView | null>(null);
useEffect(() => {
if (scrollViewRef.current) {
LayoutAnimation.configureNext(LayoutAnimation.Presets.easeInEaseOut);
scrollViewRef.current.scrollToEnd();
}
}, [chatEntries]);

const chatSocketRef = useRef<Hume.empathicVoice.chat.ChatSocket | null>(null);

useEffect(() => {
(async () => {
if (isConnected) {
try {
await NativeAudio.getPermissions()
} catch (error) {
console.error('Failed to get permissions:', error)
}
try {
await NativeAudio.startRecording()
} catch (error) {
console.error('Failed to start recording:', error)
}

const chatSocket = hume.empathicVoice.chat.connect({
configId: process.env.EXPO_PUBLIC_HUME_CONFIG_ID,
})
chatSocket.on('open', () => {
// The code within the native modules converts the default system audio
// format to linear 16-bit PCM, a standard format recognized by EVI. For
// linear16 PCM you must send a `session_settings` message to inform EVI
// of the correct sampling rate.
chatSocket.sendSessionSettings({
audio: {
encoding: "linear16",
channels: 1,
sampleRate: NativeAudio.sampleRate,
}
})
})
chatSocket.on('message', handleIncomingMessage);

chatSocket.on('error', (error) => {
console.error("WebSocket Error:", error);
});

console.log('Registering handler')
chatSocket.on('close', () => {
console.log('Socket closing')
setIsConnected(false);
});

chatSocketRef.current = chatSocket;

NativeAudio.onAudioInput(({ base64EncodedAudio }: NativeAudio.AudioEventPayload) => {
if (chatSocket.readyState !== WebSocket.OPEN) {
console.log('Socket not open, not sending audio input...')
return
}
chatSocket.sendAudioInput({ data: base64EncodedAudio });
})
} else {
try {
await NativeAudio.stopRecording()
} catch (error) {
console.error('Error while stopping recording', error)
}
if (chatSocketRef.current) {
chatSocketRef.current.close();
}
}
})()
return () => {
NativeAudio.stopRecording().catch((error) => {
console.error('Error while stopping recording', error)
})
if (chatSocketRef.current && chatSocketRef.current.readyState === WebSocket.OPEN) {
chatSocketRef.current?.close();
}
}
}, [isConnected]);


useEffect(() => {
if (isMuted) {
NativeAudio.mute().catch((error) => {
console.error('Error while muting', error)
})
} else {
NativeAudio.unmute().catch((error) => {
console.error('Error while unmuting', error)
});
}
}, [isMuted])


const handleInterruption = () => {
console.log("Clearing audio queue...")
audioQueue.clear()
NativeAudio.stopPlayback()
}

const handleIncomingMessage = async (message: Hume.empathicVoice.SubscribeEvent) => {
switch (message.type) {
case 'error':
console.error(message);
break;
case 'chat_metadata':
// Contains useful information:
// - chat_id: a unique identifier for the chat session, useful if you want to retrieve transcripts later
// - chat_group_id: passing a chat group ID when reconnecting allows you to preserve context and resume
//   the same conversation with EVI over a new WebSocket connection, e.g. after a disconnection.
console.log('Received chat metadata:', message);
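// A minimal resume sketch (assumes the SDK's `resumedChatGroupId` connect option;
// `savedChatGroupId` is a hypothetical value persisted from this message):
//   hume.empathicVoice.chat.connect({
//     configId: process.env.EXPO_PUBLIC_HUME_CONFIG_ID,
//     resumedChatGroupId: savedChatGroupId,
//   })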
break;
case 'audio_output':
audioQueue.add(() => NativeAudio.playAudio(message.data));
break;
case 'user_message':
case 'assistant_message':
if (message.message.role !== 'user' && message.message.role !== 'assistant') {
console.error(`Unhandled: received message with role: ${message.message.role}`);
return;
}
if (message.type === 'user_message') {
handleInterruption()
}
addChatEntry({
role: message.message.role,
timestamp: new Date().toString(),
content: message.message.content!,
});
break;
case 'user_interruption':
handleInterruption()
break;

// This message type indicates the end of EVI's "turn" in the conversation. It is not
// needed in this example, but it could be useful in an audio environment without good
// echo cancellation, where you could auto-mute the user's microphone while EVI is
// speaking.
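// A hypothetical sketch of that approach: call setIsMuted(true) when an
// 'assistant_message' arrives, and setIsMuted(false) here on 'assistant_end'.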
case 'assistant_end':

// These messages are not needed in this example. They are for EVI's "tool use" feature:
// https://dev.hume.ai/docs/empathic-voice-interface-evi/tool-use
case 'tool_call':
case 'tool_error':
case 'tool_response':
console.log(`Received unhandled message type: ${message.type}`);
break;
default:
const _: never = message;
console.error('Unexpected message:', message);
break;
}
};

return (
<View style={styles.appBackground}>
<SafeAreaView style={styles.container}>
<View style={styles.header}>
<Text style={styles.headerText}>You are {isConnected ? 'connected' : 'disconnected'}</Text>
</View>
<ScrollView style={styles.chatDisplay} ref={scrollViewRef}>
{chatEntries.map((entry, index) => (
<View
key={index}
style={[
styles.chatEntry,
entry.role === 'user' ? styles.userChatEntry : styles.assistantChatEntry,
]}
>
<Text style={styles.chatText}>{entry.content}</Text>
</View>
))}
</ScrollView>
<View style={styles.buttonContainer}>
<Button
title={isConnected ? 'Disconnect' : 'Connect'}
onPress={() => setIsConnected(!isConnected)}
/>
<Button title={isMuted ? 'Unmute' : 'Mute'} onPress={() => setIsMuted(!isMuted)} />
</View>
</SafeAreaView>
</View>
);
};

const styles = StyleSheet.create({
appBackground: {
flex: 1,
backgroundColor: 'rgb(255, 244, 232)',
alignItems: 'center',
},
container: {
flex: 1,
justifyContent: 'center',
padding: 16,
maxWidth: 600,
width: '100%'
},
header: {
marginBottom: 16,
alignItems: 'center',
},
headerText: {
fontSize: 18,
fontWeight: 'bold',
},
chatDisplay: {
flex: 1,
width: '100%',
marginBottom: 16,
},
chatEntry: {
padding: 10,
marginVertical: 5,
borderRadius: 15,
maxWidth: '75%',
shadowColor: '#000',
shadowOffset: {
width: 0,
height: 2,
},
shadowOpacity: 0.1,
shadowRadius: 2,
elevation: 3,
},
userChatEntry: {
backgroundColor: 'rgb(209, 226, 243)',
alignSelf: 'flex-end',
marginRight: 10,
},
assistantChatEntry: {
backgroundColor: '#fff',
alignSelf: 'flex-start',
marginLeft: 10,
},
chatText: {
fontSize: 16,
},
buttonContainer: {
flexDirection: 'row',
justifyContent: 'space-between',
width: '100%',
paddingHorizontal: 16,
paddingVertical: 8,
},
});

export default App;
32 changes: 32 additions & 0 deletions evi-react-native-example/EVIExample/AudioQueue.test.ts
@@ -0,0 +1,32 @@
import { AudioQueue } from './AudioQueue'
test('AudioQueue', async () => {
const log: Array<string> = []
const aq = new AudioQueue()
log.push('1') // t=0
aq.add(async () => {
// t=0
log.push('2')
await new Promise((resolve) => setTimeout(resolve, 25))
// t=25
log.push('3')
await new Promise((resolve) => setTimeout(resolve, 75))
// t=100
log.push('5')
})
await new Promise((resolve) => setTimeout(resolve, 50))
// t=50
log.push('4')
aq.add(async () => {
// t=100
log.push('6')
await new Promise((resolve) => setTimeout(resolve, 100))
// t=200
log.push('7')
})
await new Promise((resolve) => setTimeout(resolve, 202))
expect(log.join(',')).toBe('1,2,3,4,5,6,7')
// @ts-ignore
expect(aq.tasks).toEqual([])
// @ts-ignore
expect(aq.currentClip).toEqual(null)
})