Merge pull request #308 from benxu3/01-app

replace old rn app with 01-app
killian committed 4 months ago (via GitHub)
commit 63cc2a2f01

.gitmodules

@@ -0,0 +1,4 @@
[submodule "software/source/clients/mobile/01-app"]
	path = software/source/clients/mobile/01-app
	url = https://github.com/OpenInterpreter/01-app.git
	branch = main

@@ -0,0 +1 @@
Subproject commit 39869d3252a5d4620a22d57b34cdd65ab9e72ed5
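
Because the mobile client is now a git submodule rather than vendored source, a fresh clone will have an empty `software/source/clients/mobile/01-app` directory until the submodule is initialized. A minimal sketch using standard git commands (the URL and path come from the `.gitmodules` entry above):

```shell
git clone --recurse-submodules https://github.com/OpenInterpreter/01.git # clone including submodules
# or, inside an existing checkout:
git submodule update --init software/source/clients/mobile/01-app # check out the pinned 01-app commit
```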

@@ -1,32 +0,0 @@
# iOS/Android Client
**_WORK IN PROGRESS_**

This directory contains the source code for the 01 iOS/Android app. It is a work in progress, and we will continue improving it until it works reliably.

Feel free to improve it and open a pull request!
If you want to run it on your own, you will need to install Expo Go on your mobile device.
## Setup Instructions
Follow the **[software setup steps](https://github.com/OpenInterpreter/01?tab=readme-ov-file#software)** in the main repo's README before continuing.
```shell
cd software/source/clients/mobile/react-native # cd into `react-native`
npm install # install dependencies
npx expo start # start local development server
```
In **Expo Go**, select _Scan QR code_ and scan the QR code produced by the `npx expo start` command.
## Using the App
```shell
cd software # cd into `software`
poetry run 01 --mobile # exposes QR code for 01 Light server
```
In the app, select _Scan Code_ and scan the QR code produced by the `poetry run 01 --mobile` command.
Press and hold the button to speak, and release it to send the request. To rescan the QR code, swipe left on the screen to go back.

@@ -1,31 +0,0 @@
import * as React from "react";
import { NavigationContainer } from "@react-navigation/native";
import { createNativeStackNavigator } from "@react-navigation/native-stack";
import HomeScreen from "./src/screens/HomeScreen";
import CameraScreen from "./src/screens/Camera";
import Main from "./src/screens/Main";
import { StatusBar } from "expo-status-bar";

const Stack = createNativeStackNavigator();

function App() {
  return (
    <>
      <StatusBar style="light" />
      <NavigationContainer>
        <Stack.Navigator
          initialRouteName="Home"
          screenOptions={{
            headerShown: false, // This hides the navigation bar globally
          }}
        >
          <Stack.Screen name="Home" component={HomeScreen} />
          <Stack.Screen name="Camera" component={CameraScreen} />
          <Stack.Screen name="Main" component={Main} />
        </Stack.Navigator>
      </NavigationContainer>
    </>
  );
}

export default App;

@@ -1,38 +0,0 @@
{
  "expo": {
    "name": "01iOS",
    "slug": "01iOS",
    "version": "1.0.0",
    "orientation": "portrait",
    "icon": "./assets/icon.png",
    "userInterfaceStyle": "light",
    "splash": {
      "image": "./assets/splash.png",
      "resizeMode": "contain",
      "backgroundColor": "#ffffff"
    },
    "assetBundlePatterns": ["**/*"],
    "plugins": [
      [
        "expo-camera",
        {
          "cameraPermission": "Allow $(PRODUCT_NAME) to access your camera",
          "microphonePermission": "Allow $(PRODUCT_NAME) to access your microphone",
          "recordAudioAndroid": true
        }
      ]
    ],
    "ios": {
      "supportsTablet": true
    },
    "android": {
      "adaptiveIcon": {
        "foregroundImage": "./assets/adaptive-icon.png",
        "backgroundColor": "#ffffff"
      }
    },
    "web": {
      "favicon": "./assets/favicon.png"
    }
  }
}

Binary file not shown (before: 17 KiB)

Binary file not shown (before: 1.4 KiB)

Binary file not shown (before: 22 KiB)

Binary file not shown (before: 46 KiB)

@@ -1,6 +0,0 @@
module.exports = function(api) {
  api.cache(true);
  return {
    presets: ['babel-preset-expo'],
  };
};

File diff suppressed because it is too large

@@ -1,45 +0,0 @@
{
  "name": "01ios",
  "version": "1.0.0",
  "main": "node_modules/expo/AppEntry.js",
  "scripts": {
    "start": "expo start",
    "android": "expo start --android",
    "ios": "expo start --ios",
    "web": "expo start --web",
    "ts:check": "tsc"
  },
  "dependencies": {
    "@react-navigation/native": "^6.1.14",
    "@react-navigation/native-stack": "^6.9.22",
    "expo": "~50.0.8",
    "expo-av": "~13.10.5",
    "expo-barcode-scanner": "~12.9.3",
    "expo-camera": "~14.0.5",
    "expo-haptics": "~12.8.1",
    "expo-permissions": "^14.4.0",
    "expo-status-bar": "~1.11.1",
    "react": "18.2.0",
    "react-native": "0.73.4",
    "react-native-base64": "^0.2.1",
    "react-native-polyfill-globals": "^3.1.0",
    "react-native-safe-area-context": "4.8.2",
    "react-native-screens": "~3.29.0",
    "text-encoding": "^0.7.0",
    "zustand": "^4.5.2"
  },
  "devDependencies": {
    "@babel/core": "^7.20.0",
    "@types/react": "~18.2.45",
    "@types/react-native-base64": "^0.2.2",
    "typescript": "^5.1.3"
  },
  "ios": {
    "infoPlist": {
      "NSAppTransportSecurity": {
        "NSAllowsArbitraryLoads": true
      }
    }
  },
  "private": true
}

@@ -1,113 +0,0 @@
import React, { useState } from "react";
import { StyleSheet, Text, TouchableOpacity, View } from "react-native";
import { Camera } from "expo-camera";
import { useNavigation } from "@react-navigation/native";
import { BarCodeScanner } from "expo-barcode-scanner";
// import useSoundEffect from "../lib/useSoundEffect";

export default function CameraScreen() {
  const [permission, requestPermission] = Camera.useCameraPermissions();
  // const playYay = useSoundEffect(require("../../assets/yay.wav"));
  const [scanned, setScanned] = useState(false);
  const navigation = useNavigation();

  if (!permission) {
    // Component is waiting for permission
    return <View />;
  }

  if (!permission.granted) {
    // No permission granted, request permission
    return (
      <View style={styles.container}>
        <Text>No access to camera</Text>
        <TouchableOpacity onPress={requestPermission} style={styles.button}>
          <Text style={styles.text}>Grant Camera Access</Text>
        </TouchableOpacity>
      </View>
    );
  }

  // function toggleCameraFacing() {
  //   setFacing((current) => (current === "back" ? "front" : "back"));
  // }

  const handleBarCodeScanned = async ({
    type,
    data,
  }: {
    type: string;
    data: string;
  }) => {
    // await playYay();
    setScanned(true);
    console.log(`Bar code with type ${type} and data ${data} has been scanned!`);
    // alert(`Scanned URL: ${data}`);
    navigation.navigate("Main", { scannedData: data });
  };

  return (
    <View style={styles.container}>
      <Camera
        style={styles.camera}
        facing={"back"}
        onBarCodeScanned={scanned ? undefined : handleBarCodeScanned}
        barCodeScannerSettings={{
          barCodeTypes: [BarCodeScanner.Constants.BarCodeType.qr],
        }}
      >
        <View style={styles.buttonContainer}>
          {/* <TouchableOpacity style={styles.button} onPress={toggleCameraFacing}>
            <Text style={styles.text}>Flip Camera</Text>
          </TouchableOpacity> */}
          {scanned && (
            <TouchableOpacity
              onPress={() => setScanned(false)}
              style={styles.button}
            >
              <Text numberOfLines={1} style={styles.text}>
                Scan Again
              </Text>
            </TouchableOpacity>
          )}
        </View>
      </Camera>
    </View>
  );
}

const styles = StyleSheet.create({
  container: {
    flex: 1,
    flexDirection: "column",
    justifyContent: "flex-end",
    position: "relative",
  },
  camera: {
    flex: 1,
  },
  buttonContainer: {
    backgroundColor: "transparent",
    flexDirection: "row",
    margin: 2,
  },
  button: {
    position: "absolute",
    top: 44,
    left: 4,
    flex: 0.1,
    alignSelf: "flex-end",
    alignItems: "center",
    backgroundColor: "#000",
    borderRadius: 10,
    paddingHorizontal: 8,
    paddingVertical: 6,
  },
  text: {
    fontSize: 14,
    color: "white",
  },
});

@@ -1,47 +0,0 @@
import React from "react";
import { View, Text, TouchableOpacity, StyleSheet } from "react-native";
import { useNavigation } from "@react-navigation/native";

const HomeScreen = () => {
  const navigation = useNavigation();
  return (
    <View style={styles.container}>
      {/* <View style={styles.circle} /> */}
      <TouchableOpacity
        style={styles.button}
        onPress={() => navigation.navigate("Camera")}
      >
        <Text style={styles.buttonText}>Scan Code</Text>
      </TouchableOpacity>
    </View>
  );
};

const styles = StyleSheet.create({
  container: {
    flex: 1,
    justifyContent: "center",
    alignItems: "center",
    backgroundColor: "#000",
  },
  circle: {
    width: 100,
    height: 100,
    borderRadius: 50,
    backgroundColor: "#fff",
    marginBottom: 20,
  },
  button: {
    backgroundColor: "#fff",
    paddingHorizontal: 20,
    paddingVertical: 10,
    borderRadius: 5,
  },
  buttonText: {
    color: "#000",
    fontSize: 16,
  },
});

export default HomeScreen;

@@ -1,310 +0,0 @@
import React, { useState, useEffect, useCallback, useRef } from "react";
import {
  View,
  Text,
  TouchableOpacity,
  StyleSheet,
  BackHandler,
  ScrollView,
} from "react-native";
import * as FileSystem from "expo-file-system";
import { Audio } from "expo-av";
import { polyfill as polyfillEncoding } from "react-native-polyfill-globals/src/encoding";
import { Animated } from "react-native";
import useSoundEffect from "../utils/useSoundEffect";
import RecordButton from "../utils/RecordButton";
import { useNavigation } from "@react-navigation/core";

interface MainProps {
  route: {
    params: {
      scannedData: string;
    };
  };
}

const Main: React.FC<MainProps> = ({ route }) => {
  const { scannedData } = route.params;
  const [connectionStatus, setConnectionStatus] =
    useState<string>("Connecting...");
  const [ws, setWs] = useState<WebSocket | null>(null);
  const [wsUrl, setWsUrl] = useState("");
  const [rescan, setRescan] = useState(false);
  const [isPressed, setIsPressed] = useState(false);
  const [recording, setRecording] = useState<Audio.Recording | null>(null);
  const audioQueueRef = useRef<string[]>([]);
  const soundRef = useRef<Audio.Sound | null>(null);
  const [soundUriMap, setSoundUriMap] = useState<Map<Audio.Sound, string>>(
    new Map()
  );
  const audioDir = FileSystem.documentDirectory + "01/audio/";
  const [permissionResponse, requestPermission] = Audio.usePermissions();
  polyfillEncoding();
  const backgroundColorAnim = useRef(new Animated.Value(0)).current;
  const buttonBackgroundColorAnim = useRef(new Animated.Value(0)).current;
  const playPip = useSoundEffect(require("../../assets/pip.mp3"));
  const playPop = useSoundEffect(require("../../assets/pop.mp3"));
  const navigation = useNavigation();
  const backgroundColor = backgroundColorAnim.interpolate({
    inputRange: [0, 1],
    outputRange: ["black", "white"],
  });
  const buttonBackgroundColor = backgroundColorAnim.interpolate({
    inputRange: [0, 1],
    outputRange: ["white", "black"],
  });
  const [accumulatedMessage, setAccumulatedMessage] = useState<string>("");
  const scrollViewRef = useRef<ScrollView>(null);

  /**
   * Checks if audioDir exists in device storage; if not, creates it.
   */
  async function dirExists() {
    try {
      const dirInfo = await FileSystem.getInfoAsync(audioDir);
      if (!dirInfo.exists) {
        console.log("audio directory doesn't exist, creating...");
        await FileSystem.makeDirectoryAsync(audioDir, { intermediates: true });
      }
    } catch (error) {
      console.error("Error checking or creating directory:", error);
    }
  }

  /**
   * Writes the buffer to a temp file in audioDir in base64 encoding.
   *
   * @param {string} buffer
   * @returns tempFilePath or null
   */
  const constructTempFilePath = async (buffer: string) => {
    try {
      await dirExists();
      if (!buffer) {
        console.log("Buffer is undefined or empty.");
        return null;
      }
      const tempFilePath = `${audioDir}${Date.now()}.wav`;
      await FileSystem.writeAsStringAsync(tempFilePath, buffer, {
        encoding: FileSystem.EncodingType.Base64,
      });
      return tempFilePath;
    } catch (error) {
      console.log("Failed to construct temp file path:", error);
      return null; // Return null to prevent crashing; error is logged
    }
  };

  /**
   * Plays the next audio in audioQueue if the queue is not empty
   * and there is no currently playing audio.
   */
  const playNextAudio = useCallback(async () => {
    if (audioQueueRef.current.length > 0 && soundRef.current == null) {
      const uri = audioQueueRef.current.at(0) as string;
      try {
        const { sound: newSound } = await Audio.Sound.createAsync({ uri });
        soundRef.current = newSound;
        setSoundUriMap(new Map(soundUriMap.set(newSound, uri)));
        await newSound.playAsync();
        newSound.setOnPlaybackStatusUpdate(_onPlayBackStatusUpdate);
      } catch (error) {
        console.log("Error playing audio", error);
      }
    } else {
      // audioQueue is empty or a sound is still playing
      return;
    }
  }, []);

  /**
   * Queries the currently playing Expo Audio.Sound object soundRef
   * for playback status. When the status denotes soundRef has finished
   * playback, we unload the sound and call playNextAudio().
   */
  const _onPlayBackStatusUpdate = useCallback(async (status: any) => {
    if (status.didJustFinish) {
      audioQueueRef.current.shift();
      await soundRef.current?.unloadAsync();
      if (soundRef.current) {
        soundUriMap.delete(soundRef.current);
        setSoundUriMap(new Map(soundUriMap));
      }
      soundRef.current = null;
      playNextAudio();
    }
  }, []);

  /**
   * Single swipe to return to the Home screen from the Main page.
   */
  useEffect(() => {
    const backAction = () => {
      navigation.navigate("Home"); // Always navigate back to Home
      return true; // Prevent default action
    };
    // Add event listener for hardware back button on Android
    const backHandler = BackHandler.addEventListener(
      "hardwareBackPress",
      backAction
    );
    return () => backHandler.remove();
  }, [navigation]);

  /**
   * Handles all WebSocket events.
   */
  useEffect(() => {
    let websocket: WebSocket;
    try {
      // console.log("Connecting to WebSocket at " + scannedData);
      setWsUrl(scannedData);
      websocket = new WebSocket(scannedData);
      websocket.binaryType = "blob";

      websocket.onopen = () => {
        setConnectionStatus(`Connected`);
      };

      websocket.onmessage = async (e) => {
        try {
          const message = JSON.parse(e.data);
          if (
            message.content &&
            message.type == "message" &&
            message.role == "assistant"
          ) {
            setAccumulatedMessage((prevMessage) => prevMessage + message.content);
            scrollViewRef.current?.scrollToEnd({ animated: true });
          }
          if (message.content && message.type == "audio") {
            const buffer = message.content;
            if (buffer && buffer.length > 0) {
              const filePath = await constructTempFilePath(buffer);
              if (filePath !== null) {
                audioQueueRef.current.push(filePath);
                if (audioQueueRef.current.length == 1) {
                  playNextAudio();
                }
              } else {
                console.error("Failed to create file path");
              }
            } else {
              console.error("Received message is empty or undefined");
            }
          }
        } catch (error) {
          console.error("Error handling WebSocket message:", error);
        }
      };

      websocket.onerror = (error) => {
        setConnectionStatus("Error connecting to WebSocket.");
        console.error("WebSocket error: ", error);
      };

      websocket.onclose = () => {
        setConnectionStatus("Disconnected.");
      };

      setWs(websocket);
    } catch (error) {
      console.log(error);
      setConnectionStatus("Error creating WebSocket.");
    }
    return () => {
      if (websocket) {
        websocket.close();
      }
    };
  }, [scannedData, rescan]);

  return (
    <Animated.View style={[styles.container, { backgroundColor }]}>
      <View style={{ flex: 6, alignItems: "center", justifyContent: "center" }}>
        <ScrollView
          ref={scrollViewRef}
          style={styles.scrollViewContent}
          showsVerticalScrollIndicator={false}
        >
          <Text style={styles.accumulatedMessage}>{accumulatedMessage}</Text>
        </ScrollView>
      </View>
      <View style={{ flex: 2, justifyContent: "center", alignItems: "center" }}>
        <RecordButton
          playPip={playPip}
          playPop={playPop}
          recording={recording}
          setRecording={setRecording}
          ws={ws}
          backgroundColorAnim={backgroundColorAnim}
          buttonBackgroundColorAnim={buttonBackgroundColorAnim}
          backgroundColor={backgroundColor}
          buttonBackgroundColor={buttonBackgroundColor}
          setIsPressed={setIsPressed}
        />
      </View>
      <View style={{ flex: 1 }}>
        <TouchableOpacity
          style={styles.statusButton}
          onPress={() => {
            setRescan(!rescan);
          }}
        >
          <Text
            style={[
              styles.statusText,
              {
                color: connectionStatus.startsWith("Connected")
                  ? "green"
                  : "red",
              },
            ]}
          >
            {connectionStatus}
          </Text>
        </TouchableOpacity>
      </View>
    </Animated.View>
  );
};

const styles = StyleSheet.create({
  container: {
    flex: 1,
  },
  statusText: {
    fontSize: 12,
    fontWeight: "bold",
  },
  statusButton: {
    position: "absolute",
    bottom: 20,
    alignSelf: "center",
  },
  accumulatedMessage: {
    margin: 20,
    fontSize: 15,
    textAlign: "left",
    color: "white",
    paddingBottom: 30,
    fontFamily: "monospace",
  },
  scrollViewContent: {
    padding: 25,
    width: "90%",
    maxHeight: "80%",
    borderWidth: 5,
    borderColor: "white",
    borderRadius: 10,
  },
});

export default Main;
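
For reference, the `onmessage` handler above implies the following server-to-client message shapes. This is a sketch reconstructed from the parsing logic, not a documented protocol; the 01 server may send additional fields or types:

```typescript
// Reconstructed from Main's onmessage handler; an assumption, not an official schema.
type ServerMessage =
  | { type: "message"; role: "assistant"; content: string } // streamed text, appended to the transcript
  | { type: "audio"; content: string }; // base64-encoded WAV chunk, written to disk and queued for playback
```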

@@ -1,151 +0,0 @@
import React, { useEffect, useCallback } from "react";
import { TouchableOpacity, StyleSheet } from "react-native";
import { Audio } from "expo-av";
import { Animated } from "react-native";
import * as Haptics from "expo-haptics";

interface RecordButtonProps {
  playPip: () => void;
  playPop: () => void;
  recording: Audio.Recording | null;
  setRecording: (recording: Audio.Recording | null) => void;
  ws: WebSocket | null;
  buttonBackgroundColorAnim: Animated.Value;
  backgroundColorAnim: Animated.Value;
  backgroundColor: Animated.AnimatedInterpolation<string | number>;
  buttonBackgroundColor: Animated.AnimatedInterpolation<string | number>;
  setIsPressed: (isPressed: boolean) => void;
}

const styles = StyleSheet.create({
  circle: {
    width: 100,
    height: 100,
    borderRadius: 50,
    justifyContent: "center",
    alignItems: "center",
  },
  button: {
    width: 100,
    height: 100,
    borderRadius: 50,
    justifyContent: "center",
    alignItems: "center",
  },
});

const RecordButton: React.FC<RecordButtonProps> = ({
  playPip,
  playPop,
  recording,
  setRecording,
  ws,
  backgroundColorAnim,
  buttonBackgroundColorAnim,
  backgroundColor,
  buttonBackgroundColor,
  setIsPressed,
}: RecordButtonProps) => {
  const [permissionResponse, requestPermission] = Audio.usePermissions();

  useEffect(() => {
    if (permissionResponse?.status !== "granted") {
      requestPermission();
    }
  }, []);

  const startRecording = useCallback(async () => {
    if (recording) {
      console.log("A recording is already in progress.");
      return;
    }
    try {
      if (
        permissionResponse !== null &&
        permissionResponse.status !== `granted`
      ) {
        await requestPermission();
      }
      await Audio.setAudioModeAsync({
        allowsRecordingIOS: true,
        playsInSilentModeIOS: true,
      });
      const newRecording = new Audio.Recording();
      await newRecording.prepareToRecordAsync(
        Audio.RecordingOptionsPresets.HIGH_QUALITY
      );
      await newRecording.startAsync();
      setRecording(newRecording);
    } catch (err) {
      console.error("Failed to start recording", err);
    }
  }, [recording, permissionResponse]); // Depend on recording so the in-progress guard sees the latest value

  const stopRecording = useCallback(async () => {
    if (recording) {
      await recording.stopAndUnloadAsync();
      await Audio.setAudioModeAsync({
        allowsRecordingIOS: false,
      });
      const uri = recording.getURI();
      setRecording(null);
      if (ws && uri) {
        const response = await fetch(uri);
        const blob = await response.blob();
        const reader = new FileReader();
        reader.readAsArrayBuffer(blob);
        reader.onloadend = () => {
          const audioBytes = reader.result;
          if (audioBytes) {
            ws.send(audioBytes);
          }
        };
      }
    }
  }, [recording, ws]); // Include ws so the latest socket is used when sending audio

  const toggleRecording = (shouldPress: boolean) => {
    Animated.timing(backgroundColorAnim, {
      toValue: shouldPress ? 1 : 0,
      duration: 400,
      useNativeDriver: false,
    }).start();
    Animated.timing(buttonBackgroundColorAnim, {
      toValue: shouldPress ? 1 : 0,
      duration: 400,
      useNativeDriver: false,
    }).start();
  };

  return (
    <TouchableOpacity
      style={styles.button}
      onPressIn={() => {
        playPip();
        setIsPressed(true);
        toggleRecording(true);
        startRecording();
        Haptics.impactAsync(Haptics.ImpactFeedbackStyle.Heavy);
      }}
      onPressOut={() => {
        playPop();
        setIsPressed(false);
        toggleRecording(false);
        stopRecording();
        Haptics.impactAsync(Haptics.ImpactFeedbackStyle.Heavy);
      }}
    >
      <Animated.View
        style={[styles.circle, { backgroundColor: buttonBackgroundColor }]}
      />
    </TouchableOpacity>
  );
};

export default RecordButton;

@@ -1,10 +0,0 @@
// store.js
import { create } from "zustand";

const useStore = create((set: any) => ({
  count: 0,
  increase: () => set((state: any) => ({ count: state.count + 1 })),
  decrease: () => set((state: any) => ({ count: state.count - 1 })),
}));

export default useStore;
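
The counter store above does not appear to be referenced by the screens in this diff. For illustration only, a hypothetical consumer would subscribe with selectors so it re-renders only when the selected slice changes:

```typescript
// Hypothetical usage of the zustand store above (not part of the original app code).
import React from "react";
import { Button } from "react-native";
import useStore from "./store";

function Counter() {
  const count = useStore((state: any) => state.count); // re-renders only when count changes
  const increase = useStore((state: any) => state.increase);
  return <Button title={`Count: ${count}`} onPress={() => increase()} />;
}
```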

@@ -1,29 +0,0 @@
import { useEffect, useState } from "react";
import { Audio } from "expo-av";

const useSoundEffect = (soundFile: any) => {
  const [sound, setSound] = useState<Audio.Sound | null>(null); // Explicitly set initial state to null

  useEffect(() => {
    let loadedSound: Audio.Sound | null = null;
    const loadSound = async () => {
      const { sound: newSound } = await Audio.Sound.createAsync(soundFile);
      loadedSound = newSound;
      setSound(newSound);
    };
    loadSound();
    return () => {
      // Unload the sound created by this effect run. Depending on `sound`
      // here would re-trigger the effect and unload it right after loading.
      loadedSound?.unloadAsync();
    };
  }, [soundFile]); // Reload only when the sound file changes

  const playSound = async () => {
    if (sound) {
      await sound.playAsync();
    }
  };

  return playSound;
};

export default useSoundEffect;

@@ -1,6 +0,0 @@
{
  "extends": "expo/tsconfig.base",
  "compilerOptions": {
    "strict": true
  }
}