diff --git a/software/source/clients/ios/README.md b/software/source/clients/ios/README.md
index 2f65352..64ffeaf 100644
--- a/software/source/clients/ios/README.md
+++ b/software/source/clients/ios/README.md
@@ -1,6 +1,6 @@
 # iOS/Android Client

-***WORK IN PROGRESS***
+**_WORK IN PROGRESS_**

 This repository contains the source code for the 01 iOS/Android app. Work in progress, we will continue to improve this application to get it working properly.

@@ -9,10 +9,11 @@ Feel free to improve this and make a pull request!
 If you want to run it on your own, you will need to install Expo Go on your mobile device.

 ## Setup Instructions
+
 Follow the **[software setup steps](https://github.com/OpenInterpreter/01?tab=readme-ov-file#software)** in the main repo's README first before you read this

 ```shell
-cd software/source/clients/ios/react-native # cd into `react-native`
+cd software/source/clients/mobile/react-native # cd into `react-native`
 npm install # install dependencies
 npx expo start # start local development server
 ```
@@ -20,6 +21,7 @@ npx expo start # start local development server
 In **Expo Go** select _Scan QR code_ to scan the QR code produced by the `npx expo start` command

 ## Using the App
+
 ```shell
 poetry run 01 --mobile # exposes QR code for 01 Light server
 ```
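A note on how these pieces connect: `poetry run 01 --mobile` prints a QR code encoding the 01 server's WebSocket URL, Expo Go decodes it on scan, and `Main.tsx` (changed below) opens the connection with the scanned string. A minimal sketch of that handshake from the app's side; the address shown is a placeholder, not something this diff specifies:

```typescript
// Sketch only: `scannedData` is the string decoded from the server's QR code.
const scannedData = "ws://192.168.1.10:10001"; // assumed example address
const websocket = new WebSocket(scannedData);
websocket.binaryType = "blob"; // the server streams audio back as binary blobs
websocket.onopen = () => console.log("Connected to the 01 server");
```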
diff --git a/software/source/clients/ios/react-native/assets/qr.png b/software/source/clients/ios/react-native/assets/qr.png
deleted file mode 100644
index 33cf7e0..0000000
Binary files a/software/source/clients/ios/react-native/assets/qr.png and /dev/null differ
diff --git a/software/source/clients/ios/react-native/src/screens/Main.tsx b/software/source/clients/ios/react-native/src/screens/Main.tsx
index ecccadd..f0136dc 100644
--- a/software/source/clients/ios/react-native/src/screens/Main.tsx
+++ b/software/source/clients/ios/react-native/src/screens/Main.tsx
@@ -1,15 +1,19 @@
 import React, { useState, useEffect, useCallback, useRef } from "react";
-import { View, Text, TouchableOpacity, StyleSheet, BackHandler, Image } from "react-native";
+import {
+  View,
+  Text,
+  TouchableOpacity,
+  StyleSheet,
+  BackHandler,
+} from "react-native";
 import * as FileSystem from "expo-file-system";
-import { AVPlaybackStatus, AVPlaybackStatusSuccess, Audio } from "expo-av";
+import { Audio } from "expo-av";
 import { polyfill as polyfillEncoding } from "react-native-polyfill-globals/src/encoding";
 import { create } from "zustand";
-import useStore from "../utils/state";
 import { Animated } from "react-native";
-import * as Haptics from "expo-haptics";
 import useSoundEffect from "../utils/useSoundEffect";
 import RecordButton from "../utils/RecordButton";
-import { useNavigation } from "@react-navigation/native";
+import { useNavigation } from "@react-navigation/core";

 interface MainProps {
   route: {
@@ -45,6 +49,8 @@ const Main: React.FC<MainProps> = ({ route }) => {
   const [connectionStatus, setConnectionStatus] =
     useState<string>("Connecting...");
   const [ws, setWs] = useState<WebSocket | null>(null);
+  const [wsUrl, setWsUrl] = useState("");
+  const [rescan, setRescan] = useState(false);
   const [isPressed, setIsPressed] = useState(false);
   const [recording, setRecording] = useState<Audio.Recording | null>(null);
   const addToQueue = useAudioQueueStore((state) => state.addToQueue);
@@ -64,13 +70,12 @@ const Main: React.FC<MainProps> = ({ route }) => {
   const navigation = useNavigation();
   const backgroundColor = backgroundColorAnim.interpolate({
     inputRange: [0, 1],
-    outputRange: ["black", "white"], // Change as needed
+    outputRange: ["black", "white"],
   });
   const buttonBackgroundColor = backgroundColorAnim.interpolate({
     inputRange: [0, 1],
-    outputRange: ["white", "black"], // Inverse of the container
+    outputRange: ["white", "black"],
   });
-
   const constructTempFilePath = async (buffer: string) => {
     try {
       await dirExists();
@@ -107,13 +112,8 @@ const Main: React.FC<MainProps> = ({ route }) => {
   }

   const playNextAudio = useCallback(async () => {
-    // console.log(
-    //   `in playNextAudio audioQueue is ${audioQueue.length} and sound is ${sound}`
-    //);
-
     if (audioQueue.length > 0 && sound == null) {
       const uri = audioQueue.shift() as string;
-      // console.log("load audio from", uri);

       try {
         const { sound: newSound } = await Audio.Sound.createAsync({ uri });
@@ -126,7 +126,7 @@ const Main: React.FC<MainProps> = ({ route }) => {
         playNextAudio();
       }
     } else {
-      // console.log("audioQueue is empty or sound is not null");
+      // audioQueue is empty or sound is not null
       return;
     }
   }, [audioQueue, sound, soundUriMap]);
@@ -144,6 +144,21 @@ const Main: React.FC<MainProps> = ({ route }) => {
     [sound, soundUriMap, playNextAudio]
   );

+  useEffect(() => {
+    const backAction = () => {
+      navigation.navigate("Home"); // Always navigate back to Home
+      return true; // Prevent default action
+    };
+
+    // Add event listener for hardware back button on Android
+    const backHandler = BackHandler.addEventListener(
+      "hardwareBackPress",
+      backAction
+    );
+
+    return () => backHandler.remove();
+  }, [navigation]);
+
   useEffect(() => {
     if (audioQueue.length > 0 && !sound) {
       playNextAudio();
     }
@@ -155,14 +170,13 @@ const Main: React.FC<MainProps> = ({ route }) => {

   useEffect(() => {
     let websocket: WebSocket;
     try {
-      console.log("Connecting to WebSocket at " + scannedData);
+      // console.log("Connecting to WebSocket at " + scannedData);
+      setWsUrl(scannedData);
       websocket = new WebSocket(scannedData);
       websocket.binaryType = "blob";

       websocket.onopen = () => {
         setConnectionStatus(`Connected`);
-        // setConnectionStatus(`Connected to ${scannedData}`);
-        console.log("WebSocket connected");
       };
@@ -170,15 +184,11 @@ const Main: React.FC<MainProps> = ({ route }) => {
       websocket.onmessage = async (e) => {
         try {
           const message = JSON.parse(e.data);

           if (message.content && message.type == "audio") {
-            console.log("✅✅✅✅✅✅✅✅✅✅✅✅✅✅✅✅✅✅✅ Audio message");
-
             const buffer = message.content;
-            // console.log(buffer.length);
             if (buffer && buffer.length > 0) {
               const filePath = await constructTempFilePath(buffer);
               if (filePath !== null) {
                 addToQueue(filePath);
-                // console.log("audio file written to", filePath);
               } else {
                 console.error("Failed to create file path");
               }
@@ -198,7 +208,6 @@ const Main: React.FC<MainProps> = ({ route }) => {

       websocket.onclose = () => {
         setConnectionStatus("Disconnected.");
-        console.log("WebSocket disconnected");
       };

       setWs(websocket);
@@ -212,170 +221,41 @@ const Main: React.FC<MainProps> = ({ route }) => {
         websocket.close();
       }
     };
-  }, [scannedData]);
-
-  useEffect(() => {
-    console.log("Permission Response:", permissionResponse);
-    if (permissionResponse?.status !== "granted") {
-      console.log("Requesting permission..");
-      requestPermission();
-    }
-  }, []);
-
-  const startRecording = useCallback(async () => {
-    if (recording) {
-      console.log("A recording is already in progress.");
-      return;
-    }
-
-    try {
-      console.log("🌶️🌶️🌶️🌶️🌶️🌶️🌶️🌶️🌶️🌶️");
-
-      console.log(permissionResponse);
-
-      if (
-        permissionResponse !== null &&
-        permissionResponse.status !== `granted`
-      ) {
-        console.log("Requesting permission..");
-        await requestPermission();
-      }
-
-      await Audio.setAudioModeAsync({
-        allowsRecordingIOS: true,
-        playsInSilentModeIOS: true,
-      });
-
-      console.log("Starting recording..");
-      const newRecording = new Audio.Recording();
-      await newRecording.prepareToRecordAsync(
-        Audio.RecordingOptionsPresets.HIGH_QUALITY
-      );
-      await newRecording.startAsync();
-
-      setRecording(newRecording);
-    } catch (err) {
-      console.error("Failed to start recording", err);
-    }
-  }, []);
-
-  const stopRecording = useCallback(async () => {
-    console.log("Stopping recording..");
-
-    if (recording) {
-      await recording.stopAndUnloadAsync();
-      await Audio.setAudioModeAsync({
-        allowsRecordingIOS: false,
-      });
-      const uri = recording.getURI();
-      // console.log("recording uri at ", uri);
-      setRecording(null);
-
-      if (ws && uri) {
-        const response = await fetch(uri);
-        // console.log("fetched audio file", response);
-        const blob = await response.blob();
-
-        const reader = new FileReader();
-        reader.readAsArrayBuffer(blob);
-        reader.onloadend = () => {
-          const audioBytes = reader.result;
-          if (audioBytes) {
-            ws.send(audioBytes);
-            const audioArray = new Uint8Array(audioBytes as ArrayBuffer);
-            const decoder = new TextDecoder("utf-8");
-            // console.log(
-            //   "sent audio bytes to WebSocket",
-            //   decoder.decode(audioArray).slice(0, 50)
-            // );
-          }
-        };
-      }
-    }
-  }, [recording]);
-
-  const toggleRecording = (shouldPress: boolean) => {
-    Animated.timing(backgroundColorAnim, {
-      toValue: shouldPress ? 1 : 0,
-      duration: 400,
-      useNativeDriver: false, // 'backgroundColor' does not support native driver
-    }).start();
-    Animated.timing(buttonBackgroundColorAnim, {
-      toValue: shouldPress ? 1 : 0,
-      duration: 400,
-      useNativeDriver: false, // 'backgroundColor' does not support native driver
-    }).start();
-  };
-
-  useEffect(() => {
-    const backAction = () => {
-      navigation.navigate('Home'); // Always navigate back to Home
-      return true; // Prevent default action
-    };
-
-    // Add event listener for hardware back button on Android
-    const backHandler = BackHandler.addEventListener(
-      'hardwareBackPress',
-      backAction
-    );
-
-    return () => backHandler.remove();
-  }, [navigation]);
-
+  }, [scannedData, rescan]);

   return (
     <Animated.View style={[styles.container, { backgroundColor }]}>
-      {/* <TouchableOpacity
-        style={styles.qr}
-        onPress={() => {
-          console.log("hi!");
-
-          navigation.navigate("Camera");
-        }}
-      >
-        <Image source={require("../../assets/qr.png")} style={styles.icon} />
-      </TouchableOpacity> */}
-      {/* <View style={styles.topBar}></View> */}
-      <View style={styles.middle}>
-        <Text style={styles.statusText}>{connectionStatus}</Text>
-        <TouchableOpacity
-          style={styles.button}
-          onPressIn={() => {
-            playPip();
-            setIsPressed(true);
-            toggleRecording(true); // Pass true when pressed
-            startRecording();
-            Haptics.impactAsync(Haptics.ImpactFeedbackStyle.Heavy);
-          }}
-          onPressOut={() => {
-            playPop();
-            setIsPressed(false);
-            toggleRecording(false); // Pass false when released
-            stopRecording();
-            Haptics.impactAsync(Haptics.ImpactFeedbackStyle.Heavy);
-          }}
-        >
-          <Animated.View
-            style={[styles.circle, { backgroundColor: buttonBackgroundColor }]}
-          />
-          {/* <Text style={styles.buttonTextDefault}>
-            Record
-          </Text> */}
-        </TouchableOpacity>
-      </View>
+      <TouchableOpacity
+        style={styles.statusButton}
+        onPress={() => {
+          setRescan(!rescan);
+        }}
+      >
+        <Text style={styles.statusText}>{connectionStatus}</Text>
+      </TouchableOpacity>
+      <RecordButton
+        playPip={playPip}
+        playPop={playPop}
+        recording={recording}
+        setRecording={setRecording}
+        ws={ws}
+        backgroundColorAnim={backgroundColorAnim}
+        buttonBackgroundColorAnim={buttonBackgroundColorAnim}
+        backgroundColor={backgroundColor}
+        buttonBackgroundColor={buttonBackgroundColor}
+        setIsPressed={setIsPressed}
+      />
     </Animated.View>
   );
@@ -418,27 +298,14 @@ const styles = StyleSheet.create({
     paddingTop: 50,
   },
-
-  button: {
-    width: 100,
-    height: 100,
-    borderRadius: 50,
-    justifyContent: "center",
-    alignItems: "center",
-  },
-  buttonTextDefault: {
-    color: "black",
-    fontSize: 16,
-  },
-  buttonTextRecording: {
-    color: "white",
-    fontSize: 16,
-  },
   statusText: {
+    fontSize: 12,
+    fontWeight: "bold",
+  },
+  statusButton: {
     position: "absolute",
     bottom: 20,
     alignSelf: "center",
-    fontSize: 12,
-    fontWeight: "bold",
   },
 });
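Worth noting for reviewers: the new `rescan` flag is never read for its value; it exists only so that toggling it re-triggers the WebSocket `useEffect`, whose cleanup closes the old socket before the effect reconnects. A minimal sketch of the pattern, stripped down to its moving parts:

```tsx
// Reduced sketch of the reconnect-on-tap pattern (not the full component).
const [rescan, setRescan] = useState(false);

useEffect(() => {
  const websocket = new WebSocket(scannedData);
  // ...attach onopen/onmessage/onclose handlers here...
  return () => websocket.close(); // cleanup closes the old socket first
}, [scannedData, rescan]); // flipping `rescan` re-runs the effect

// The status label doubles as the rescan trigger:
// <TouchableOpacity onPress={() => setRescan(!rescan)}> ... </TouchableOpacity>
```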
diff --git a/software/source/clients/ios/react-native/src/utils/RecordButton.ts b/software/source/clients/ios/react-native/src/utils/RecordButton.ts
deleted file mode 100644
index 30f6192..0000000
--- a/software/source/clients/ios/react-native/src/utils/RecordButton.ts
+++ /dev/null
@@ -1,180 +0,0 @@
-import React, { useState, useEffect, useCallback, useRef } from "react";
-import { View, Text, TouchableOpacity, StyleSheet, Image, Touchable } from "react-native";
-import * as FileSystem from "expo-file-system";
-import { AVPlaybackStatus, AVPlaybackStatusSuccess, Audio } from "expo-av";
-import { create } from "zustand";
-import useStore from "../utils/state";
-import { Animated } from "react-native";
-import * as Haptics from "expo-haptics";
-import useSoundEffect from "../utils/useSoundEffect";
-
-import { useNavigation } from "@react-navigation/native";
-
-interface RecordButtonProps {
-  playPip: () => void;
-  playPop: () => void;
-  recording: Audio.Recording | null;
-  setRecording: (recording: Audio.Recording | null) => void;
-  ws: WebSocket | null;
-  backgroundColorAnim: Animated.Value;
-  buttonBackgroundColorAnim: Animated.Value;
-  setIsPressed: (isPressed: boolean) => void;
-}
-
-const styles = StyleSheet.create({
-  container: {
-    flex: 1,
-    position: "relative",
-  },
-  middle: {
-    flex: 1,
-    justifyContent: "center",
-    alignItems: "center",
-    padding: 10,
-    position: "relative",
-  },
-  circle: {
-    width: 100,
-    height: 100,
-    borderRadius: 50,
-    justifyContent: "center",
-    alignItems: "center",
-  },
-  qr: {
-    position: "absolute",
-    top: 30,
-    left: 10,
-    padding: 10,
-    zIndex: 100,
-  },
-  icon: {
-    height: 40,
-    width: 40,
-  },
-  topBar: {
-    height: 40,
-    backgroundColor: "#000",
-    paddingTop: 50,
-  },
-
-  button: {
-    width: 100,
-    height: 100,
-    borderRadius: 50,
-    justifyContent: "center",
-    alignItems: "center",
-  },
-  buttonTextDefault: {
-    color: "black",
-    fontSize: 16,
-  },
-  buttonTextRecording: {
-    color: "white",
-    fontSize: 16,
-  },
-  statusText: {
-    position: "absolute",
-    bottom: 20,
-    alignSelf: "center",
-    fontSize: 12,
-    fontWeight: "bold",
-  },
-});
-
-
-const RecordButton = ({ playPip, playPop, recording, setRecording, ws, backgroundColorAnim, buttonBackgroundColorAnim, setIsPressed }: RecordButtonProps) => {
-  const [permissionResponse, requestPermission] = Audio.usePermissions();
-
-  useEffect(() => {
-    console.log("Permission Response:", permissionResponse);
-    if (permissionResponse?.status !== "granted") {
-      console.log("Requesting permission..");
-      requestPermission();
-    }
-  }, []);
-
-  const startRecording = useCallback(async () => {
-    if (recording) {
-      console.log("A recording is already in progress.");
-      return;
-    }
-
-    try {
-      console.log("🌶️🌶️🌶️🌶️🌶️🌶️🌶️🌶️🌶️🌶️");
-
-      console.log(permissionResponse);
-
-      if (
-        permissionResponse !== null &&
-        permissionResponse.status !== `granted`
-      ) {
-        console.log("Requesting permission..");
-        await requestPermission();
-      }
-
-      await Audio.setAudioModeAsync({
-        allowsRecordingIOS: true,
-        playsInSilentModeIOS: true,
-      });
-
-      console.log("Starting recording..");
-      const newRecording = new Audio.Recording();
-      await newRecording.prepareToRecordAsync(
-        Audio.RecordingOptionsPresets.HIGH_QUALITY
-      );
-      await newRecording.startAsync();
-
-      setRecording(newRecording);
-    } catch (err) {
-      console.error("Failed to start recording", err);
-    }
-  }, []);
-
-  const stopRecording = useCallback(async () => {
-    console.log("Stopping recording..");
-
-    if (recording) {
-      await recording.stopAndUnloadAsync();
-      await Audio.setAudioModeAsync({
-        allowsRecordingIOS: false,
-      });
-      const uri = recording.getURI();
-      // console.log("recording uri at ", uri);
-      setRecording(null);
-
-      if (ws && uri) {
-        const response = await fetch(uri);
-        // console.log("fetched audio file", response);
-        const blob = await response.blob();
-
-        const reader = new FileReader();
-        reader.readAsArrayBuffer(blob);
-        reader.onloadend = () => {
-          const audioBytes = reader.result;
-          if (audioBytes) {
-            ws.send(audioBytes);
-          }
-        };
-      }
-    }
-  }, [recording]);
-
-  const toggleRecording = (shouldPress: boolean) => {
-    Animated.timing(backgroundColorAnim, {
-      toValue: shouldPress ? 1 : 0,
-      duration: 400,
-      useNativeDriver: false, // 'backgroundColor' does not support native driver
-    }).start();
-    Animated.timing(buttonBackgroundColorAnim, {
-      toValue: shouldPress ? 1 : 0,
-      duration: 400,
-      useNativeDriver: false, // 'backgroundColor' does not support native driver
-    }).start();
-  };
-
-  return (
-  );
-};
-
-export default RecordButton;
diff --git a/software/source/clients/ios/react-native/src/utils/RecordButton.tsx b/software/source/clients/ios/react-native/src/utils/RecordButton.tsx
new file mode 100644
index 0000000..ffdaeb0
--- /dev/null
+++ b/software/source/clients/ios/react-native/src/utils/RecordButton.tsx
@@ -0,0 +1,151 @@
+import React, { useEffect, useCallback } from "react";
+import { TouchableOpacity, StyleSheet } from "react-native";
+import { Audio } from "expo-av";
+import { Animated } from "react-native";
+import * as Haptics from "expo-haptics";
+
+interface RecordButtonProps {
+  playPip: () => void;
+  playPop: () => void;
+  recording: Audio.Recording | null;
+  setRecording: (recording: Audio.Recording | null) => void;
+  ws: WebSocket | null;
+  buttonBackgroundColorAnim: Animated.Value;
+  backgroundColorAnim: Animated.Value;
+  backgroundColor: Animated.AnimatedInterpolation;
+  buttonBackgroundColor: Animated.AnimatedInterpolation;
+  setIsPressed: (isPressed: boolean) => void;
+}
+
+const styles = StyleSheet.create({
+  circle: {
+    width: 100,
+    height: 100,
+    borderRadius: 50,
+    justifyContent: "center",
+    alignItems: "center",
+  },
+  button: {
+    width: 100,
+    height: 100,
+    borderRadius: 50,
+    justifyContent: "center",
+    alignItems: "center",
+  },
+});
+
+const RecordButton: React.FC<RecordButtonProps> = ({
+  playPip,
+  playPop,
+  recording,
+  setRecording,
+  ws,
+  backgroundColorAnim,
+  buttonBackgroundColorAnim,
+  backgroundColor,
+  buttonBackgroundColor,
+  setIsPressed,
+}: RecordButtonProps) => {
+  const [permissionResponse, requestPermission] = Audio.usePermissions();
+
+  useEffect(() => {
+    if (permissionResponse?.status !== "granted") {
+      requestPermission();
+    }
+  }, []);
+
+  const startRecording = useCallback(async () => {
+    if (recording) {
+      console.log("A recording is already in progress.");
+      return;
+    }
+
+    try {
+      if (
+        permissionResponse !== null &&
+        permissionResponse.status !== `granted`
+      ) {
+        await requestPermission();
+      }
+
+      await Audio.setAudioModeAsync({
+        allowsRecordingIOS: true,
+        playsInSilentModeIOS: true,
+      });
+
+      const newRecording = new Audio.Recording();
+      await newRecording.prepareToRecordAsync(
+        Audio.RecordingOptionsPresets.HIGH_QUALITY
+      );
+      await newRecording.startAsync();
+
+      setRecording(newRecording);
+    } catch (err) {
+      console.error("Failed to start recording", err);
+    }
+  }, []);
+
+  const stopRecording = useCallback(async () => {
+    if (recording) {
+      await recording.stopAndUnloadAsync();
+      await Audio.setAudioModeAsync({
+        allowsRecordingIOS: false,
+      });
+      const uri = recording.getURI();
+      setRecording(null);
+
+      if (ws && uri) {
+        const response = await fetch(uri);
+        const blob = await response.blob();
+
+        const reader = new FileReader();
+        reader.readAsArrayBuffer(blob);
+        reader.onloadend = () => {
+          const audioBytes = reader.result;
+          if (audioBytes) {
+            ws.send(audioBytes);
+          }
+        };
+      }
+    }
+  }, [recording]);
+
+  const toggleRecording = (shouldPress: boolean) => {
+    Animated.timing(backgroundColorAnim, {
+      toValue: shouldPress ? 1 : 0,
+      duration: 400,
+      useNativeDriver: false,
+    }).start();
+    Animated.timing(buttonBackgroundColorAnim, {
+      toValue: shouldPress ? 1 : 0,
+      duration: 400,
+      useNativeDriver: false,
+    }).start();
+  };
+
+  return (
+    <TouchableOpacity
+      style={styles.button}
+      onPressIn={() => {
+        playPip();
+        setIsPressed(true);
+        toggleRecording(true);
+        startRecording();
+        Haptics.impactAsync(Haptics.ImpactFeedbackStyle.Heavy);
+      }}
+      onPressOut={() => {
+        playPop();
+        setIsPressed(false);
+        toggleRecording(false);
+        stopRecording();
+        Haptics.impactAsync(Haptics.ImpactFeedbackStyle.Heavy);
+      }}
+    >
+      <Animated.View
+        style={[styles.circle, { backgroundColor: buttonBackgroundColor }]}
+      />
+    </TouchableOpacity>
+  );
+};
+
+export default RecordButton;
diff --git a/software/source/clients/ios/react-native/src/utils/useSoundEffect.ts b/software/source/clients/ios/react-native/src/utils/useSoundEffect.ts
index 250353c..5e73fec 100644
--- a/software/source/clients/ios/react-native/src/utils/useSoundEffect.ts
+++ b/software/source/clients/ios/react-native/src/utils/useSoundEffect.ts
@@ -1,20 +1,11 @@
 import { useEffect, useState } from "react";
-import { Audio, InterruptionModeAndroid, InterruptionModeIOS } from "expo-av";
+import { Audio } from "expo-av";

-const useSoundEffect = (soundFile) => {
-  const [sound, setSound] = useState(null); // Explicitly set initial state to null
+const useSoundEffect = (soundFile: any) => {
+  const [sound, setSound] = useState<Audio.Sound | null>(null); // Explicitly set initial state to null

   useEffect(() => {
     const loadSound = async () => {
-      // await Audio.setAudioModeAsync({
-      //   staysActiveInBackground: true,
-      //   shouldDuckAndroid: true,
-      //   playThroughEarpieceAndroid: false,
-      //   interruptionModeIOS: InterruptionModeIOS.DoNotMix,
-      //   interruptionModeAndroid: InterruptionModeAndroid.DoNotMix,
-      //   allowsRecordingIOS: false,
-      //   playsInSilentModeIOS: true,
-      // });
       const { sound: newSound } = await Audio.Sound.createAsync(soundFile);
       setSound(newSound);
     };
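For reference, `Main.tsx` consumes this hook for the press/release sound cues (`playPip`, `playPop`). Judging from those call sites, the hook returns a function that replays the loaded sound. A usage sketch; the `require()` asset paths are illustrative guesses, not confirmed by this diff:

```typescript
import useSoundEffect from "../utils/useSoundEffect";

// Inside the Main component; each call loads one sound and returns a player.
const playPip = useSoundEffect(require("../../assets/pip.mp3")); // assumed path
const playPop = useSoundEffect(require("../../assets/pop.mp3")); // assumed path
// playPip() fires on press-in, playPop() on press-out (see RecordButton above).
```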