@ -0,0 +1,83 @@
## For End Users

[Announcement video](https://www.youtube.com/watch?v=jWr-WeXAdeI)

[Wes Roth](https://www.youtube.com/@WesRoth)

<details>

<summary>Details</summary>

No technical coverage.

</details>

---

[Announcement video](https://www.youtube.com/watch?v=JaBFT3fF2fk)

[TheAIGRID](https://www.youtube.com/@TheAiGrid)

<details>

<summary>Details</summary>

[Here](https://youtu.be/JaBFT3fF2fk?si=8zPGO-U6WdLNnISw&t=656) he mentions the current lack of Windows support.

</details>

---

[Announcement video](https://www.youtube.com/watch?v=Q_p82HtBqoc)

[Matt Berman](https://www.youtube.com/@matthew_berman)

<details>

<summary>Details</summary>

[Here](https://youtu.be/Q_p82HtBqoc?si=aAxjWZnBdwBbaOUr&t=579) Berman shows an install of 01 using Conda and Python 3.9, apparently on Linux, and shows how to get OpenAI API keys.

</details>

---

[Announcement video](https://www.youtube.com/watch?v=q0dJ7T7au2Y)

[WorldofAI](https://www.youtube.com/@intheworldofai)

<details>

<summary>Details</summary>

<!-- Add details here -->

</details>

---

[Breakdown video](https://www.youtube.com/watch?v=W-VwN0n4d9Y)

[Mervin Praison](https://www.youtube.com/@MervinPraison)

<details>

<summary>Details</summary>

- Uses Conda to install 01 with Python 3.11, on Linux (possibly macOS)
- 0:00 Introduction to Open Interpreter
- 0:47 Creating Apps and Summarizing Documents
- 1:20 Image Modifications and Game Creation
- 2:55 Exploratory Data Analysis and Charting
- 4:00 Server Log Analysis
- 5:01 Image and Video Editing
- 6:00 Composing Music with AI
- 7:18 Calendar Management and Email Automation
- 9:01 Integrating with FastAPI and LM Studio

</details>

---

[Breakdown video](https://www.youtube.com/watch?v=uyfoHQVgeY0)

[Gary Explains](https://www.youtube.com/@GaryExplains)

<br>Covers **Open Interpreter**, not **01**

<details>

<summary>Details</summary>

- 3:45 States that it runs on macOS, Linux, and Windows, and requires Python 3.10

</details>

## For Developers

<br>

Coming soon
@ -1,13 +0,0 @@
# iOS/Android Client

[WORK IN PROGRESS]

This repository contains the source code for the 01 iOS/Android app. Work in progress, we will continue to improve this application to get it working properly.

Feel free to improve this and make a pull request!

If you want to run it on your own, you will need expo.

1. Install dependencies `npm install`
2. Run the app `npx expo start`
3. Open the app in your simulator or on your device with the expo app by scanning the QR code
@ -1,22 +0,0 @@
import * as React from "react";
import { NavigationContainer } from "@react-navigation/native";
import { createNativeStackNavigator } from "@react-navigation/native-stack";
import HomeScreen from "./src/screens/HomeScreen";
import CameraScreen from "./src/screens/Camera";
import Main from "./src/screens/Main";

const Stack = createNativeStackNavigator();

function App() {
  return (
    <NavigationContainer>
      <Stack.Navigator initialRouteName="Home">
        <Stack.Screen name="Home" component={HomeScreen} />
        <Stack.Screen name="Camera" component={CameraScreen} />
        <Stack.Screen name="Main" component={Main} />
      </Stack.Navigator>
    </NavigationContainer>
  );
}

export default App;
@ -1,171 +0,0 @@
import React, { useState, useEffect } from "react";
import { View, Text, TouchableOpacity, StyleSheet } from "react-native";
import { Audio } from "expo-av";

interface MainProps {
  route: {
    params: {
      scannedData: string;
    };
  };
}

const Main: React.FC<MainProps> = ({ route }) => {
  const { scannedData } = route.params;

  const [connectionStatus, setConnectionStatus] =
    useState<string>("Connecting...");
  const [ws, setWs] = useState<WebSocket | null>(null);
  const [recording, setRecording] = useState<Audio.Recording | null>(null);
  const [audioQueue, setAudioQueue] = useState<string[]>([]);

  useEffect(() => {
    const playNextAudio = async () => {
      if (audioQueue.length > 0) {
        const uri = audioQueue.shift();
        const { sound } = await Audio.Sound.createAsync(
          { uri: uri! },
          { shouldPlay: true }
        );
        sound.setOnPlaybackStatusUpdate(async (status) => {
          if (status.didJustFinish && !status.isLooping) {
            await sound.unloadAsync();
            playNextAudio();
          }
        });
      }
    };

    let websocket: WebSocket;
    try {
      console.log("Connecting to WebSocket at " + scannedData);
      websocket = new WebSocket(scannedData);

      websocket.onopen = () => {
        setConnectionStatus(`Connected to ${scannedData}`);
        console.log("WebSocket connected");
      };
      websocket.onmessage = async (e) => {
        console.log("Received message: ", e.data);
        setAudioQueue((prevQueue) => [...prevQueue, e.data]);
        if (audioQueue.length === 1) {
          playNextAudio();
        }
      };

      websocket.onerror = (error) => {
        setConnectionStatus("Error connecting to WebSocket.");
        console.error("WebSocket error: ", error);
      };

      websocket.onclose = () => {
        setConnectionStatus("Disconnected.");
        console.log("WebSocket disconnected");
      };

      setWs(websocket);
    } catch (error) {
      console.log(error);
      setConnectionStatus("Error creating WebSocket.");
    }

    return () => {
      if (websocket) {
        websocket.close();
      }
    };
  }, [scannedData, audioQueue]);

  const startRecording = async () => {
    if (recording) {
      console.log("A recording is already in progress.");
      return;
    }

    try {
      console.log("Requesting permissions..");
      await Audio.requestPermissionsAsync();
      await Audio.setAudioModeAsync({
        allowsRecordingIOS: true,
        playsInSilentModeIOS: true,
      });
      console.log("Starting recording..");
      const { recording: newRecording } = await Audio.Recording.createAsync(
        Audio.RECORDING_OPTIONS_PRESET_HIGH_QUALITY
      );
      setRecording(newRecording);
      console.log("Recording started");
    } catch (err) {
      console.error("Failed to start recording", err);
    }
  };

  const stopRecording = async () => {
    console.log("Stopping recording..");
    setRecording(null);
    if (recording) {
      await recording.stopAndUnloadAsync();
      const uri = recording.getURI();
      console.log("Recording stopped and stored at", uri);
      if (ws && uri) {
        ws.send(uri);
      }
    }
  };

  return (
    <View style={styles.container}>
      <Text
        style={[
          styles.statusText,
          { color: connectionStatus.startsWith("Connected") ? "green" : "red" },
        ]}
      >
        {connectionStatus}
      </Text>
      <TouchableOpacity
        style={styles.button}
        onPressIn={startRecording}
        onPressOut={stopRecording}
      >
        <View style={styles.circle}>
          <Text style={styles.buttonText}>Record</Text>
        </View>
      </TouchableOpacity>
    </View>
  );
};

const styles = StyleSheet.create({
  container: {
    flex: 1,
    justifyContent: "center",
    alignItems: "center",
    backgroundColor: "#fff",
  },
  circle: {
    width: 100,
    height: 100,
    borderRadius: 50,
    backgroundColor: "black",
    justifyContent: "center",
    alignItems: "center",
  },
  button: {
    width: 100,
    height: 100,
    borderRadius: 50,
    justifyContent: "center",
    alignItems: "center",
  },
  buttonText: {
    color: "white",
    fontSize: 16,
  },
  statusText: {
    marginBottom: 20,
    fontSize: 16,
  },
});

export default Main;
@ -0,0 +1,32 @@
# iOS/Android Client

**_WORK IN PROGRESS_**

This repository contains the source code for the 01 iOS/Android app. It is a work in progress; we will continue to improve this application to get it working properly.

Feel free to improve this and make a pull request!

If you want to run it on your own, you will need to install Expo Go on your mobile device.

## Setup Instructions

Follow the **[software setup steps](https://github.com/OpenInterpreter/01?tab=readme-ov-file#software)** in the main repo's README first before continuing.

```shell
cd software/source/clients/mobile/react-native # cd into `react-native`
npm install # install dependencies
npx expo start # start local development server
```

In **Expo Go**, select _Scan QR code_ and scan the QR code produced by the `npx expo start` command.

## Using the App

```shell
cd software # cd into `software`
poetry run 01 --mobile # exposes QR code for 01 Light server
```

In the app, select _Scan Code_ and scan the QR code produced by the `poetry run 01 --mobile` command.

Press and hold the button to speak, and release it to send the request. To rescan the QR code, swipe left on the screen to go back.
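For reference, the client (see `Main` below) expects each incoming WebSocket message to be JSON carrying base64-encoded WAV audio. A minimal sketch of that shape, with illustrative values:

```typescript
// Sketch of the message shape Main's onmessage handler plays back.
// Values are illustrative; messages without type "audio" and a non-empty
// base64 `content` are ignored by the client.
const audioMessage = {
  type: "audio", // client checks message.type === "audio"
  content: "UklGRiQAAABXQVZF...", // base64-encoded WAV bytes (truncated)
};
const wire = JSON.stringify(audioMessage); // arrives on the app as e.data
console.log(wire.length);
```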
@ -0,0 +1,31 @@
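// Root component: status bar plus a native stack navigator (Home, Camera,
// Main) with the system header hidden globally.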
import * as React from "react";
import { NavigationContainer } from "@react-navigation/native";
import { createNativeStackNavigator } from "@react-navigation/native-stack";
import HomeScreen from "./src/screens/HomeScreen";
import CameraScreen from "./src/screens/Camera";
import Main from "./src/screens/Main";
import { StatusBar } from "expo-status-bar";

const Stack = createNativeStackNavigator();

function App() {
  return (
    <>
      <StatusBar style="light" />
      <NavigationContainer>
        <Stack.Navigator
          initialRouteName="Home"
          screenOptions={{
            headerShown: false, // This hides the navigation bar globally
          }}
        >
          <Stack.Screen name="Home" component={HomeScreen} />
          <Stack.Screen name="Camera" component={CameraScreen} />
          <Stack.Screen name="Main" component={Main} />
        </Stack.Navigator>
      </NavigationContainer>
    </>
  );
}

export default App;
@ -0,0 +1,285 @@
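// Main screen: opens a WebSocket to the URL scanned from the QR code, writes
// incoming base64 WAV messages to disk, plays them back in queue order, and
// hosts the RecordButton that streams recorded audio up the same socket.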
import React, { useState, useEffect, useCallback, useRef } from "react";
import {
  View,
  Text,
  TouchableOpacity,
  StyleSheet,
  BackHandler,
} from "react-native";
import * as FileSystem from "expo-file-system";
import { Audio } from "expo-av";
import { polyfill as polyfillEncoding } from "react-native-polyfill-globals/src/encoding";
import { Animated } from "react-native";
import useSoundEffect from "../utils/useSoundEffect";
import RecordButton from "../utils/RecordButton";
import { useNavigation } from "@react-navigation/core";

interface MainProps {
  route: {
    params: {
      scannedData: string;
    };
  };
}

const Main: React.FC<MainProps> = ({ route }) => {
  const { scannedData } = route.params;
  const [connectionStatus, setConnectionStatus] =
    useState<string>("Connecting...");
  const [ws, setWs] = useState<WebSocket | null>(null);
  const [wsUrl, setWsUrl] = useState("");
  const [rescan, setRescan] = useState(false);
  const [isPressed, setIsPressed] = useState(false);
  const [recording, setRecording] = useState<Audio.Recording | null>(null);
  const audioQueueRef = useRef<string[]>([]);
  const soundRef = useRef<Audio.Sound | null>(null);
  const [soundUriMap, setSoundUriMap] = useState<Map<Audio.Sound, string>>(
    new Map()
  );
  const audioDir = FileSystem.documentDirectory + "01/audio/";
  const [permissionResponse, requestPermission] = Audio.usePermissions();
  polyfillEncoding();
  const backgroundColorAnim = useRef(new Animated.Value(0)).current;
  const buttonBackgroundColorAnim = useRef(new Animated.Value(0)).current;
  const playPip = useSoundEffect(require("../../assets/pip.mp3"));
  const playPop = useSoundEffect(require("../../assets/pop.mp3"));
  const navigation = useNavigation();
  const backgroundColor = backgroundColorAnim.interpolate({
    inputRange: [0, 1],
    outputRange: ["black", "white"],
  });
  const buttonBackgroundColor = backgroundColorAnim.interpolate({
    inputRange: [0, 1],
    outputRange: ["white", "black"],
  });

  const constructTempFilePath = async (buffer: string) => {
    try {
      await dirExists();
      if (!buffer) {
        console.log("Buffer is undefined or empty.");
        return null;
      }
      const tempFilePath = `${audioDir}${Date.now()}.wav`;

      await FileSystem.writeAsStringAsync(tempFilePath, buffer, {
        encoding: FileSystem.EncodingType.Base64,
      });

      return tempFilePath;
    } catch (error) {
      console.log("Failed to construct temp file path:", error);
      return null; // Return null to prevent crashing, error is logged
    }
  };

  async function dirExists() {
    /**
     * Checks if audio directory exists in device storage, if not creates it.
     */
    try {
      const dirInfo = await FileSystem.getInfoAsync(audioDir);
      if (!dirInfo.exists) {
        console.log("Audio directory doesn't exist, creating...");
        await FileSystem.makeDirectoryAsync(audioDir, { intermediates: true });
      }
    } catch (error) {
      console.error("Error checking or creating directory:", error);
    }
  }

  const playNextAudio = useCallback(async () => {
    if (audioQueueRef.current.length > 0 && soundRef.current == null) {
      const uri = audioQueueRef.current.at(0) as string;

      try {
        const { sound: newSound } = await Audio.Sound.createAsync({ uri });
        soundRef.current = newSound;
        setSoundUriMap(new Map(soundUriMap.set(newSound, uri)));
        await newSound.playAsync();
        newSound.setOnPlaybackStatusUpdate(_onPlayBackStatusUpdate);
      } catch (error) {
        console.log("Error playing audio", error);
      }
    } else {
      // audioQueue is empty or a sound is already playing
      return;
    }
  }, []);

  const _onPlayBackStatusUpdate = useCallback(async (status: any) => {
    if (status.didJustFinish) {
      audioQueueRef.current.shift();
      await soundRef.current?.unloadAsync();
      if (soundRef.current) {
        soundUriMap.delete(soundRef.current);
        setSoundUriMap(new Map(soundUriMap));
      }
      soundRef.current = null;
      playNextAudio();
    }
  }, []);

  useEffect(() => {
    const backAction = () => {
      navigation.navigate("Home"); // Always navigate back to Home
      return true; // Prevent default action
    };

    // Add event listener for hardware back button on Android
    const backHandler = BackHandler.addEventListener(
      "hardwareBackPress",
      backAction
    );

    return () => backHandler.remove();
  }, [navigation]);

  useEffect(() => {
    let websocket: WebSocket;
    try {
      // console.log("Connecting to WebSocket at " + scannedData);
      setWsUrl(scannedData);
      websocket = new WebSocket(scannedData);
      websocket.binaryType = "blob";

      websocket.onopen = () => {
        setConnectionStatus(`Connected`);
      };

      websocket.onmessage = async (e) => {
        try {
          const message = JSON.parse(e.data);

          if (message.content && message.type === "audio") {
            const buffer = message.content;
            if (buffer && buffer.length > 0) {
              const filePath = await constructTempFilePath(buffer);
              if (filePath !== null) {
                audioQueueRef.current.push(filePath);

                if (audioQueueRef.current.length === 1) {
                  playNextAudio();
                }
              } else {
                console.error("Failed to create file path");
              }
            } else {
              console.error("Received message is empty or undefined");
            }
          }
        } catch (error) {
          console.error("Error handling WebSocket message:", error);
        }
      };

      websocket.onerror = (error) => {
        setConnectionStatus("Error connecting to WebSocket.");
        console.error("WebSocket error: ", error);
      };

      websocket.onclose = () => {
        setConnectionStatus("Disconnected.");
      };

      setWs(websocket);
    } catch (error) {
      console.log(error);
      setConnectionStatus("Error creating WebSocket.");
    }

    return () => {
      if (websocket) {
        websocket.close();
      }
    };
  }, [scannedData, rescan]);

  return (
    <Animated.View style={[styles.container, { backgroundColor }]}>
      <View style={styles.middle}>
        <RecordButton
          playPip={playPip}
          playPop={playPop}
          recording={recording}
          setRecording={setRecording}
          ws={ws}
          backgroundColorAnim={backgroundColorAnim}
          buttonBackgroundColorAnim={buttonBackgroundColorAnim}
          backgroundColor={backgroundColor}
          buttonBackgroundColor={buttonBackgroundColor}
          setIsPressed={setIsPressed}
        />
        <TouchableOpacity
          style={styles.statusButton}
          onPress={() => {
            setRescan(!rescan);
          }}
        >
          <Text
            style={[
              styles.statusText,
              {
                color: connectionStatus.startsWith("Connected")
                  ? "green"
                  : "red",
              },
            ]}
          >
            {connectionStatus}
          </Text>
        </TouchableOpacity>
      </View>
    </Animated.View>
  );
};

const styles = StyleSheet.create({
  container: {
    flex: 1,
    position: "relative",
  },
  middle: {
    flex: 1,
    justifyContent: "center",
    alignItems: "center",
    padding: 10,
    position: "relative",
  },
  circle: {
    width: 100,
    height: 100,
    borderRadius: 50,
    justifyContent: "center",
    alignItems: "center",
  },
  qr: {
    position: "absolute",
    top: 30,
    left: 10,
    padding: 10,
    zIndex: 100,
  },
  icon: {
    height: 40,
    width: 40,
  },
  topBar: {
    height: 40,
    backgroundColor: "#000",
    paddingTop: 50,
  },
  statusText: {
    fontSize: 12,
    fontWeight: "bold",
  },
  statusButton: {
    position: "absolute",
    bottom: 20,
    alignSelf: "center",
  },
});

export default Main;
@ -0,0 +1,151 @@
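// Push-to-talk button: starts recording on press-in (with pip sound and
// haptics), then on release stops, reads the recorded file, and sends the
// raw audio bytes over the WebSocket.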
import React, { useEffect, useCallback } from "react";
import { TouchableOpacity, StyleSheet } from "react-native";
import { Audio } from "expo-av";
import { Animated } from "react-native";
import * as Haptics from "expo-haptics";

interface RecordButtonProps {
  playPip: () => void;
  playPop: () => void;
  recording: Audio.Recording | null;
  setRecording: (recording: Audio.Recording | null) => void;
  ws: WebSocket | null;
  buttonBackgroundColorAnim: Animated.Value;
  backgroundColorAnim: Animated.Value;
  backgroundColor: Animated.AnimatedInterpolation<string | number>;
  buttonBackgroundColor: Animated.AnimatedInterpolation<string | number>;
  setIsPressed: (isPressed: boolean) => void;
}

const styles = StyleSheet.create({
  circle: {
    width: 100,
    height: 100,
    borderRadius: 50,
    justifyContent: "center",
    alignItems: "center",
  },
  button: {
    width: 100,
    height: 100,
    borderRadius: 50,
    justifyContent: "center",
    alignItems: "center",
  },
});

const RecordButton: React.FC<RecordButtonProps> = ({
  playPip,
  playPop,
  recording,
  setRecording,
  ws,
  backgroundColorAnim,
  buttonBackgroundColorAnim,
  backgroundColor,
  buttonBackgroundColor,
  setIsPressed,
}: RecordButtonProps) => {
  const [permissionResponse, requestPermission] = Audio.usePermissions();

  useEffect(() => {
    if (permissionResponse?.status !== "granted") {
      requestPermission();
    }
  }, []);

  const startRecording = useCallback(async () => {
    if (recording) {
      console.log("A recording is already in progress.");
      return;
    }

    try {
      if (
        permissionResponse !== null &&
        permissionResponse.status !== "granted"
      ) {
        await requestPermission();
      }

      await Audio.setAudioModeAsync({
        allowsRecordingIOS: true,
        playsInSilentModeIOS: true,
      });

      const newRecording = new Audio.Recording();
      await newRecording.prepareToRecordAsync(
        Audio.RecordingOptionsPresets.HIGH_QUALITY
      );
      await newRecording.startAsync();

      setRecording(newRecording);
    } catch (err) {
      console.error("Failed to start recording", err);
    }
  }, [recording, permissionResponse]); // track current recording/permission to avoid stale closures

  const stopRecording = useCallback(async () => {
    if (recording) {
      await recording.stopAndUnloadAsync();
      await Audio.setAudioModeAsync({
        allowsRecordingIOS: false,
      });
      const uri = recording.getURI();
      setRecording(null);

      if (ws && uri) {
        const response = await fetch(uri);
        const blob = await response.blob();

        const reader = new FileReader();
        reader.readAsArrayBuffer(blob);
        reader.onloadend = () => {
          const audioBytes = reader.result;
          if (audioBytes) {
            ws.send(audioBytes);
          }
        };
      }
    }
  }, [recording, ws]); // include ws so the latest socket is used

  const toggleRecording = (shouldPress: boolean) => {
    Animated.timing(backgroundColorAnim, {
      toValue: shouldPress ? 1 : 0,
      duration: 400,
      useNativeDriver: false,
    }).start();
    Animated.timing(buttonBackgroundColorAnim, {
      toValue: shouldPress ? 1 : 0,
      duration: 400,
      useNativeDriver: false,
    }).start();
  };

  return (
    <TouchableOpacity
      style={styles.button}
      onPressIn={() => {
        playPip();
        setIsPressed(true);
        toggleRecording(true);
        startRecording();
        Haptics.impactAsync(Haptics.ImpactFeedbackStyle.Heavy);
      }}
      onPressOut={() => {
        playPop();
        setIsPressed(false);
        toggleRecording(false);
        stopRecording();
        Haptics.impactAsync(Haptics.ImpactFeedbackStyle.Heavy);
      }}
    >
      <Animated.View
        style={[styles.circle, { backgroundColor: buttonBackgroundColor }]}
      />
    </TouchableOpacity>
  );
};

export default RecordButton;
@ -0,0 +1,10 @@
// store.js
import { create } from "zustand";

const useStore = create((set: any) => ({
  count: 0,
  increase: () => set((state: any) => ({ count: state.count + 1 })),
  decrease: () => set((state: any) => ({ count: state.count - 1 })),
}));

export default useStore;
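A minimal usage sketch for this store; the component and import path are illustrative, not part of the app:

```tsx
// Illustrative consumer of the counter store above.
import React from "react";
import { Button, Text, View } from "react-native";
import useStore from "./store"; // path is illustrative

const Counter = () => {
  const count = useStore((state: any) => state.count);
  const increase = useStore((state: any) => state.increase);
  return (
    <View>
      <Text>Count: {count}</Text>
      <Button title="Increase" onPress={increase} />
    </View>
  );
};

export default Counter;
```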
@ -0,0 +1,29 @@
import { useEffect, useState } from "react";
import { Audio } from "expo-av";

const useSoundEffect = (soundFile: any) => {
  const [sound, setSound] = useState<Audio.Sound | null>(null); // Explicitly set initial state to null

  useEffect(() => {
    let loadedSound: Audio.Sound | null = null;

    const loadSound = async () => {
      const { sound: newSound } = await Audio.Sound.createAsync(soundFile);
      loadedSound = newSound;
      setSound(newSound);
    };

    loadSound();

    return () => {
      // Unload via a local reference; depending on `sound` here would
      // retrigger the effect and reload the file on every state update.
      loadedSound?.unloadAsync();
    };
  }, [soundFile]); // Reload only when the sound file changes

  const playSound = async () => {
    if (sound) {
      await sound.playAsync();
    }
  };

  return playSound;
};

export default useSoundEffect;
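Usage mirrors `Main` above: preload a bundled effect once, then call the returned function to play it. A tiny illustrative consumer (component name is hypothetical; asset path as used in `Main`):

```tsx
// Illustrative consumer of the useSoundEffect hook above.
import React from "react";
import { Button } from "react-native";
import useSoundEffect from "./useSoundEffect";

const PipButton = () => {
  const playPip = useSoundEffect(require("../../assets/pip.mp3"));
  return <Button title="Pip" onPress={() => playPip()} />;
};

export default PipButton;
```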