@@ -1,9 +1,12 @@
-import React, { useState, useEffect, useCallback } from "react";
+import React, { useState, useEffect, useCallback, useRef } from "react";
 import { View, Text, TouchableOpacity, StyleSheet } from "react-native";
-import * as FileSystem from 'expo-file-system';
+import * as FileSystem from "expo-file-system";
 import { AVPlaybackStatus, AVPlaybackStatusSuccess, Audio } from "expo-av";
-import { polyfill as polyfillEncoding } from 'react-native-polyfill-globals/src/encoding';
-import { create } from 'zustand';
+import { polyfill as polyfillEncoding } from "react-native-polyfill-globals/src/encoding";
+import { create } from "zustand";
+import useStore from "../lib/state";
+import { Animated } from "react-native";
+import * as Haptics from "expo-haptics";
 
 interface MainProps {
   route: {
@@ -20,7 +23,8 @@ interface AudioQueueState {
 
 const useAudioQueueStore = create<AudioQueueState>((set) => ({
   audioQueue: [], // initial state
-  addToQueue: (uri) => set((state) => ({ audioQueue: [...state.audioQueue, uri] })), // action to set audio queue
+  addToQueue: (uri) =>
+    set((state) => ({ audioQueue: [...state.audioQueue, uri] })), // action to set audio queue
 }));
 
 interface SoundState {
@@ -35,85 +39,105 @@ const useSoundStore = create<SoundState>((set) => ({
 
 const Main: React.FC<MainProps> = ({ route }) => {
   const { scannedData } = route.params;
-  const [connectionStatus, setConnectionStatus] = useState<string>("Connecting...");
+  const [connectionStatus, setConnectionStatus] =
+    useState<string>("Connecting...");
   const [ws, setWs] = useState<WebSocket | null>(null);
   const [isPressed, setIsPressed] = useState(false);
   const [recording, setRecording] = useState<Audio.Recording | null>(null);
   const addToQueue = useAudioQueueStore((state) => state.addToQueue);
   const audioQueue = useAudioQueueStore((state) => state.audioQueue);
   const setSound = useSoundStore((state) => state.setSound);
   const sound = useSoundStore((state) => state.sound);
-  const audioDir = FileSystem.documentDirectory + '01/audio/';
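+  // Remembers which file URI each loaded Sound was created from, so the
+  // finished sound's entry can be cleaned up after playback.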
+  const [soundUriMap, setSoundUriMap] = useState<Map<Audio.Sound, string>>(
+    new Map()
+  );
+  const audioDir = FileSystem.documentDirectory + "01/audio/";
   const [permissionResponse, requestPermission] = Audio.usePermissions();
   polyfillEncoding();
 
-  const constructTempFilePath = async (buffer: string) => {
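+  // Animated values for the press feedback: the container fades black -> white
+  // while the button fades white -> black (driven by toggleRecording below).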
+  const backgroundColorAnim = useRef(new Animated.Value(0)).current;
+  const buttonBackgroundColorAnim = useRef(new Animated.Value(0)).current;
+
+  const backgroundColor = backgroundColorAnim.interpolate({
+    inputRange: [0, 1],
+    outputRange: ["black", "white"], // Change as needed
+  });
+  const buttonBackgroundColor = backgroundColorAnim.interpolate({
+    inputRange: [0, 1],
+    outputRange: ["white", "black"], // Inverse of the container
+  });
+
+  const constructTempFilePath = async (buffer: string) => {
+    try {
       await dirExists();
+      if (!buffer) {
+        console.log("Buffer is undefined or empty.");
+        return null;
+      }
       const tempFilePath = `${audioDir}${Date.now()}.wav`;
 
-    await FileSystem.writeAsStringAsync(
-      tempFilePath,
-      buffer,
-      {
-        encoding: FileSystem.EncodingType.Base64,
-      }
-    );
+      await FileSystem.writeAsStringAsync(tempFilePath, buffer, {
+        encoding: FileSystem.EncodingType.Base64,
+      });
 
       return tempFilePath;
-  };
+    } catch (error) {
+      console.log("Failed to construct temp file path:", error);
+      return null; // Return null to prevent crashing, error is logged
+    }
+  };
 
   async function dirExists() {
     /**
     * Checks if audio directory exists in device storage, if not creates it.
     */
-    const dirInfo = await FileSystem.getInfoAsync(audioDir);
-    if (!dirInfo.exists) {
-      console.log("audio directory doesn't exist, creating...");
-      await FileSystem.makeDirectoryAsync(audioDir, { intermediates: true });
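+    // Wrap in try/catch so a filesystem failure is logged instead of
+    // surfacing as an unhandled rejection.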
+    try {
+      const dirInfo = await FileSystem.getInfoAsync(audioDir);
+      if (!dirInfo.exists) {
+        console.error("audio directory doesn't exist, creating...");
+        await FileSystem.makeDirectoryAsync(audioDir, { intermediates: true });
+      }
+    } catch (error) {
+      console.error("Error checking or creating directory:", error);
     }
   }
 
-  const playNextAudio = async () => {
-    console.log(`in playNextAudio audioQueue is ${audioQueue.length} and sound is ${sound}`);
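+  // Wrapped in useCallback so the queue-watching effect below re-runs only
+  // when these dependencies actually change.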
+  const playNextAudio = useCallback(async () => {
+    console.log(
+      `in playNextAudio audioQueue is ${audioQueue.length} and sound is ${sound}`
+    );
 
     if (audioQueue.length > 0 && sound == null) {
       const uri = audioQueue.shift() as string;
       console.log("load audio from", uri);
 
       try {
-        const { sound } = await Audio.Sound.createAsync({ uri });
-        setSound(sound);
-
-        console.log("playing audio from", uri);
-        await sound?.playAsync();
-
-        sound.setOnPlaybackStatusUpdate(_onPlayBackStatusUpdate);
-
-      } catch (error){
+        const { sound: newSound } = await Audio.Sound.createAsync({ uri });
+        setSound(newSound);
+        setSoundUriMap(new Map(soundUriMap.set(newSound, uri)));
+        await newSound.playAsync();
+        newSound.setOnPlaybackStatusUpdate(_onPlayBackStatusUpdate);
+      } catch (error) {
         console.log("Error playing audio", error);
         playNextAudio();
       }
     } else {
       console.log("audioQueue is empty or sound is not null");
       return;
     }
-  };
+  }, [audioQueue, sound, soundUriMap]);
 
-  const _onPlayBackStatusUpdate = async (status: AVPlaybackStatus) => {
-    if (isAVPlaybackStatusSuccess(status) && status.didJustFinish === true){
-      console.log("on playback status update sound is ", sound);
-      if (sound != null){
-        console.log('Unloading Sound');
-        await sound.unloadAsync();
-      }
-      setSound(null);
-      console.log("audio has finished playing, playing next audio");
-      console.log(audioQueue);
-      playNextAudio();
-    }
-  }
 
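+  // When a clip finishes: unload it, drop its soundUriMap entry, clear the
+  // current sound, then advance to the next queued clip.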
+  const _onPlayBackStatusUpdate = useCallback(
+    async (status) => {
+      if (status.didJustFinish) {
+        await sound?.unloadAsync();
+        soundUriMap.delete(sound);
+        setSoundUriMap(new Map(soundUriMap));
+        setSound(null);
+        playNextAudio();
+      }
+    },
+    [sound, soundUriMap, playNextAudio]
+  );
 
   const isAVPlaybackStatusSuccess = (
     status: AVPlaybackStatus
@@ -121,13 +145,17 @@ const Main: React.FC<MainProps> = ({ route }) => {
     return (status as AVPlaybackStatusSuccess).isLoaded !== undefined;
   };
 
+  // useEffect(() => {
+  //   console.log("audioQueue has been updated:", audioQueue.length);
+  //   if (audioQueue.length == 1) {
+  //     playNextAudio();
+  //   }
+  // }, [audioQueue]);
   useEffect(() => {
-    console.log("audioQueue has been updated:", audioQueue.length);
-    if (audioQueue.length == 1) {
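+    // Kick off playback whenever the queue is non-empty and nothing is
+    // currently playing.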
+    if (audioQueue.length > 0 && !sound) {
       playNextAudio();
     }
-  }, [audioQueue]);
-
+  }, [audioQueue, sound, playNextAudio]);
   useEffect(() => {
     console.log("sound has been updated:", sound);
   }, [sound]);
@@ -145,14 +173,34 @@ const Main: React.FC<MainProps> = ({ route }) => {
     };
 
     websocket.onmessage = async (e) => {
-
-      const message = JSON.parse(e.data);
-      console.log(message.content.slice(0, 50));
-
-      const buffer = await message.content as string;
-      const filePath = await constructTempFilePath(buffer);
-      addToQueue(filePath);
-      console.log("audio file written to", filePath);
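+      // Treat string content as a base64-encoded audio payload: write it to
+      // a temp .wav file and queue it for playback.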
+      try {
+        const message = JSON.parse(e.data);
+
+        if (message.content && typeof message.content === "string") {
+          console.log("✅✅✅✅✅✅✅✅✅✅✅✅✅✅✅✅✅✅✅ Audio message");
+
+          const buffer = message.content;
+          console.log(buffer.length);
+          if (buffer && buffer.length > 0) {
+            const filePath = await constructTempFilePath(buffer);
+            if (filePath !== null) {
+              addToQueue(filePath);
+              console.log("audio file written to", filePath);
+            } else {
+              console.error("Failed to create file path");
+            }
+          } else {
+            console.error("Received message is empty or undefined");
+          }
+        } else {
+          // console.log(typeof message);
+          // console.log(typeof message.content);
+          console.log("Received message content is not a string.");
+          console.log(message);
+        }
+      } catch (error) {
+        console.error("Error handling WebSocket message:", error);
+      }
     };
 
     websocket.onerror = (error) => {
@@ -185,7 +233,10 @@ const Main: React.FC<MainProps> = ({ route }) => {
     }
 
     try {
-      if (permissionResponse !== null && permissionResponse.status !== `granted`) {
+      if (
+        permissionResponse !== null &&
+        permissionResponse.status !== `granted`
+      ) {
         console.log("Requesting permission..");
         await requestPermission();
       }
@@ -197,7 +248,9 @@ const Main: React.FC<MainProps> = ({ route }) => {
       console.log("Starting recording..");
       const newRecording = new Audio.Recording();
-      await newRecording.prepareToRecordAsync(Audio.RecordingOptionsPresets.HIGH_QUALITY);
+      await newRecording.prepareToRecordAsync(
+        Audio.RecordingOptionsPresets.HIGH_QUALITY
+      );
       await newRecording.startAsync();
 
       setRecording(newRecording);
 
@@ -228,10 +281,9 @@ const Main: React.FC<MainProps> = ({ route }) => {
     }
     */
 
-
     if (ws && uri) {
       const response = await fetch(uri);
-      console.log("fetched audio file", response);
+      // console.log("fetched audio file", response);
       const blob = await response.blob();
 
       const reader = new FileReader();
@@ -242,16 +294,30 @@ const Main: React.FC<MainProps> = ({ route }) => {
           ws.send(audioBytes);
           const audioArray = new Uint8Array(audioBytes as ArrayBuffer);
           const decoder = new TextDecoder("utf-8");
-          console.log("sent audio bytes to WebSocket", decoder.decode(audioArray).slice(0, 50));
+          console.log(
+            "sent audio bytes to WebSocket",
+            decoder.decode(audioArray).slice(0, 50)
+          );
         }
       };
     }
 
    }
  }, [recording]);
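+  // Cross-fades the container and the button between their two color states
+  // while the record button is held down.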
+  const toggleRecording = (shouldPress: boolean) => {
+    Animated.timing(backgroundColorAnim, {
+      toValue: shouldPress ? 1 : 0,
+      duration: 400,
+      useNativeDriver: false, // 'backgroundColor' does not support native driver
+    }).start();
+    Animated.timing(buttonBackgroundColorAnim, {
+      toValue: shouldPress ? 1 : 0,
+      duration: 400,
+      useNativeDriver: false, // 'backgroundColor' does not support native driver
+    }).start();
+  };
 
   return (
-    <View style={styles.container}>
+    <Animated.View style={[styles.container, { backgroundColor }]}>
       <Text
         style={[
           styles.statusText,
@@ -262,30 +328,47 @@ const Main: React.FC<MainProps> = ({ route }) => {
       </Text>
       <TouchableOpacity
         style={styles.button}
-        onPressIn={startRecording}
-        onPressOut={stopRecording}
+        onPressIn={() => {
+          setIsPressed(true);
+          toggleRecording(true); // Pass true when pressed
+          startRecording();
+          Haptics.impactAsync(Haptics.ImpactFeedbackStyle.Heavy);
+        }}
+        onPressOut={() => {
+          setIsPressed(false);
+          toggleRecording(false); // Pass false when released
+          stopRecording();
+          Haptics.impactAsync(Haptics.ImpactFeedbackStyle.Heavy);
+        }}
       >
-        <View style={styles.circle}>
-          <Text style={styles.buttonText}>Record</Text>
-        </View>
+        <Animated.View
+          style={[styles.circle, { backgroundColor: buttonBackgroundColor }]}
+        >
+          {/* <Text
+            style={
+              recording ? styles.buttonTextRecording : styles.buttonTextDefault
+            }
+          >
+            Record
+          </Text> */}
+        </Animated.View>
       </TouchableOpacity>
-    </View>
+    </Animated.View>
   );
-}
+};
 
 const styles = StyleSheet.create({
   container: {
     flex: 1,
-    justifyContent: 'center',
+    justifyContent: "center",
     alignItems: "center",
-    backgroundColor: '#ecf0f1',
     padding: 10,
+    position: "relative",
   },
   circle: {
     width: 100,
     height: 100,
     borderRadius: 50,
     backgroundColor: "black",
     justifyContent: "center",
     alignItems: "center",
   },
@@ -296,13 +379,20 @@ const styles = StyleSheet.create({
     justifyContent: "center",
     alignItems: "center",
   },
-  buttonText: {
+  buttonTextDefault: {
     color: "black",
     fontSize: 16,
   },
+  buttonTextRecording: {
+    color: "white",
+    fontSize: 16,
+  },
   statusText: {
     marginBottom: 20,
-    fontSize: 16,
+    position: "absolute",
+    bottom: 10,
+    alignSelf: "center",
+    fontSize: 12,
+    fontWeight: "bold",
   },
 });