diff --git a/software/source/clients/ios/react-native/App.tsx b/software/source/clients/ios/react-native/App.tsx
index 7881ba3..5fa7a05 100644
--- a/software/source/clients/ios/react-native/App.tsx
+++ b/software/source/clients/ios/react-native/App.tsx
@@ -10,7 +10,12 @@ const Stack = createNativeStackNavigator();
 function App() {
   return (
-
+
diff --git a/software/source/clients/ios/react-native/assets/pip.mp3 b/software/source/clients/ios/react-native/assets/pip.mp3
new file mode 100644
index 0000000..bc15afa
Binary files /dev/null and b/software/source/clients/ios/react-native/assets/pip.mp3 differ
diff --git a/software/source/clients/ios/react-native/assets/pop.mp3 b/software/source/clients/ios/react-native/assets/pop.mp3
new file mode 100644
index 0000000..dedc4d1
Binary files /dev/null and b/software/source/clients/ios/react-native/assets/pop.mp3 differ
diff --git a/software/source/clients/ios/react-native/assets/qr.png b/software/source/clients/ios/react-native/assets/qr.png
new file mode 100644
index 0000000..33cf7e0
Binary files /dev/null and b/software/source/clients/ios/react-native/assets/qr.png differ
diff --git a/software/source/clients/ios/react-native/assets/yay.wav b/software/source/clients/ios/react-native/assets/yay.wav
new file mode 100644
index 0000000..6754870
Binary files /dev/null and b/software/source/clients/ios/react-native/assets/yay.wav differ
diff --git a/software/source/clients/ios/react-native/src/screens/Camera.tsx b/software/source/clients/ios/react-native/src/screens/Camera.tsx
index 401c7f8..9a5d902 100644
--- a/software/source/clients/ios/react-native/src/screens/Camera.tsx
+++ b/software/source/clients/ios/react-native/src/screens/Camera.tsx
@@ -3,9 +3,11 @@ import { StyleSheet, Text, TouchableOpacity, View } from "react-native";
 import { Camera } from "expo-camera";
 import { useNavigation } from "@react-navigation/native";
 import { BarCodeScanner } from "expo-barcode-scanner";
+// import useSoundEffect from "../lib/useSoundEffect";
 
 export default function CameraScreen() {
   const [permission, requestPermission] = Camera.useCameraPermissions();
+  // const playYay = useSoundEffect(require("../../assets/yay.wav"));
   const [scanned, setScanned] = useState(false);
   const navigation = useNavigation();
@@ -31,18 +33,20 @@ export default function CameraScreen() {
   //   setFacing((current) => (current === "back" ? "front" : "back"));
"front" : "back")); // } - const handleBarCodeScanned = ({ + const handleBarCodeScanned = async ({ type, data, }: { type: string; data: string; }) => { + // await playYay(); setScanned(true); console.log( `Bar code with type ${type} and data ${data} has been scanned!` ); - alert(`Scanned URL: ${data}`); + // alert(`Scanned URL: ${data}`); + navigation.navigate("Main", { scannedData: data }); }; return ( @@ -64,7 +68,9 @@ export default function CameraScreen() { onPress={() => setScanned(false)} style={styles.button} > - Scan Again + + Scan Again + )} @@ -78,6 +84,7 @@ const styles = StyleSheet.create({ flex: 1, flexDirection: "column", justifyContent: "flex-end", + position: "relative", }, camera: { flex: 1, @@ -85,18 +92,22 @@ const styles = StyleSheet.create({ buttonContainer: { backgroundColor: "transparent", flexDirection: "row", - margin: 20, + margin: 2, }, button: { + position: "absolute", + top: 44, + left: 4, flex: 0.1, alignSelf: "flex-end", alignItems: "center", backgroundColor: "#000", borderRadius: 10, - padding: 15, + paddingHorizontal: 8, + paddingVertical: 6, }, text: { - fontSize: 18, + fontSize: 14, color: "white", }, }); diff --git a/software/source/clients/ios/react-native/src/screens/HomeScreen.tsx b/software/source/clients/ios/react-native/src/screens/HomeScreen.tsx index 27ddd4f..270fdbc 100644 --- a/software/source/clients/ios/react-native/src/screens/HomeScreen.tsx +++ b/software/source/clients/ios/react-native/src/screens/HomeScreen.tsx @@ -23,23 +23,23 @@ const styles = StyleSheet.create({ flex: 1, justifyContent: "center", alignItems: "center", - backgroundColor: "#fff", + backgroundColor: "#000", }, circle: { width: 100, height: 100, borderRadius: 50, - backgroundColor: "black", + backgroundColor: "#fff", marginBottom: 20, }, button: { - backgroundColor: "black", + backgroundColor: "#fff", paddingHorizontal: 20, paddingVertical: 10, borderRadius: 5, }, buttonText: { - color: "white", + color: "#000", fontSize: 16, }, }); diff --git a/software/source/clients/ios/react-native/src/screens/Main.tsx b/software/source/clients/ios/react-native/src/screens/Main.tsx index 5c360ab..08519b4 100644 --- a/software/source/clients/ios/react-native/src/screens/Main.tsx +++ b/software/source/clients/ios/react-native/src/screens/Main.tsx @@ -1,5 +1,5 @@ import React, { useState, useEffect, useCallback, useRef } from "react"; -import { View, Text, TouchableOpacity, StyleSheet } from "react-native"; +import { View, Text, TouchableOpacity, StyleSheet, Image } from "react-native"; import * as FileSystem from "expo-file-system"; import { AVPlaybackStatus, AVPlaybackStatusSuccess, Audio } from "expo-av"; import { polyfill as polyfillEncoding } from "react-native-polyfill-globals/src/encoding"; @@ -7,6 +7,9 @@ import { create } from "zustand"; import useStore from "../lib/state"; import { Animated } from "react-native"; import * as Haptics from "expo-haptics"; +import useSoundEffect from "../lib/useSoundEffect"; +import IconImage from "../../assets/qr.png"; +import { useNavigation } from "@react-navigation/native"; interface MainProps { route: { @@ -56,7 +59,9 @@ const Main: React.FC = ({ route }) => { polyfillEncoding(); const backgroundColorAnim = useRef(new Animated.Value(0)).current; const buttonBackgroundColorAnim = useRef(new Animated.Value(0)).current; - + const playPip = useSoundEffect(require("../../assets/pip.mp3")); + const playPop = useSoundEffect(require("../../assets/pop.mp3")); + const navigation = useNavigation(); const backgroundColor = 
     inputRange: [0, 1],
     outputRange: ["black", "white"], // Change as needed
@@ -168,7 +173,8 @@ const Main: React.FC<MainProps> = ({ route }) => {
       websocket.binaryType = "blob";
 
       websocket.onopen = () => {
-        setConnectionStatus(`Connected to ${scannedData}`);
+        setConnectionStatus(`Connected`);
+        // setConnectionStatus(`Connected to ${scannedData}`);
         console.log("WebSocket connected");
       };
@@ -318,47 +324,69 @@ const Main: React.FC<MainProps> = ({ route }) => {
   };
 
   return (
-
-        {connectionStatus}
-
-        {
-          setIsPressed(true);
-          toggleRecording(true); // Pass true when pressed
-          startRecording();
-          Haptics.impactAsync(Haptics.ImpactFeedbackStyle.Heavy);
-        }}
-        onPressOut={() => {
-          setIsPressed(false);
-          toggleRecording(false); // Pass false when released
-          stopRecording();
-          Haptics.impactAsync(Haptics.ImpactFeedbackStyle.Heavy);
+      {/* {
+          console.log("hi!");
+
+          navigation.navigate("Camera");
         }}
       >
-
+
+
+      */}
+      {/* */}
+
+
-      {/*
+        {
+          playPip();
+          setIsPressed(true);
+          toggleRecording(true); // Pass true when pressed
+          startRecording();
+          Haptics.impactAsync(Haptics.ImpactFeedbackStyle.Heavy);
+        }}
+        onPressOut={() => {
+          playPop();
+          setIsPressed(false);
+          toggleRecording(false); // Pass false when released
+          stopRecording();
+          Haptics.impactAsync(Haptics.ImpactFeedbackStyle.Heavy);
+        }}
+      >
+
-          Record
-      */}
-
-
+        {/*
+          Record
+        */}
+
+
+
   );
 };
 
 const styles = StyleSheet.create({
   container: {
+    flex: 1,
+    position: "relative",
+  },
+  middle: {
     flex: 1,
     justifyContent: "center",
     alignItems: "center",
@@ -372,6 +400,23 @@ const styles = StyleSheet.create({
     justifyContent: "center",
     alignItems: "center",
   },
+  qr: {
+    position: "absolute",
+    top: 30,
+    left: 10,
+    padding: 10,
+    zIndex: 100,
+  },
+  icon: {
+    height: 40,
+    width: 40,
+  },
+  topBar: {
+    height: 40,
+    backgroundColor: "#000",
+    paddingTop: 50,
+  },
+
   button: {
     width: 100,
     height: 100,
@@ -389,7 +434,7 @@ const styles = StyleSheet.create({
   },
   statusText: {
     position: "absolute",
-    bottom: 10,
+    bottom: 20,
     alignSelf: "center",
     fontSize: 12,
     fontWeight: "bold",
diff --git a/software/source/server/server.py b/software/source/server/server.py
index a1a7ef2..e49c220 100644
--- a/software/source/server/server.py
+++ b/software/source/server/server.py
@@ -196,11 +196,11 @@ async def send_messages(websocket: WebSocket):
         try:
             if isinstance(message, dict):
-                print(f"Sending to the device: {type(message)} {str(message)[:100]}")
+                # print(f"Sending to the device: {type(message)} {str(message)[:100]}")
                 await websocket.send_json(message)
             elif isinstance(message, bytes):
                 message = base64.b64encode(message)
-                print(f"Sending to the device: {type(message)} {str(message)[:100]}")
+                # print(f"Sending to the device: {type(message)} {str(message)[:100]}")
                 await websocket.send_bytes(message)
 """
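Note: the Camera.tsx and Main.tsx changes above import a useSoundEffect hook from src/lib/useSoundEffect, which is not part of this diff. A minimal sketch of what the call sites (const playPip = useSoundEffect(require("../../assets/pip.mp3")); await playYay()) appear to expect, assuming expo-av's Audio.Sound API; the file below is an illustration, not the actual file from the repository:

// Hypothetical sketch of src/lib/useSoundEffect.ts -- not included in the diff above.
import { useCallback, useEffect, useRef } from "react";
import { Audio } from "expo-av";

export default function useSoundEffect(source: number) {
  const soundRef = useRef<Audio.Sound | null>(null);

  useEffect(() => {
    let mounted = true;
    // Load the bundled asset once; unload it when the component goes away.
    Audio.Sound.createAsync(source).then(({ sound }) => {
      if (mounted) soundRef.current = sound;
      else sound.unloadAsync();
    });
    return () => {
      mounted = false;
      soundRef.current?.unloadAsync();
    };
  }, [source]);

  // Return an async function that replays the sound from the start,
  // matching the playPip()/playPop()/playYay() call sites above.
  return useCallback(async () => {
    await soundRef.current?.replayAsync();
  }, []);
}

Loading the sound once and replaying it keeps the onPressIn/onPressOut handlers fast, which matters since they also trigger haptics and start or stop recording.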