UI changes and audio fixes

pull/256/head
Ty Fiero 9 months ago
parent 4647b24ccc
commit 9e60fee6ee

@@ -10,7 +10,12 @@ const Stack = createNativeStackNavigator();
function App() {
return (
<NavigationContainer>
<Stack.Navigator initialRouteName="Home">
<Stack.Navigator
initialRouteName="Home"
screenOptions={{
headerShown: false, // This hides the navigation bar globally
}}
>
<Stack.Screen name="Home" component={HomeScreen} />
<Stack.Screen name="Camera" component={CameraScreen} />
<Stack.Screen name="Main" component={Main} />

Binary file not shown (new image asset, 13 KiB).

@@ -3,9 +3,11 @@ import { StyleSheet, Text, TouchableOpacity, View } from "react-native";
import { Camera } from "expo-camera";
import { useNavigation } from "@react-navigation/native";
import { BarCodeScanner } from "expo-barcode-scanner";
// import useSoundEffect from "../lib/useSoundEffect";
export default function CameraScreen() {
const [permission, requestPermission] = Camera.useCameraPermissions();
// const playYay = useSoundEffect(require("../../assets/yay.wav"));
const [scanned, setScanned] = useState(false);
const navigation = useNavigation();
@@ -31,18 +33,20 @@ export default function CameraScreen() {
// setFacing((current) => (current === "back" ? "front" : "back"));
// }
const handleBarCodeScanned = ({
const handleBarCodeScanned = async ({
type,
data,
}: {
type: string;
data: string;
}) => {
// await playYay();
setScanned(true);
console.log(
`Bar code with type ${type} and data ${data} has been scanned!`
);
alert(`Scanned URL: ${data}`);
// alert(`Scanned URL: ${data}`);
navigation.navigate("Main", { scannedData: data });
};
return (
@@ -64,7 +68,9 @@ export default function CameraScreen() {
onPress={() => setScanned(false)}
style={styles.button}
>
<Text style={styles.text}>Scan Again</Text>
<Text numberOfLines={1} style={styles.text}>
Scan Again
</Text>
</TouchableOpacity>
)}
</View>
@@ -78,6 +84,7 @@ const styles = StyleSheet.create({
flex: 1,
flexDirection: "column",
justifyContent: "flex-end",
position: "relative",
},
camera: {
flex: 1,
@@ -85,18 +92,22 @@
buttonContainer: {
backgroundColor: "transparent",
flexDirection: "row",
margin: 20,
margin: 2,
},
button: {
position: "absolute",
top: 44,
left: 4,
flex: 0.1,
alignSelf: "flex-end",
alignItems: "center",
backgroundColor: "#000",
borderRadius: 10,
padding: 15,
paddingHorizontal: 8,
paddingVertical: 6,
},
text: {
fontSize: 18,
fontSize: 14,
color: "white",
},
});
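
The render side of CameraScreen is not part of these hunks, so the wiring between the camera view and handleBarCodeScanned is not shown. Below is a minimal sketch of how that handler is typically attached, assuming the legacy expo-camera <Camera> API together with expo-barcode-scanner's QR constant; prop names follow the Expo docs rather than this repository's exact code.

import React, { useState } from "react";
import { StyleSheet, View } from "react-native";
import { Camera } from "expo-camera";
import { BarCodeScanner } from "expo-barcode-scanner";

export default function ScannerSketch() {
  const [scanned, setScanned] = useState(false);

  const handleBarCodeScanned = async ({ type, data }: { type: string; data: string }) => {
    setScanned(true);
    console.log(`Bar code with type ${type} and data ${data} has been scanned!`);
  };

  return (
    <View style={styles.container}>
      <Camera
        style={styles.camera}
        // Passing undefined while `scanned` is true prevents the same code from firing repeatedly.
        onBarCodeScanned={scanned ? undefined : handleBarCodeScanned}
        barCodeScannerSettings={{
          barCodeTypes: [BarCodeScanner.Constants.BarCodeType.qr],
        }}
      />
    </View>
  );
}

const styles = StyleSheet.create({
  container: { flex: 1 },
  camera: { flex: 1 },
});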

@@ -23,23 +23,23 @@ const styles = StyleSheet.create({
flex: 1,
justifyContent: "center",
alignItems: "center",
backgroundColor: "#fff",
backgroundColor: "#000",
},
circle: {
width: 100,
height: 100,
borderRadius: 50,
backgroundColor: "black",
backgroundColor: "#fff",
marginBottom: 20,
},
button: {
backgroundColor: "black",
backgroundColor: "#fff",
paddingHorizontal: 20,
paddingVertical: 10,
borderRadius: 5,
},
buttonText: {
color: "white",
color: "#000",
fontSize: 16,
},
});

@@ -1,5 +1,5 @@
import React, { useState, useEffect, useCallback, useRef } from "react";
import { View, Text, TouchableOpacity, StyleSheet } from "react-native";
import { View, Text, TouchableOpacity, StyleSheet, Image } from "react-native";
import * as FileSystem from "expo-file-system";
import { AVPlaybackStatus, AVPlaybackStatusSuccess, Audio } from "expo-av";
import { polyfill as polyfillEncoding } from "react-native-polyfill-globals/src/encoding";
@@ -7,6 +7,9 @@ import { create } from "zustand";
import useStore from "../lib/state";
import { Animated } from "react-native";
import * as Haptics from "expo-haptics";
import useSoundEffect from "../lib/useSoundEffect";
import IconImage from "../../assets/qr.png";
import { useNavigation } from "@react-navigation/native";
interface MainProps {
route: {
@@ -56,7 +59,9 @@ const Main: React.FC<MainProps> = ({ route }) => {
polyfillEncoding();
const backgroundColorAnim = useRef(new Animated.Value(0)).current;
const buttonBackgroundColorAnim = useRef(new Animated.Value(0)).current;
const playPip = useSoundEffect(require("../../assets/pip.mp3"));
const playPop = useSoundEffect(require("../../assets/pop.mp3"));
const navigation = useNavigation();
const backgroundColor = backgroundColorAnim.interpolate({
inputRange: [0, 1],
outputRange: ["black", "white"], // Change as needed
@@ -168,7 +173,8 @@ const Main: React.FC<MainProps> = ({ route }) => {
websocket.binaryType = "blob";
websocket.onopen = () => {
setConnectionStatus(`Connected to ${scannedData}`);
setConnectionStatus(`Connected`);
// setConnectionStatus(`Connected to ${scannedData}`);
console.log("WebSocket connected");
};
@@ -318,10 +324,25 @@ const Main: React.FC<MainProps> = ({ route }) => {
};
return (
<Animated.View style={[styles.container, { backgroundColor }]}>
{/* <TouchableOpacity
onPress={() => {
console.log("hi!");
navigation.navigate("Camera");
}}
>
<Animated.View style={styles.qr}>
<Image source={IconImage} style={styles.icon} />
</Animated.View>
</TouchableOpacity> */}
{/* <View style={styles.topBar}></View> */}
<View style={styles.middle}>
<Text
style={[
styles.statusText,
{ color: connectionStatus.startsWith("Connected") ? "green" : "red" },
{
color: connectionStatus.startsWith("Connected") ? "green" : "red",
},
]}
>
{connectionStatus}
@@ -329,12 +350,14 @@ const Main: React.FC<MainProps> = ({ route }) => {
<TouchableOpacity
style={styles.button}
onPressIn={() => {
playPip();
setIsPressed(true);
toggleRecording(true); // Pass true when pressed
startRecording();
Haptics.impactAsync(Haptics.ImpactFeedbackStyle.Heavy);
}}
onPressOut={() => {
playPop();
setIsPressed(false);
toggleRecording(false); // Pass false when released
stopRecording();
@@ -353,12 +376,17 @@ const Main: React.FC<MainProps> = ({ route }) => {
</Text> */}
</Animated.View>
</TouchableOpacity>
</View>
</Animated.View>
);
};
const styles = StyleSheet.create({
container: {
flex: 1,
position: "relative",
},
middle: {
flex: 1,
justifyContent: "center",
alignItems: "center",
@@ -372,6 +400,23 @@ const styles = StyleSheet.create({
justifyContent: "center",
alignItems: "center",
},
qr: {
position: "absolute",
top: 30,
left: 10,
padding: 10,
zIndex: 100,
},
icon: {
height: 40,
width: 40,
},
topBar: {
height: 40,
backgroundColor: "#000",
paddingTop: 50,
},
button: {
width: 100,
height: 100,
@@ -389,7 +434,7 @@ const styles = StyleSheet.create({
},
statusText: {
position: "absolute",
bottom: 10,
bottom: 20,
alignSelf: "center",
fontSize: 12,
fontWeight: "bold",
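
playPip and playPop come from ../lib/useSoundEffect, which is imported here but not included in this diff. A plausible shape for that hook, assuming it wraps expo-av's Audio.Sound; treat the details as an assumption rather than the project's actual implementation.

import { useCallback, useEffect, useRef } from "react";
import { Audio } from "expo-av";

// Hypothetical useSoundEffect: preload the asset once, return a function that replays it.
export default function useSoundEffect(source: number) {
  const soundRef = useRef<Audio.Sound | null>(null);

  useEffect(() => {
    let mounted = true;
    // require("../../assets/pip.mp3") resolves to a numeric asset id that createAsync accepts.
    Audio.Sound.createAsync(source).then(({ sound }) => {
      if (mounted) {
        soundRef.current = sound;
      } else {
        sound.unloadAsync();
      }
    });
    return () => {
      mounted = false;
      soundRef.current?.unloadAsync();
    };
  }, [source]);

  return useCallback(async () => {
    await soundRef.current?.replayAsync();
  }, []);
}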

@@ -196,11 +196,11 @@ async def send_messages(websocket: WebSocket):
try:
if isinstance(message, dict):
print(f"Sending to the device: {type(message)} {str(message)[:100]}")
# print(f"Sending to the device: {type(message)} {str(message)[:100]}")
await websocket.send_json(message)
elif isinstance(message, bytes):
message = base64.b64encode(message)
print(f"Sending to the device: {type(message)} {str(message)[:100]}")
# print(f"Sending to the device: {type(message)} {str(message)[:100]}")
await websocket.send_bytes(message)
"""
