Merge pull request #224 from tyfiero/Add-iOS-client

Add react native app to iOS client
Ty Fiero committed 9 months ago via GitHub · commit 0dbfe1fda7

@@ -0,0 +1,13 @@
# iOS/Android Client
[WORK IN PROGRESS]
This directory contains the source code for the 01 iOS/Android app. It is a work in progress; we will keep improving it until it works properly.
Feel free to improve it and open a pull request!
To run it yourself, you will need Expo.
1. Install dependencies: `npm install`
2. Start the app: `npx expo start`
3. Open the app in a simulator, or on your device with the Expo Go app by scanning the QR code
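
Note that the QR code above is Expo's development QR code. Inside the app, the Home screen opens a camera view that scans a second QR code whose payload is used directly as a WebSocket URL in `Main` (`new WebSocket(scannedData)`), so that code should encode your 01 server's address, for example `ws://<server-ip>:<port>` (the exact URL format is an assumption here).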

@@ -0,0 +1,22 @@
import * as React from "react";
import { NavigationContainer } from "@react-navigation/native";
import { createNativeStackNavigator } from "@react-navigation/native-stack";
import HomeScreen from "./src/screens/HomeScreen";
import CameraScreen from "./src/screens/Camera";
import Main from "./src/screens/Main";
const Stack = createNativeStackNavigator();
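// Navigation flow: Home (entry screen) -> Camera (scan a QR code) -> Main (WebSocket connection and voice recording).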
function App() {
return (
<NavigationContainer>
<Stack.Navigator initialRouteName="Home">
<Stack.Screen name="Home" component={HomeScreen} />
<Stack.Screen name="Camera" component={CameraScreen} />
<Stack.Screen name="Main" component={Main} />
</Stack.Navigator>
</NavigationContainer>
);
}
export default App;

@@ -0,0 +1,38 @@
{
"expo": {
"name": "01iOS",
"slug": "01iOS",
"version": "1.0.0",
"orientation": "portrait",
"icon": "./assets/icon.png",
"userInterfaceStyle": "light",
"splash": {
"image": "./assets/splash.png",
"resizeMode": "contain",
"backgroundColor": "#ffffff"
},
"assetBundlePatterns": ["**/*"],
"plugins": [
[
"expo-camera",
{
"cameraPermission": "Allow $(PRODUCT_NAME) to access your camera",
"microphonePermission": "Allow $(PRODUCT_NAME) to access your microphone",
"recordAudioAndroid": true
}
]
],
"ios": {
"supportsTablet": true
},
"android": {
"adaptiveIcon": {
"foregroundImage": "./assets/adaptive-icon.png",
"backgroundColor": "#ffffff"
}
},
"web": {
"favicon": "./assets/favicon.png"
}
}
}

Binary file not shown (image asset added, 17 KiB).

Binary file not shown (image asset added, 1.4 KiB).

Binary file not shown (image asset added, 22 KiB).

Binary file not shown (image asset added, 46 KiB).

@@ -0,0 +1,6 @@
module.exports = function(api) {
api.cache(true);
return {
presets: ['babel-preset-expo'],
};
};

File diff suppressed because it is too large.

@@ -0,0 +1,38 @@
{
"name": "01ios",
"version": "1.0.0",
"main": "node_modules/expo/AppEntry.js",
"scripts": {
"start": "expo start",
"android": "expo start --android",
"ios": "expo start --ios",
"web": "expo start --web",
"ts:check": "tsc"
},
"dependencies": {
"@react-navigation/native": "^6.1.14",
"@react-navigation/native-stack": "^6.9.22",
"expo": "~50.0.8",
"expo-camera": "~14.0.5",
"expo-status-bar": "~1.11.1",
"react": "18.2.0",
"react-native": "0.73.4",
"react-native-safe-area-context": "4.8.2",
"react-native-screens": "~3.29.0",
"expo-barcode-scanner": "~12.9.3",
"expo-av": "~13.10.5"
},
"devDependencies": {
"@babel/core": "^7.20.0",
"@types/react": "~18.2.45",
"typescript": "^5.1.3"
},
"ios": {
"infoPlist": {
"NSAppTransportSecurity": {
"NSAllowsArbitraryLoads": true
}
}
},
"private": true
}
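
One thing to double-check: the `ios.infoPlist` entry above sits in `package.json`, which the Expo config system does not read; iOS settings like this normally belong in `app.json` under `expo.ios`. A minimal sketch of where the same App Transport Security exception would likely go, assuming that is the intent:

```json
{
  "expo": {
    "ios": {
      "supportsTablet": true,
      "infoPlist": {
        "NSAppTransportSecurity": {
          "NSAllowsArbitraryLoads": true
        }
      }
    }
  }
}
```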

@@ -0,0 +1,102 @@
import React, { useState } from "react";
import { StyleSheet, Text, TouchableOpacity, View } from "react-native";
import { Camera, CameraType } from "expo-camera";
import { useNavigation } from "@react-navigation/native";
import { BarCodeScanner } from "expo-barcode-scanner";
export default function CameraScreen() {
const [permission, requestPermission] = Camera.useCameraPermissions();
const [scanned, setScanned] = useState(false);
const navigation = useNavigation();
if (!permission) {
// Component is waiting for permission
return <View />;
}
if (!permission.granted) {
// No permission granted, request permission
return (
<View style={styles.container}>
<Text>No access to camera</Text>
<TouchableOpacity onPress={requestPermission} style={styles.button}>
<Text style={styles.text}>Grant Camera Access</Text>
</TouchableOpacity>
</View>
);
}
// function toggleCameraFacing() {
// setFacing((current) => (current === "back" ? "front" : "back"));
// }
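  // A scanned QR code's payload is forwarded to the Main screen, which uses it as the WebSocket URL.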
const handleBarCodeScanned = ({
type,
data,
}: {
type: string;
data: string;
}) => {
setScanned(true);
console.log(
`Bar code with type ${type} and data ${data} has been scanned!`
);
alert(`Scanned URL: ${data}`);
navigation.navigate("Main", { scannedData: data });
};
return (
<View style={styles.container}>
<Camera
style={styles.camera}
facing={"back"}
onBarCodeScanned={scanned ? undefined : handleBarCodeScanned}
barCodeScannerSettings={{
barCodeTypes: [BarCodeScanner.Constants.BarCodeType.qr],
}}
>
<View style={styles.buttonContainer}>
{/* <TouchableOpacity style={styles.button} onPress={toggleCameraFacing}>
<Text style={styles.text}>Flip Camera</Text>
</TouchableOpacity> */}
{scanned && (
<TouchableOpacity
onPress={() => setScanned(false)}
style={styles.button}
>
<Text style={styles.text}>Scan Again</Text>
</TouchableOpacity>
)}
</View>
</Camera>
</View>
);
}
const styles = StyleSheet.create({
container: {
flex: 1,
flexDirection: "column",
justifyContent: "flex-end",
},
camera: {
flex: 1,
},
buttonContainer: {
backgroundColor: "transparent",
flexDirection: "row",
margin: 20,
},
button: {
flex: 0.1,
alignSelf: "flex-end",
alignItems: "center",
backgroundColor: "#000",
borderRadius: 10,
padding: 15,
},
text: {
fontSize: 18,
color: "white",
},
});

@@ -0,0 +1,47 @@
import React from "react";
import { View, Text, TouchableOpacity, StyleSheet } from "react-native";
import { useNavigation } from "@react-navigation/native";
const HomeScreen = () => {
const navigation = useNavigation();
return (
<View style={styles.container}>
<View style={styles.circle} />
<TouchableOpacity
style={styles.button}
onPress={() => navigation.navigate("Camera")}
>
<Text style={styles.buttonText}>Scan Code</Text>
</TouchableOpacity>
</View>
);
};
const styles = StyleSheet.create({
container: {
flex: 1,
justifyContent: "center",
alignItems: "center",
backgroundColor: "#fff",
},
circle: {
width: 100,
height: 100,
borderRadius: 50,
backgroundColor: "black",
marginBottom: 20,
},
button: {
backgroundColor: "black",
paddingHorizontal: 20,
paddingVertical: 10,
borderRadius: 5,
},
buttonText: {
color: "white",
fontSize: 16,
},
});
export default HomeScreen;

@@ -0,0 +1,171 @@
import React, { useState, useEffect, useRef } from "react";
import { View, Text, TouchableOpacity, StyleSheet } from "react-native";
import { Audio } from "expo-av";
interface MainProps {
route: {
params: {
scannedData: string;
};
};
}
const Main: React.FC<MainProps> = ({ route }) => {
const { scannedData } = route.params;
const [connectionStatus, setConnectionStatus] =
useState<string>("Connecting...");
const [ws, setWs] = useState<WebSocket | null>(null);
const [recording, setRecording] = useState<Audio.Recording | null>(null);
const [audioQueue, setAudioQueue] = useState<string[]>([]);
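  // Incoming WebSocket messages are treated as audio URIs and queued for sequential playback.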
  // Tracks whether a queued clip is currently playing so only one starts at a time.
  const isPlayingRef = useRef(false);

  // Drain the audio queue: whenever it changes and nothing is playing, play the
  // URI at its head, then remove that item when playback finishes.
  useEffect(() => {
    const playNextAudio = async () => {
      if (isPlayingRef.current || audioQueue.length === 0) {
        return;
      }
      isPlayingRef.current = true;
      const uri = audioQueue[0];
      const { sound } = await Audio.Sound.createAsync(
        { uri },
        { shouldPlay: true }
      );
      sound.setOnPlaybackStatusUpdate(async (status) => {
        if (status.isLoaded && status.didJustFinish && !status.isLooping) {
          await sound.unloadAsync();
          isPlayingRef.current = false;
          // Dropping the finished item re-runs this effect and plays the next one.
          setAudioQueue((prevQueue) => prevQueue.slice(1));
        }
      });
    };
    playNextAudio();
  }, [audioQueue]);

  // Open the WebSocket once per scanned URL; incoming messages are queued above.
  useEffect(() => {
    let websocket: WebSocket;
    try {
      console.log("Connecting to WebSocket at " + scannedData);
      websocket = new WebSocket(scannedData);
      websocket.onopen = () => {
        setConnectionStatus(`Connected to ${scannedData}`);
        console.log("WebSocket connected");
      };
      websocket.onmessage = async (e) => {
        console.log("Received message: ", e.data);
        setAudioQueue((prevQueue) => [...prevQueue, e.data]);
      };
      websocket.onerror = (error) => {
        setConnectionStatus("Error connecting to WebSocket.");
        console.error("WebSocket error: ", error);
      };
      websocket.onclose = () => {
        setConnectionStatus("Disconnected.");
        console.log("WebSocket disconnected");
      };
      setWs(websocket);
    } catch (error) {
      console.log(error);
      setConnectionStatus("Error creating WebSocket.");
    }
    return () => {
      if (websocket) {
        websocket.close();
      }
    };
  }, [scannedData]);
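  // Hold-to-talk: recording starts on press-in and stops on press-out of the Record button below.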
const startRecording = async () => {
if (recording) {
console.log("A recording is already in progress.");
return;
}
try {
console.log("Requesting permissions..");
await Audio.requestPermissionsAsync();
await Audio.setAudioModeAsync({
allowsRecordingIOS: true,
playsInSilentModeIOS: true,
});
console.log("Starting recording..");
const { recording: newRecording } = await Audio.Recording.createAsync(
        Audio.RecordingOptionsPresets.HIGH_QUALITY
);
setRecording(newRecording);
console.log("Recording started");
} catch (err) {
console.error("Failed to start recording", err);
}
};
const stopRecording = async () => {
console.log("Stopping recording..");
setRecording(null);
if (recording) {
await recording.stopAndUnloadAsync();
const uri = recording.getURI();
console.log("Recording stopped and stored at", uri);
if (ws && uri) {
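        // Note: this sends the recording's local file URI as text, not the audio data itself.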
ws.send(uri);
}
}
};
return (
<View style={styles.container}>
<Text
style={[
styles.statusText,
{ color: connectionStatus.startsWith("Connected") ? "green" : "red" },
]}
>
{connectionStatus}
</Text>
<TouchableOpacity
style={styles.button}
onPressIn={startRecording}
onPressOut={stopRecording}
>
<View style={styles.circle}>
<Text style={styles.buttonText}>Record</Text>
</View>
</TouchableOpacity>
</View>
);
};
const styles = StyleSheet.create({
container: {
flex: 1,
justifyContent: "center",
alignItems: "center",
backgroundColor: "#fff",
},
circle: {
width: 100,
height: 100,
borderRadius: 50,
backgroundColor: "black",
justifyContent: "center",
alignItems: "center",
},
button: {
width: 100,
height: 100,
borderRadius: 50,
justifyContent: "center",
alignItems: "center",
},
buttonText: {
color: "white",
fontSize: 16,
},
statusText: {
marginBottom: 20,
fontSize: 16,
},
});
export default Main;

@@ -0,0 +1,6 @@
{
"extends": "expo/tsconfig.base",
"compilerOptions": {
"strict": true
}
}