Commit 3487f8e6 authored by ple98

Add new speech therapy screens

parent aa21b521
@@ -13,6 +13,8 @@ import ProgressMapScreen from './screens/ProgressMapScreen';
import WordGameScreen from './screens/WordGameScreen';
import PhoneticWordScreen from './screens/PhoneticWordsScreen';
import FlipCardGameScreen from './screens/FlipCardGameScreen';
+import SpeechTherapyScreen_1 from './screens/SpeechTherapy_1';
+import SpeechTherapyScreen_2 from './screens/SpeechTherapy_2';
const MainStack = createStackNavigator(
{
@@ -31,6 +33,12 @@ const MainStack = createStackNavigator(
SpeechTherapy: {
screen: SpeechTherapyScreen,
},
+SpeechTherapy_1: {
+screen: SpeechTherapyScreen_1,
+},
+SpeechTherapy_2: {
+screen: SpeechTherapyScreen_2,
+},
ProgressMap: {
screen: ProgressMapScreen,
},
@@ -43,6 +51,7 @@ const MainStack = createStackNavigator(
FlipCardGame: {
screen: FlipCardGameScreen,
},
},
{
initialRouteName: 'Login',
...
@@ -14,6 +14,7 @@
"axios": "^1.3.6",
"expo": "~48.0.9",
"expo-av": "~13.2.1",
"expo-camera": "~13.2.1",
"expo-file-system": "~15.2.2",
"expo-screen-orientation": "~5.1.1",
"expo-status-bar": "~1.4.4",
@@ -7349,6 +7350,14 @@
"prop-types": "*"
}
},
"node_modules/dequal": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/dequal/-/dequal-1.0.1.tgz",
"integrity": "sha512-Fx8jxibzkJX2aJgyfSdLhr9tlRoTnHKrRJuu2XHlAgKioN2j19/Bcbe0d4mFXYZ3+wpE2KVobUVTfDutcD17xQ==",
"engines": {
"node": ">=6"
}
},
"node_modules/destroy": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz",
@@ -7819,6 +7828,43 @@
"expo": "*"
}
},
"node_modules/expo-camera": {
"version": "13.2.1",
"resolved": "https://registry.npmjs.org/expo-camera/-/expo-camera-13.2.1.tgz",
"integrity": "sha512-fZdRyF402jJGGmLVlumrLcr5Em9+Y2SO1MIlxLBtHXnybyHbTRMRAbzVapKX1Aryfujqadh+Kl+sdsWYkMuJjg==",
"dependencies": {
"@koale/useworker": "^4.0.2",
"invariant": "^2.2.4"
},
"peerDependencies": {
"expo": "*"
}
},
"node_modules/expo-camera/node_modules/@koale/useworker": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/@koale/useworker/-/useworker-4.0.2.tgz",
"integrity": "sha512-xPIPADtom8/3/4FLNj7MvNcBM/Z2FleH85Fdx2O869eoKW8+PoEgtSVvoxWjCWMA46Sm9A5/R1TyzNGc+yM0wg==",
"dependencies": {
"dequal": "^1.0.0"
},
"peerDependencies": {
"react": "^16.8.0"
}
},
"node_modules/expo-camera/node_modules/react": {
"version": "16.14.0",
"resolved": "https://registry.npmjs.org/react/-/react-16.14.0.tgz",
"integrity": "sha512-0X2CImDkJGApiAlcf0ODKIneSwBPhqJawOa5wCtKbu7ZECrmS26NvtSILynQ66cgkT/RJ4LidJOc3bUESwmU8g==",
"peer": true,
"dependencies": {
"loose-envify": "^1.1.0",
"object-assign": "^4.1.1",
"prop-types": "^15.6.2"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/expo-constants": {
"version": "14.2.1",
"resolved": "https://registry.npmjs.org/expo-constants/-/expo-constants-14.2.1.tgz",
@@ -20767,6 +20813,11 @@
"prop-types": "*"
}
},
"dequal": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/dequal/-/dequal-1.0.1.tgz",
"integrity": "sha512-Fx8jxibzkJX2aJgyfSdLhr9tlRoTnHKrRJuu2XHlAgKioN2j19/Bcbe0d4mFXYZ3+wpE2KVobUVTfDutcD17xQ=="
},
"destroy": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz",
@@ -21129,6 +21180,36 @@
"integrity": "sha512-mC0mYSzaOaZgXjzhW2l4Ag325JjH6q5IbptfwD7gkMOFYy7VPOMxEMUnetadbs3DDzmgE6vUWrTjUIUbwq59qg==",
"requires": {}
},
"expo-camera": {
"version": "13.2.1",
"resolved": "https://registry.npmjs.org/expo-camera/-/expo-camera-13.2.1.tgz",
"integrity": "sha512-fZdRyF402jJGGmLVlumrLcr5Em9+Y2SO1MIlxLBtHXnybyHbTRMRAbzVapKX1Aryfujqadh+Kl+sdsWYkMuJjg==",
"requires": {
"@koale/useworker": "^4.0.2",
"invariant": "^2.2.4"
},
"dependencies": {
"@koale/useworker": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/@koale/useworker/-/useworker-4.0.2.tgz",
"integrity": "sha512-xPIPADtom8/3/4FLNj7MvNcBM/Z2FleH85Fdx2O869eoKW8+PoEgtSVvoxWjCWMA46Sm9A5/R1TyzNGc+yM0wg==",
"requires": {
"dequal": "^1.0.0"
}
},
"react": {
"version": "16.14.0",
"resolved": "https://registry.npmjs.org/react/-/react-16.14.0.tgz",
"integrity": "sha512-0X2CImDkJGApiAlcf0ODKIneSwBPhqJawOa5wCtKbu7ZECrmS26NvtSILynQ66cgkT/RJ4LidJOc3bUESwmU8g==",
"peer": true,
"requires": {
"loose-envify": "^1.1.0",
"object-assign": "^4.1.1",
"prop-types": "^15.6.2"
}
}
}
},
"expo-constants": {
"version": "14.2.1",
"resolved": "https://registry.npmjs.org/expo-constants/-/expo-constants-14.2.1.tgz",
@@ -17,12 +17,12 @@ const PhoneticWordsScreen = () => {
const woodboardPath = './assets/ST/woodboard.png';
const audioFiles = {
-battle: require('./assets/ST/content/battle.m4a'),
-kettle: require('./assets/ST/content/kettle.m4a'),
-bottle: require('./assets/ST/content/bottle.m4a'),
-rat: require('./assets/ST/content/rat.m4a'),
-bat: require('./assets/ST/content/bat.m4a'),
-cat: require('./assets/ST/content/cat.m4a'),
+battle: require('./assets/ST/content/battle.mp3'),
+kettle: require('./assets/ST/content/kettle.mp3'),
+bottle: require('./assets/ST/content/bottle.mp3'),
+rat: require('./assets/ST/content/rat.mp3'),
+bat: require('./assets/ST/content/bat.mp3'),
+cat: require('./assets/ST/content/cat.mp3'),
};
useEffect(() => {
...
@@ -23,7 +23,7 @@ const SpeechTherapyScreen = ({ navigation }) => {
const [hasPermission, setHasPermission] = useState(null);
const text = "I go to school";
-const audioPath = './assets/ST/content/school_sentence.m4a';
+const audioPath = './assets/ST/content/school_sentence.mp3';
const backgroundPath = './assets/ST/blackboard.png';
const buttonPath = './assets/ST/next.png';
const owlPath = './assets/ST/owl.png';
@@ -187,7 +187,7 @@ const SpeechTherapyScreen = ({ navigation }) => {
};
const navigateToPhoneticWords = () => {
-navigation.navigate('PhoneticWord');
+navigation.navigate('SpeechTherapy_1');
};
// ===== New file: screens/SpeechTherapy_1 =====
import React, { useEffect, useState, useRef } from 'react';
import {
View,
Text,
StyleSheet,
ImageBackground,
TouchableOpacity,
Image,
TouchableWithoutFeedback,
} from 'react-native';
import { Audio } from 'expo-av';
import * as ScreenOrientation from 'expo-screen-orientation';
import * as FileSystem from 'expo-file-system';
import axios from 'axios';
import Toast from 'react-native-toast-message';
import { Camera } from 'expo-camera';
const SpeechTherapyScreen_1 = ({ navigation }) => {
const [sound, setSound] = useState(null);
const [progress, setProgress] = useState(0);
const [hasPermission, setHasPermission] = useState(null);
const text = "Small ball on the floor";
const audioPath = './assets/ST/content/small_ball_sentence.mp3';
const backgroundPath = './assets/ST/blackboard.png';
const buttonPath = './assets/ST/next.png';
const owlPath = './assets/ST/owl.png';
const recordingIconPath = './assets/ST/recording_icon.png';
const [recording, setRecording] = useState(null);
const [receivedText, setReceivedText] = useState('');
const cameraRef = useRef(null);
const startRecording = async () => {
console.debug("Starting recording");
if (recording) {
return;
}
try {
await Audio.requestPermissionsAsync();
const newRecording = new Audio.Recording();
// RECORDING_OPTIONS_PRESET_HIGH_QUALITY is deprecated in recent expo-av releases;
// RecordingOptionsPresets.HIGH_QUALITY is the current name for the same preset.
await newRecording.prepareToRecordAsync(Audio.RecordingOptionsPresets.HIGH_QUALITY);
setRecording(newRecording);
await newRecording.startAsync();
} catch (error) {
console.error('Error while recording:', error);
}
};
const stopRecording = async () => {
if (!recording) {
return;
}
await recording.stopAndUnloadAsync();
console.debug("Stopping recording");
const uri = recording.getURI();
setRecording(null);
sendAudio(uri);
};
// Earlier approach, kept for reference: upload the recording as a base64 string.
// const sendAudio = async (uri) => {
// console.debug("Sending...");
// try {
// const audioData = await FileSystem.readAsStringAsync(uri, { encoding: FileSystem.EncodingType.Base64 });
// const response = await axios.post('http://192.168.10.243:5000/api/upload-audio', { audio: audioData });
// console.log('Audio sent:', response.data);
// } catch (error) {
// console.error('Error while sending audio:', error);
// }
// };
const sendAudio = async (uri) => {
console.debug("Sending...");
try {
const formData = new FormData();
const audioFile = await FileSystem.getInfoAsync(uri);
formData.append('audio', {
uri: audioFile.uri,
type: 'audio/mp3', // NOTE: expo-av's default presets typically record .m4a on Android and .caf on iOS, so this type may not match the actual container
name: 'recording.mp3',
});
const response = await axios.post('http://192.168.1.42:5000/api/upload-audio', formData, {
headers: {
'Content-Type': 'multipart/form-data',
},
});
console.log('Audio sent:', response.data);
//setReceivedText(response.data); // Update the receivedText state
// Show the toast message
Toast.show({
type: 'success',
text1: response.data,
autoHide: true,
visibilityTime: 4000,
});
} catch (error) {
console.error('Error while sending audio:', error);
}
};
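// For reference, a minimal sketch of the server contract this client assumes:
// an endpoint on port 5000 that accepts multipart form data under the 'audio'
// field and responds with the recognized text as a plain string. The actual
// server behind http://192.168.1.42:5000 is not part of this commit, so the
// Express/multer implementation below is an assumption, not the real backend.
//
// const express = require('express');
// const multer = require('multer');
// const upload = multer({ dest: 'uploads/' });
// const app = express();
// app.post('/api/upload-audio', upload.single('audio'), (req, res) => {
//   // req.file.path holds the uploaded recording; run speech recognition here
//   // and return the recognized sentence as the response body.
//   res.send('recognized sentence');
// });
// app.listen(5000);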
const handleScreenTouch = async () => {
if (sound) {
await sound.unloadAsync();
}
const { sound: newSound } = await Audio.Sound.createAsync(
require(audioPath),
{},
updatePlaybackStatus,
);
setSound(newSound);
await newSound.playAsync();
startRecording(); //Start recording
};
useEffect(() => {
async function changeScreenOrientation() {
await ScreenOrientation.lockAsync(ScreenOrientation.OrientationLock.LANDSCAPE);
}
changeScreenOrientation();
return async () => {
await ScreenOrientation.lockAsync(ScreenOrientation.OrientationLock.PORTRAIT);
};
}, []);
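// Play the target sentence once on mount. Note that the cleanup below captures
// the initial `sound` value (null), so the sound created inside playAudio is
// never unloaded here; handleScreenTouch unloads the previous sound instead.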
useEffect(() => {
async function playAudio() {
const { sound } = await Audio.Sound.createAsync(
require(audioPath),
{},
updatePlaybackStatus,
);
setSound(sound);
await sound.playAsync();
}
playAudio();
return () => {
if (sound) {
sound.unloadAsync();
}
};
}, []);
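// Auto-stop: once a recording starts, stop it after 3 seconds and upload it.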
useEffect(() => {
let timer;
if (recording) {
timer = setTimeout(async () => {
await stopRecording();
}, 3000);
}
return () => {
clearTimeout(timer);
};
}, [recording]);
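// Ask for camera access once on mount; the front-camera preview below only
// renders once permission is granted.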
useEffect(() => {
(async () => {
// Camera.requestPermissionsAsync is deprecated in recent expo-camera releases;
// requestCameraPermissionsAsync requests camera access only.
const { status } = await Camera.requestCameraPermissionsAsync();
setHasPermission(status === 'granted');
})();
}, []);
const updatePlaybackStatus = (status) => {
if (status.isLoaded) {
setProgress(status.positionMillis / status.durationMillis);
}
};
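// Map playback progress to per-character color so the sentence highlights
// karaoke-style: characters behind the playback position turn gold.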
const getCharColor = (charIndex) => {
const charProgress = charIndex / text.length;
return charProgress <= progress ? '#FFCE6D' : 'white';
};
const navigateToNextScreen = () => {
navigation.navigate('SpeechTherapy_2');
};
const screenBackgroundPath = './assets/ST/st_bg.png';
return (
<View style={styles.container}>
<ImageBackground
source={require(screenBackgroundPath)}
style={styles.background}
resizeMode="cover"
/>
<TouchableWithoutFeedback onPress={handleScreenTouch}>
<View style={styles.content}>
<View style={styles.backgroundWrapper}>
<ImageBackground
source={require(backgroundPath)}
style={styles.backgroundImage}
resizeMode="contain"
>
<Text style={styles.text}>
{text.split('').map((char, index) => (
<Text key={index} style={{ color: getCharColor(index) }}>
{char}
</Text>
))}
</Text>
</ImageBackground>
</View>
<TouchableOpacity onPress={navigateToNextScreen} style={styles.button}>
<Image source={require(buttonPath)} style={styles.buttonImage} />
</TouchableOpacity>
<Image source={require(owlPath)} style={styles.owlImage} />
{recording && <Image source={require(recordingIconPath)} style={styles.recordingIcon} />}
</View>
</TouchableWithoutFeedback>
{hasPermission && (
<Camera
style={styles.camera}
type={Camera.Constants.Type.front}
ref={cameraRef}
ratio="1:1"
/>
)}
<Toast ref={(ref) => Toast.setRef(ref)} />
</View>
);
};
const styles = StyleSheet.create({
container: {
flex: 1,
},
background: {
position: 'absolute',
top: 0,
left: 0,
right: 0,
bottom: 0,
},
content: {
flex: 1,
alignItems: 'center',
justifyContent: 'center',
},
backgroundWrapper: {
width: '90%',
height: '80%',
alignItems: 'center',
justifyContent: 'center',
},
backgroundImage: {
width: '100%',
height: '100%',
alignItems: 'center',
justifyContent: 'center',
},
text: {
fontSize: 50,
fontWeight: 'bold',
},
button: {
position: 'absolute',
bottom: 20,
right: 20,
},
buttonImage: {
width: 50,
height: 50,
},
owlImage: {
position: 'absolute',
bottom: 10,
left: 30,
width: 190,
height: 140,
},
recordingIcon: {
position: 'absolute',
bottom: 90,
right: 22,
width: 45,
height: 45,
},
receivedTextContainer: {
position: 'absolute',
bottom: 0,
paddingHorizontal: 10,
paddingVertical: 5,
alignItems: 'center',
justifyContent: 'center',
},
receivedText: {
fontSize: 35,
color: 'red',
},
camera: {
position: 'absolute',
top: 39,
right: 6,
width: 140,
height: 120,
overflow: 'hidden',
borderRadius: 5,
}
});
export default SpeechTherapyScreen_1;
// ===== New file: screens/SpeechTherapy_2 =====
// Mirrors SpeechTherapy_1; only the sentence, the audio clip, and the screen
// reached by the next button differ.
import React, { useEffect, useState, useRef } from 'react';
import {
View,
Text,
StyleSheet,
ImageBackground,
TouchableOpacity,
Image,
TouchableWithoutFeedback,
} from 'react-native';
import { Audio } from 'expo-av';
import * as ScreenOrientation from 'expo-screen-orientation';
import * as FileSystem from 'expo-file-system';
import axios from 'axios';
import Toast from 'react-native-toast-message';
import { Camera } from 'expo-camera';
const SpeechTherapyScreen_2 = ({ navigation }) => {
const [sound, setSound] = useState(null);
const [progress, setProgress] = useState(0);
const [hasPermission, setHasPermission] = useState(null);
const text = "Sunset is beautiful";
const audioPath = './assets/ST/content/sunset_sentence.mp3';
const backgroundPath = './assets/ST/blackboard.png';
const buttonPath = './assets/ST/next.png';
const owlPath = './assets/ST/owl.png';
const recordingIconPath = './assets/ST/recording_icon.png';
const [recording, setRecording] = useState(null);
const [receivedText, setReceivedText] = useState('');
const cameraRef = useRef(null);
const startRecording = async () => {
console.debug("Starting recording");
if (recording) {
return;
}
try {
await Audio.requestPermissionsAsync();
const newRecording = new Audio.Recording();
// RECORDING_OPTIONS_PRESET_HIGH_QUALITY is deprecated in recent expo-av releases;
// RecordingOptionsPresets.HIGH_QUALITY is the current name for the same preset.
await newRecording.prepareToRecordAsync(Audio.RecordingOptionsPresets.HIGH_QUALITY);
setRecording(newRecording);
await newRecording.startAsync();
} catch (error) {
console.error('Error while recording:', error);
}
};
const stopRecording = async () => {
if (!recording) {
return;
}
await recording.stopAndUnloadAsync();
console.debug("Stopping recording");
const uri = recording.getURI();
setRecording(null);
sendAudio(uri);
};
// Earlier approach, kept for reference: upload the recording as a base64 string.
// const sendAudio = async (uri) => {
// console.debug("Sending...");
// try {
// const audioData = await FileSystem.readAsStringAsync(uri, { encoding: FileSystem.EncodingType.Base64 });
// const response = await axios.post('http://192.168.10.243:5000/api/upload-audio', { audio: audioData });
// console.log('Audio sent:', response.data);
// } catch (error) {
// console.error('Error while sending audio:', error);
// }
// };
const sendAudio = async (uri) => {
console.debug("Sending...");
try {
const formData = new FormData();
const audioFile = await FileSystem.getInfoAsync(uri);
formData.append('audio', {
uri: audioFile.uri,
type: 'audio/mp3', // NOTE: expo-av's default presets typically record .m4a on Android and .caf on iOS, so this type may not match the actual container
name: 'recording.mp3',
});
const response = await axios.post('http://192.168.1.42:5000/api/upload-audio', formData, {
headers: {
'Content-Type': 'multipart/form-data',
},
});
console.log('Audio sent:', response.data);
//setReceivedText(response.data); // Update the receivedText state
// Show the toast message
Toast.show({
type: 'success',
text1: response.data,
autoHide: true,
visibilityTime: 4000,
});
} catch (error) {
console.error('Error while sending audio:', error);
}
};
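// See the endpoint sketch in SpeechTherapy_1 for the (assumed) server contract
// behind this upload.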
const handleScreenTouch = async () => {
if (sound) {
await sound.unloadAsync();
}
const { sound: newSound } = await Audio.Sound.createAsync(
require(audioPath),
{},
updatePlaybackStatus,
);
setSound(newSound);
await newSound.playAsync();
startRecording(); //Start recording
};
useEffect(() => {
async function changeScreenOrientation() {
await ScreenOrientation.lockAsync(ScreenOrientation.OrientationLock.LANDSCAPE);
}
changeScreenOrientation();
return async () => {
await ScreenOrientation.lockAsync(ScreenOrientation.OrientationLock.PORTRAIT);
};
}, []);
useEffect(() => {
async function playAudio() {
const { sound } = await Audio.Sound.createAsync(
require(audioPath),
{},
updatePlaybackStatus,
);
setSound(sound);
await sound.playAsync();
}
playAudio();
return () => {
if (sound) {
sound.unloadAsync();
}
};
}, []);
useEffect(() => {
let timer;
if (recording) {
timer = setTimeout(async () => {
await stopRecording();
}, 3000);
}
return () => {
clearTimeout(timer);
};
}, [recording]);
useEffect(() => {
(async () => {
// Camera.requestPermissionsAsync is deprecated in recent expo-camera releases;
// requestCameraPermissionsAsync requests camera access only.
const { status } = await Camera.requestCameraPermissionsAsync();
setHasPermission(status === 'granted');
})();
}, []);
const updatePlaybackStatus = (status) => {
if (status.isLoaded) {
setProgress(status.positionMillis / status.durationMillis);
}
};
const getCharColor = (charIndex) => {
const charProgress = charIndex / text.length;
return charProgress <= progress ? '#FFCE6D' : 'white';
};
const navigateToPhoneticWords = () => {
navigation.navigate('PhoneticWord');
};
const screenBackgroundPath = './assets/ST/st_bg.png';
return (
<View style={styles.container}>
<ImageBackground
source={require(screenBackgroundPath)}
style={styles.background}
resizeMode="cover"
/>
<TouchableWithoutFeedback onPress={handleScreenTouch}>
<View style={styles.content}>
<View style={styles.backgroundWrapper}>
<ImageBackground
source={require(backgroundPath)}
style={styles.backgroundImage}
resizeMode="contain"
>
<Text style={styles.text}>
{text.split('').map((char, index) => (
<Text key={index} style={{ color: getCharColor(index) }}>
{char}
</Text>
))}
</Text>
</ImageBackground>
</View>
<TouchableOpacity onPress={navigateToPhoneticWords} style={styles.button}>
<Image source={require(buttonPath)} style={styles.buttonImage} />
</TouchableOpacity>
<Image source={require(owlPath)} style={styles.owlImage} />
{recording && <Image source={require(recordingIconPath)} style={styles.recordingIcon} />}
</View>
</TouchableWithoutFeedback>
{hasPermission && (
<Camera
style={styles.camera}
type={Camera.Constants.Type.front}
ref={cameraRef}
ratio="1:1"
/>
)}
<Toast ref={(ref) => Toast.setRef(ref)} />
</View>
);
};
const styles = StyleSheet.create({
container: {
flex: 1,
},
background: {
position: 'absolute',
top: 0,
left: 0,
right: 0,
bottom: 0,
},
content: {
flex: 1,
alignItems: 'center',
justifyContent: 'center',
},
backgroundWrapper: {
width: '90%',
height: '80%',
alignItems: 'center',
justifyContent: 'center',
},
backgroundImage: {
width: '100%',
height: '100%',
alignItems: 'center',
justifyContent: 'center',
},
text: {
fontSize: 50,
fontWeight: 'bold',
},
button: {
position: 'absolute',
bottom: 20,
right: 20,
},
buttonImage: {
width: 50,
height: 50,
},
owlImage: {
position: 'absolute',
bottom: 10,
left: 30,
width: 190,
height: 140,
},
recordingIcon: {
position: 'absolute',
bottom: 90,
right: 22,
width: 45,
height: 45,
},
receivedTextContainer: {
position: 'absolute',
bottom: 0,
paddingHorizontal: 10,
paddingVertical: 5,
alignItems: 'center',
justifyContent: 'center',
},
receivedText: {
fontSize: 35,
color: 'red',
},
camera: {
position: 'absolute',
top: 39,
right: 6,
width: 140,
height: 120,
overflow: 'hidden',
borderRadius: 5,
}
});
export default SpeechTherapyScreen_2;