Commit b6e995cb authored by Lihinikaduwa D.N.R.

Merge branch 'it18257632' into 'master'

It18257632

See merge request !181
parents f003b5e4 3863768c
This source diff could not be displayed because it is too large.
@@ -92,7 +92,7 @@ export default function Read() {
<ReadCategory
title={'Advanced'}
image={ImagePaths.roundTwo}
path={'ReadActivityBird'}
path={'ReadActivityDog'}
/>
</>
)}
@@ -15,21 +15,18 @@ import {
} from 'react-native';
import Voice from '@react-native-voice/voice';
import {DummyReadResult, ImagePaths} from '../../../assets/read/data/ReadData';
import AudioRecord from 'react-native-audio-record';
import Client from '../../client/Client';
import AsyncStorage from '@react-native-async-storage/async-storage';
import {Authorize} from '../../auth/AuthenticateUser';
import Client from '../../client/Client';
export default function ReadActivityBird() {
// const userToken = Authorize();
const navigation = useNavigation();
const [readingData, setReadingData] = useState({activity: ''});
// let user = AsyncStorage.getItem('readingSession');
// useEffect(() => {
// if (Authorize) {
// }
// }, []);
const [count, setCount] = useState(1);
useEffect(() => {
Voice.destroy().then(Voice.removeAllListeners);
}, []);
useEffect(() => {
Voice.onSpeechStart = onSpeechStartHandler;
@@ -41,28 +38,15 @@ export default function ReadActivityBird() {
};
}, []);
const getToken = data => {
AsyncStorage.getItem('readingSession')
.then(readingSession => {
sendRedingData(data, readingSession);
})
.catch(error => {
console.log(error);
});
};
const sendRedingData = (data, readingSession) => {
console.log('itemValue', data);
Client.post('reading/'+readingSession, JSON.stringify(data), {
Client.post('reading/' + readingSession, JSON.stringify(data), {
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
},
})
.then(res => {
console.log(res.data);
navigation.navigate('ReadActivity');
navigation.navigate('ReadActivityFish');
})
.catch(error => {
console.log(error);
@@ -78,19 +62,39 @@ export default function ReadActivityBird() {
};
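// On a recognition error, fall back to the dummy result: if it contains 'hello',
// build the attempt payload (word, userId, level, triedCount) from AsyncStorage and
// submit it; otherwise bump the tried count and restart listening.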
const onSpeechError = e => {
console.log('onSpeechError: ', e);
const result = DummyReadResult.value;
if (result.includes('hello')) {
AsyncStorage.getItem('userId')
.then(userId => {
const data = {
word: 'bird',
userId: 1,
userId: userId,
level: 1,
triedCount: 1,
triedCount: count,
};
if (result.includes('hello')) {
setReadingData(data);
console.log('data', getToken());
console.log('readingData:', readingData);
getToken(data);
})
.catch(error => {
console.log(error);
});
} else {
setCount(count + 1);
Voice.start('en-US');
}
console.log('count', count);
console.log('count', count);
};
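// Look up the stored reading session id, then forward the payload to the API.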
const getToken = data => {
AsyncStorage.getItem('readingSession')
.then(readingSession => {
sendRedingData(data, readingSession);
})
.catch(error => {
console.log(error);
});
};
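// Start the en-US speech recogniser when the microphone button is pressed.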
const startRecording = async () => {
@@ -106,30 +110,20 @@
<View style={{flexDirection: 'column'}}>
<ImageBackground
style={styles.image}
source={require('../../../assets/read/image/activity-2-backg.jpeg')}>
source={ImagePaths.backgroundBasic}>
<View style={styles.imageContainer}>
<View style={styles.imageView}>
<View style={styles.robo}>
<Image
source={require('../../../assets/read/image/activity-2-rob.png')}></Image>
<Image source={ImagePaths.robot2}></Image>
</View>
</View>
<View style={styles.textBody}>
<Text style={styles.text}>Pronounce this Word!</Text>
</View>
</View>
{/* <View style={styles.textBody}>
<Text style={styles.text}>Pronounce this Word!</Text>
</View>
<View style={styles.robo}>
<Image
source={require('../../assets/read/activity-2-rob.png')}></Image>
</View> */}
<View>
<Image
style={styles.blackboard}
source={require('../../../assets/read/image/backboard3.png')}></Image>
<Image style={styles.blackboard} source={ImagePaths.bird}></Image>
</View>
<View style={styles.horizontalView}>
<TouchableHighlight onPress={startRecording}>
@@ -170,7 +164,6 @@ const styles = StyleSheet.create({
box: {
width: 180,
height: 180,
// borderColor: "#000000",
backgroundColor: 'blue',
marginTop: -370,
marginLeft: 455,
@@ -189,8 +182,8 @@ const styles = StyleSheet.create({
height: 200,
},
textBody: {
marginTop: 150,
marginLeft: -30,
marginTop: 130,
marginLeft: -50,
// backgroundColor: '#00008B',
width: 150,
borderRadius: 50,
@@ -202,7 +195,7 @@ const styles = StyleSheet.create({
alignItems: 'center',
color: '#00008B',
borderRadius: 10,
backgroundColor: 'rgba(0,0,0,0.2)',
backgroundColor: 'rgba(2, 202, 106, 0.17)',
textAlign: 'center',
fontWeight: 'bold',
},
@@ -217,11 +210,14 @@ const styles = StyleSheet.create({
height: 50,
},
horizontalView: {
backgroundColor: 'rgba(23, 0, 245, 0.17)',
borderRadius: 50,
flexDirection: 'row',
position: 'absolute',
bottom: 0,
flexDirection: 'row',
marginBottom: 140,
marginLeft: 50,
padding: 7,
},
});
import {useNavigation} from '@react-navigation/native';
import Orientation from 'react-native-orientation-locker';
import React, {useEffect, useState} from 'react';
import {
Text,
TouchableOpacity,
StyleSheet,
View,
SafeAreaView,
ImageBackground,
Button,
Image,
TouchableHighlight,
} from 'react-native';
import Voice from '@react-native-voice/voice';
import {DummyReadResult, ImagePaths} from '../../../assets/read/data/ReadData';
import AsyncStorage from '@react-native-async-storage/async-storage';
import {Authorize} from '../../auth/AuthenticateUser';
import Client from '../../client/Client';
export default function ReadActivityDog() {
const navigation = useNavigation();
const [count, setCount] = useState(1);
useEffect(() => {
Voice.destroy().then(Voice.removeAllListeners);
}, []);
useEffect(() => {
Voice.onSpeechStart = onSpeechStartHandler;
Voice.onSpeechResults = onSpeechResultsHandler;
Voice.onSpeechError = onSpeechError;
return () => {
Voice.destroy().then(Voice.removeAllListeners);
};
}, []);
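// Post the attempt as JSON to reading/{readingSession}; on success move on to ReadActivityBird.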
const sendRedingData = (data, readingSession) => {
Client.post('reading/' + readingSession, JSON.stringify(data), {
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
},
})
.then(res => {
navigation.navigate('ReadActivityBird');
})
.catch(error => {
console.log(error);
});
};
const onSpeechStartHandler = e => {
console.log('start handler =>> ', e);
};
const onSpeechResultsHandler = e => {
console.log('result handler =>> ', e);
};
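// The error handler doubles as the scoring path: if the dummy result contains 'hello',
// package the 'dog' attempt with the stored userId and submit it; otherwise retry listening.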
const onSpeechError = e => {
console.log('onSpeechError: ', e);
const result = DummyReadResult.value;
if (result.includes('hello')) {
AsyncStorage.getItem('userId')
.then(userId => {
const data = {
word: 'dog',
userId: userId,
level: 1,
triedCount: count,
};
getToken(data);
})
.catch(error => {
console.log(error);
});
} else {
setCount(count + 1);
Voice.start('en-US');
}
console.log('count', count);
};
const getToken = data => {
AsyncStorage.getItem('readingSession')
.then(readingSession => {
sendRedingData(data, readingSession);
})
.catch(error => {
console.log(error);
});
};
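// Begin listening for en-US speech when the microphone is tapped.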
const startRecording = async () => {
try {
await Voice.start('en-US');
} catch (error) {
console.log('error =>> ', error);
}
};
return (
<SafeAreaView>
<View style={{flexDirection: 'column'}}>
<ImageBackground
style={styles.image}
source={ImagePaths.backgroundBasic}>
<View style={styles.imageContainer}>
<View style={styles.imageView}>
<View style={styles.robo}>
<Image source={ImagePaths.robot2} />
</View>
</View>
<View style={styles.textBody}>
<Text style={styles.text}>Pronounce this Word!</Text>
</View>
</View>
<View>
<Image style={styles.blackboard} source={ImagePaths.dog} />
</View>
<View style={styles.horizontalView}>
<TouchableHighlight onPress={startRecording}>
<Image
style={styles.imageButton}
source={{
uri: 'https://raw.githubusercontent.com/AboutReact/sampleresource/master/microphone.png',
}}
/>
</TouchableHighlight>
</View>
</ImageBackground>
</View>
</SafeAreaView>
);
}
const styles = StyleSheet.create({
imageContainer: {
flexDirection: 'row',
marginTop: 70,
},
imageView: {
width: 180,
height: 300,
// borderWidth:1,
// borderColor: "#000",
marginHorizontal: 1,
marginVertical: 100,
},
body: {
flex: 1,
},
image: {
width: '100%',
height: '100%',
},
box: {
width: 180,
height: 180,
backgroundColor: 'blue',
marginTop: -370,
marginLeft: 455,
borderRadius: 100,
},
blackboard: {
marginTop: -320,
marginLeft: 200,
width: '50%',
height: 300,
},
robo: {
marginTop: 90,
marginLeft: 5,
width: 150,
height: 200,
},
textBody: {
marginTop: 130,
marginLeft: -50,
// backgroundColor: '#00008B',
width: 150,
borderRadius: 50,
padding: 5,
},
text: {
fontSize: 25,
justifyContent: 'center',
alignItems: 'center',
color: '#00008B',
borderRadius: 10,
backgroundColor: 'rgba(2, 202, 106, 0.17)',
textAlign: 'center',
fontWeight: 'bold',
},
button: {
padding: 10,
marginLeft: 5,
color: '#000000',
},
imageButton: {
width: 50,
height: 50,
},
horizontalView: {
backgroundColor: 'rgba(23, 0, 245, 0.17)',
borderRadius: 50,
flexDirection: 'row',
position: 'absolute',
bottom: 0,
marginBottom: 140,
marginLeft: 50,
padding: 7,
},
});
import {useNavigation} from '@react-navigation/native';
import Orientation from 'react-native-orientation-locker';
import React, {useEffect, useState} from 'react';
import {
Text,
TouchableOpacity,
StyleSheet,
View,
SafeAreaView,
ImageBackground,
Button,
Image,
TouchableHighlight,
} from 'react-native';
import Voice from '@react-native-voice/voice';
import {DummyReadResult, ImagePaths} from '../../../assets/read/data/ReadData';
import AsyncStorage from '@react-native-async-storage/async-storage';
import {Authorize} from '../../auth/AuthenticateUser';
import Client from '../../client/Client';
export default function ReadActivityFish() {
const navigation = useNavigation();
const [count, setCount] = useState(1);
useEffect(() => {
Voice.destroy().then(Voice.removeAllListeners);
}, []);
useEffect(() => {
Voice.onSpeechStart = onSpeechStartHandler;
Voice.onSpeechResults = onSpeechResultsHandler;
Voice.onSpeechError = onSpeechError;
return () => {
Voice.destroy().then(Voice.removeAllListeners);
};
}, []);
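// Post the attempt as JSON to reading/{readingSession}; on success return to the Read menu.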
const sendRedingData = (data, readingSession) => {
Client.post('reading/' + readingSession, JSON.stringify(data), {
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
},
})
.then(res => {
navigation.navigate('Read');
})
.catch(error => {
console.log(error);
});
};
const onSpeechStartHandler = e => {
console.log('start handler =>> ', e);
};
const onSpeechResultsHandler = e => {
console.log('result handler =>> ', e);
};
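// Same flow as the other activities: check the dummy result, submit the 'fish' attempt
// on a match, or increment the tried count and keep listening.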
const onSpeechError = e => {
console.log('onSpeechError: ', e);
const result = DummyReadResult.value;
if (result.includes('hello')) {
AsyncStorage.getItem('userId')
.then(userId => {
const data = {
word: 'fish',
userId: userId,
level: 1,
triedCount: count,
};
getToken(data);
})
.catch(error => {
console.log(error);
});
} else {
setCount(count + 1);
Voice.start('en-US');
}
console.log('count', count);
};
const getToken = data => {
AsyncStorage.getItem('readingSession')
.then(readingSession => {
sendRedingData(data, readingSession);
})
.catch(error => {
console.log(error);
});
};
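// Start the en-US speech recogniser for the microphone button.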
const startRecording = async () => {
try {
await Voice.start('en-US');
} catch (error) {
console.log('error =>> ', error);
}
};
return (
<SafeAreaView>
<View style={{flexDirection: 'column'}}>
<ImageBackground
style={styles.image}
source={ImagePaths.backgroundBasic}>
<View style={styles.imageContainer}>
<View style={styles.imageView}>
<View style={styles.robo}>
<Image source={ImagePaths.robot2} />
</View>
</View>
<View style={styles.textBody}>
<Text style={styles.text}>Pronounce this Word!</Text>
</View>
</View>
<View>
<Image style={styles.blackboard} source={ImagePaths.fish} />
</View>
<View style={styles.horizontalView}>
<TouchableHighlight onPress={startRecording}>
<Image
style={styles.imageButton}
source={{
uri: 'https://raw.githubusercontent.com/AboutReact/sampleresource/master/microphone.png',
}}
/>
</TouchableHighlight>
</View>
</ImageBackground>
</View>
</SafeAreaView>
);
}
const styles = StyleSheet.create({
imageContainer: {
flexDirection: 'row',
marginTop: 70,
},
imageView: {
width: 180,
height: 300,
// borderWidth:1,
// borderColor: "#000",
marginHorizontal: 1,
marginVertical: 100,
},
body: {
flex: 1,
},
image: {
width: '100%',
height: '100%',
},
box: {
width: 180,
height: 180,
backgroundColor: 'blue',
marginTop: -370,
marginLeft: 455,
borderRadius: 100,
},
blackboard: {
marginTop: -320,
marginLeft: 200,
width: '50%',
height: 300,
},
robo: {
marginTop: 90,
marginLeft: 5,
width: 150,
height: 200,
},
textBody: {
marginTop: 130,
marginLeft: -50,
// backgroundColor: '#00008B',
width: 150,
borderRadius: 50,
padding: 5,
},
text: {
fontSize: 25,
justifyContent: 'center',
alignItems: 'center',
color: '#00008B',
borderRadius: 10,
backgroundColor: 'rgba(2, 202, 106, 0.17)',
textAlign: 'center',
fontWeight: 'bold',
},
button: {
padding: 10,
marginLeft: 5,
color: '#000000',
},
imageButton: {
width: 50,
height: 50,
},
horizontalView: {
backgroundColor: 'rgba(23, 0, 245, 0.17)',
borderRadius: 50,
flexDirection: 'row',
position: 'absolute',
bottom: 0,
marginBottom: 140,
marginLeft: 50,
padding: 7,
},
});