Commit 30526709 authored by Weesinghe W.M.P.D's avatar Weesinghe W.M.P.D

Initial commit by IT20019204

parent 6e753e51
Pipeline #6956 failed with stages
# Auto detect text files and perform LF normalization
* text=auto
*.iml
.gradle
/local.properties
/.idea/caches
/.idea/libraries
/.idea/modules.xml
/.idea/workspace.xml
/.idea/navEditor.xml
/.idea/assetWizardSettings.xml
.DS_Store
/build
/captures
.externalNativeBuild
.cxx
local.properties
/build
\ No newline at end of file
plugins {
    id 'com.android.application'
    id 'com.google.android.libraries.mapsplatform.secrets-gradle-plugin'
}

android {
    namespace 'com.example.thetrek'
    compileSdk 33

    defaultConfig {
        applicationId "com.example.thetrek"
        minSdk 26
        // NOTE(review): targetSdk (32) lags compileSdk (33); raise to 33 once
        // the API 33 behavior changes (e.g. POST_NOTIFICATIONS runtime
        // permission) have been tested.
        targetSdk 32
        versionCode 1
        versionName "1.0"
        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
    }

    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
        }
    }

    compileOptions {
        sourceCompatibility JavaVersion.VERSION_1_8
        targetCompatibility JavaVersion.VERSION_1_8
    }
}

dependencies {
    // AndroidX / UI — exactly one declaration per artifact. The older
    // duplicate declarations (appcompat 1.1.0, material 1.1.0) were removed:
    // Gradle resolves to the highest requested version anyway, so the
    // duplicates only obscured the effective dependency set.
    implementation 'androidx.appcompat:appcompat:1.6.1'
    implementation 'com.google.android.material:material:1.9.0'
    implementation 'androidx.constraintlayout:constraintlayout:2.1.4'
    implementation 'androidx.concurrent:concurrent-futures:1.1.0'
    implementation 'androidx.core:core:1.7.0'

    // Charts
    implementation 'com.github.PhilJay:MPAndroidChart:v3.1.0'

    // ARCore (Google Play Services for AR) library.
    implementation 'com.google.ar:core:1.37.0'

    // Google Play services: location, auth, maps, places
    implementation 'com.google.android.gms:play-services-location:19.0.1'
    implementation 'com.google.android.gms:play-services-auth:19.0.0'
    implementation 'com.google.android.gms:play-services-maps:17.0.1'
    implementation 'com.google.android.libraries.places:places:2.4.0'

    // Networking
    implementation 'com.android.volley:volley:1.2.1'

    // Obj - a simple Wavefront OBJ file loader
    // https://github.com/javagl/Obj
    implementation 'de.javagl:obj:0.2.1'

    implementation 'com.google.guava:guava:31.1-android'
    implementation 'org.tensorflow:tensorflow-android:1.13.1'

    // Testing. The legacy 'com.android.support.test:runner' dependency was
    // removed: it duplicates the AndroidX test runner declared below, and
    // mixing support-library and AndroidX test artifacts causes
    // manifest-merge / duplicate-class failures.
    testImplementation 'junit:junit:4.13.2'
    androidTestImplementation 'androidx.test.ext:junit:1.1.5'
    androidTestImplementation 'androidx.test.espresso:espresso-core:3.5.1'
}
\ No newline at end of file
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile
\ No newline at end of file
package com.example.thetrek;
import android.content.Context;
import androidx.test.platform.app.InstrumentationRegistry;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.*;
/**
 * Instrumented test that runs on an Android device or emulator.
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {

    /** Verifies the instrumentation targets the expected application package. */
    @Test
    public void useAppContext() {
        // Context of the app under test.
        final Context targetContext =
                InstrumentationRegistry.getInstrumentation().getTargetContext();
        assertEquals("com.example.thetrek", targetContext.getPackageName());
    }
}
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<vector
android:height="108dp"
android:width="108dp"
android:viewportHeight="108"
android:viewportWidth="108"
xmlns:android="http://schemas.android.com/apk/res/android">
<path android:fillColor="#3DDC84"
android:pathData="M0,0h108v108h-108z"/>
<path android:fillColor="#00000000" android:pathData="M9,0L9,108"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M19,0L19,108"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M29,0L29,108"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M39,0L39,108"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M49,0L49,108"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M59,0L59,108"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M69,0L69,108"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M79,0L79,108"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M89,0L89,108"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M99,0L99,108"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M0,9L108,9"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M0,19L108,19"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M0,29L108,29"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M0,39L108,39"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M0,49L108,49"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M0,59L108,59"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M0,69L108,69"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M0,79L108,79"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M0,89L108,89"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M0,99L108,99"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M19,29L89,29"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M19,39L89,39"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M19,49L89,49"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M19,59L89,59"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M19,69L89,69"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M19,79L89,79"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M29,19L29,89"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M39,19L39,89"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M49,19L49,89"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M59,19L59,89"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M69,19L69,89"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M79,19L79,89"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
</vector>
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background"/>
<foreground android:drawable="@mipmap/ic_launcher_foreground"/>
</adaptive-icon>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background"/>
<foreground android:drawable="@mipmap/ic_launcher_foreground"/>
</adaptive-icon>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools">
    <!-- NOTE(review): duplicate declarations of ACCESS_FINE_LOCATION,
         FOREGROUND_SERVICE and WAKE_LOCK were removed; each permission is
         declared exactly once below. -->
    <uses-permission android:name="android.permission.ACCESS_FINE_LOCATION" />
    <uses-permission android:name="android.permission.ACCESS_COARSE_LOCATION" />
    <uses-permission android:name="android.permission.ACCESS_NOTIFICATION_POLICY" />
    <uses-permission android:name="android.permission.WAKE_LOCK" />
    <uses-permission android:name="android.permission.FOREGROUND_SERVICE" />
    <uses-permission
        android:name="android.permission.PACKAGE_USAGE_STATS"
        tools:ignore="ProtectedPermissions" />
    <uses-permission android:name="android.permission.CAMERA" />
    <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
    <uses-permission android:name="android.permission.INTERNET" />
<!--
Limits app visibility in the Google Play Store to ARCore supported devices
(https://developers.google.com/ar/devices).
-->
<uses-permission android:name="android.permission.BODY_SENSORS"/>
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" android:maxSdkVersion="28"/>
<uses-permission android:name="android.permission.MANAGE_EXTERNAL_STORAGE"/>
<uses-feature
android:name="android.hardware.camera.ar"
android:required="true" />
<uses-feature
android:glEsVersion="0x00020000"
android:required="true" />
<uses-permission android:name="android.permission.POST_NOTIFICATIONS" />
<application
android:allowBackup="true"
android:dataExtractionRules="@xml/data_extraction_rules"
android:fullBackupContent="@xml/backup_rules"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/Theme.TheTrek"
android:usesCleartextTraffic="false"
tools:ignore="GoogleAppIndexingWarning"
tools:targetApi="31">
<receiver
android:name=".screens.mobileUsage.ScreenOnReceiver"
android:enabled="true"
android:exported="false">
<intent-filter>
<action android:name="android.intent.action.SCREEN_ON" />
</intent-filter>
</receiver>
<service
android:name=".screens.mobileUsage.NotificationService"
android:exported="false" />
<service
android:name=".screens.mobileUsage.ScreenMonitoringService"
android:enabled="true"
android:exported="false" />
<service
android:name=".UsageMonitorService"
android:exported="false" />
<activity
android:name=".geospatial.RewardingScreenActivity"
android:exported="false">
<meta-data
android:name="android.app.lib_name"
android:value="" />
</activity>
<activity
android:name=".geospatial.GeospatialActivity"
android:configChanges="orientation|screenSize"
android:exported="true"
android:screenOrientation="locked"
android:theme="@style/Theme.AppCompat.NoActionBar">
            <!-- NOTE(review): this intent-filter declares the LAUNCHER category
                 but its MAIN action is commented out, so the filter never
                 matches anything. Either restore the action or delete the
                 filter (MainActivity below already holds MAIN/LAUNCHER). -->
            <intent-filter>
                <!-- <action android:name="android.intent.action.MAIN"/> -->
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
</activity>
<activity
android:name=".screens.rewards.RewardsActivity"
android:exported="false">
<meta-data
android:name="android.app.lib_name"
android:value="" />
</activity>
<activity
android:name=".screens.locationsHandler.AddLocations"
android:exported="false">
<meta-data
android:name="android.app.lib_name"
android:value="" />
</activity>
<activity
android:name=".screens.locationsHandler.LocationsCrud"
android:exported="false">
<meta-data
android:name="android.app.lib_name"
android:value="" />
</activity>
<activity
android:name=".screens.mobileUsage.MobileUsageActivity"
android:exported="false">
<meta-data
android:name="android.app.lib_name"
android:value="" />
</activity>
<activity
android:name=".screens.getPhysical.GetPhysicalActivity"
android:exported="false">
<meta-data
android:name="android.app.lib_name"
android:value="" />
</activity>
<activity
android:name=".screens.findPlaces.FindPlacesActivity"
android:exported="false">
<meta-data
android:name="android.app.lib_name"
android:value="" />
</activity>
<activity
android:name=".SuggestActivity"
android:exported="false">
<meta-data
android:name="android.app.lib_name"
android:value="" />
</activity>
<activity
android:name=".HomeActivity"
android:exported="false">
<meta-data
android:name="android.app.lib_name"
android:value="" />
</activity>
<activity
android:name=".MonitorActivity"
android:exported="false">
<meta-data
android:name="android.app.lib_name"
android:value="" />
</activity>
<activity
android:name=".MainActivityLogin"
android:exported="false">
<meta-data
android:name="android.app.lib_name"
android:value="" />
</activity>
<activity
android:name=".SignUp"
android:exported="false">
<meta-data
android:name="android.app.lib_name"
android:value="" />
</activity>
<activity
android:name=".MainActivity"
android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
<meta-data
android:name="android.app.lib_name"
android:value="" />
</activity>
<activity
android:name=".MainActivityRec"
android:exported="false">
<meta-data
android:name="android.app.lib_name"
android:value="" />
</activity>
<activity
android:name=".MainActivityRecognition"
android:exported="false">
<meta-data
android:name="android.app.lib_name"
android:value="" />
</activity>
<meta-data
android:name="com.google.ar.core"
android:value="required" />
        <!-- SECURITY(review): Google API keys are hard-coded here and committed
             to version control. Move them to local.properties and inject them
             via the secrets-gradle-plugin that build.gradle already applies,
             then rotate the exposed key. -->
        <meta-data
            android:name="com.google.android.ar.API_KEY"
            android:value="AIzaSyAynx5iCqfkqmUyUp3b2FNzf3OCm1dOrbU"
            />
        <meta-data
            android:name="com.google.android.geo.API_KEY"
            android:value="AIzaSyAynx5iCqfkqmUyUp3b2FNzf3OCm1dOrbU" />
</application>
</manifest>
\ No newline at end of file
The file "dfg.raw" is a raw image file of dimensions 64x64 with two color
channels stored in 16-bit floats. It can be regenerated by using the script
"generate_dfg_texture.py" provided in the ARCore SDK under /tools/.
This diff is collapsed.
This diff is collapsed.
#version 300 es
/*
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

precision mediump float;

// Full-screen-quad fragment shader: samples u_Texture with a vertically
// flipped V coordinate and writes the opaque RGB result.
uniform sampler2D u_Texture;

in vec2 v_TexCoord;

layout(location = 0) out vec4 o_FragColor;

void main() {
  // Mirror texture coordinates over the X axis (flip V): the source texture
  // rows run opposite to GL's bottom-left texture-coordinate origin.
  vec2 texCoord = vec2(v_TexCoord.x, 1.0 - v_TexCoord.y);
  // Force alpha to 1.0; only RGB is taken from the texture.
  // (The redundant bare `return;` at the end of main was removed.)
  o_FragColor = vec4(texture(u_Texture, texCoord).rgb, 1.0);
}
#version 300 es
/*
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// Textured-mesh vertex shader: projects each vertex with the combined
// model-view-projection matrix and forwards the texture coordinate unchanged.
uniform mat4 u_ModelView;
uniform mat4 u_ModelViewProjection;

// NOTE(review): u_ModelView and a_Normal are declared but never read in this
// shader — presumably kept so the vertex layout matches sibling shaders;
// confirm before removing.
layout(location = 0) in vec4 a_Position;
layout(location = 1) in vec2 a_TexCoord;
layout(location = 2) in vec3 a_Normal;

out vec2 v_TexCoord;

void main() {
  v_TexCoord = a_TexCoord;
  gl_Position = u_ModelViewProjection * a_Position;
}
#version 300 es
/*
 * Copyright 2017 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// Camera-background fragment shader: copies the AR camera image (an external
// OES texture fed by the camera stream) straight to the framebuffer.
#extension GL_OES_EGL_image_external_essl3 : require

precision mediump float;

// The camera image; samplerExternalOES requires the extension above.
uniform samplerExternalOES u_CameraColorTexture;

in vec2 v_CameraTexCoord;

layout(location = 0) out vec4 o_FragColor;

// Straight passthrough sample — no color transform is applied.
void main() { o_FragColor = texture(u_CameraColorTexture, v_CameraTexCoord); }
#version 300 es
/*
 * Copyright 2017 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// Background vertex shader: positions arrive already in clip space (a
// full-screen quad), so they pass through untransformed.
layout(location = 0) in vec4 a_Position;
layout(location = 1) in vec2 a_CameraTexCoord;
// The virtual scene texture coordinate is unused in the background shader, but
// is defined in the BackgroundRenderer Mesh.
layout(location = 2) in vec2 a_VirtualSceneTexCoord;

out vec2 v_CameraTexCoord;

void main() {
  gl_Position = a_Position;
  v_CameraTexCoord = a_CameraTexCoord;
}
#version 300 es
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
precision mediump float;
// This shader pair shows the depth estimation instead of the camera image as
// the background. This behavior is mostly only useful as a demonstration of the
// depth feature.
uniform sampler2D u_CameraDepthTexture;
uniform sampler2D u_ColorMap;
in vec2 v_CameraTexCoord;
layout(location = 0) out vec4 o_FragColor;
// Decodes the ARCore depth texture at `depthUv` into millimeters.
float Depth_GetCameraDepthInMillimeters(const sampler2D depthTexture,
                                        const vec2 depthUv) {
  // Depth is packed into the red and green components of its texture.
  // The texture is a normalized format, storing millimeters.
  vec3 packedDepthAndVisibility = texture(depthTexture, depthUv).xyz;
  // Low byte in R, high byte in G: mm = R*255 + G*255*256.
  return dot(packedDepthAndVisibility.xy, vec2(255.0, 256.0 * 255.0));
}
// Returns a color corresponding to the depth passed in.
//
// Uses Turbo color mapping:
// https://ai.googleblog.com/2019/08/turbo-improved-rainbow-colormap-for.html
//
// The input x is normalized in range 0 to 1.
// Looks up the Turbo palette stored in u_ColorMap; x in [0, 1] selects the
// horizontal position, the fixed 0.5 samples the single palette row.
vec3 Depth_GetColorVisualization(float x) {
  return texture(u_ColorMap, vec2(x, 0.5)).rgb;
}
// Returns linear interpolation position of value between min and max bounds.
// E.g. InverseLerp(1100, 1000, 2000) returns 0.1.
// Inverse linear interpolation, clamped to [0, 1]: where does `value` fall
// between min_bound and max_bound?
float InverseLerp(float value, float min_bound, float max_bound) {
  return clamp((value - min_bound) / (max_bound - min_bound), 0.0, 1.0);
}
// Maps the camera depth at this fragment onto the Turbo color palette:
// 0–8 m uses the first half of the palette, 8–30 m the second half, and
// invalid depth (0) renders black.
void main() {
  const float kMidDepthMeters = 8.0;
  const float kMaxDepthMeters = 30.0;

  // Interpolating in units of meters is more stable, due to limited floating
  // point precision on GPU.
  float depth_mm =
      Depth_GetCameraDepthInMillimeters(u_CameraDepthTexture, v_CameraTexCoord);
  float depth_meters = depth_mm * 0.001;

  // Selects the portion of the color palette to use.
  float normalizedDepth = 0.0;
  if (depth_meters < kMidDepthMeters) {
    // Short-range depth (0m to 8m) maps to first half of the color palette.
    normalizedDepth = InverseLerp(depth_meters, 0.0, kMidDepthMeters) * 0.5;
  } else {
    // Long-range depth (8m to 30m) maps to second half of the color palette.
    normalizedDepth =
        InverseLerp(depth_meters, kMidDepthMeters, kMaxDepthMeters) * 0.5 + 0.5;
  }

  // Converts depth to color by with the selected value in the color map.
  vec4 depth_color = vec4(Depth_GetColorVisualization(normalizedDepth), 1.0);

  // Invalid depth (pixels with value 0) mapped to black:
  // sign(0.0) == 0.0 zeroes the RGB channels.
  depth_color.rgb *= sign(depth_meters);
  o_FragColor = depth_color;
}
#version 300 es
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// This shader pair shows the depth estimation instead of the camera image as
// the background. This behavior is mostly only useful as a demonstration of the
// depth feature.

// Full-screen quad: positions arrive in clip space and pass through unchanged.
layout(location = 0) in vec4 a_Position;
layout(location = 1) in vec2 a_CameraTexCoord;
// The virtual scene texture coordinate is unused in the background shader, but
// is defined in the BackgroundRenderer Mesh.
layout(location = 2) in vec2 a_VirtualSceneTexCoord;

out vec2 v_CameraTexCoord;

void main() {
  gl_Position = a_Position;
  v_CameraTexCoord = a_CameraTexCoord;
}
#version 300 es
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
precision mediump float;
// The number of mipmap levels in the cubemap is equal to the number of
// roughness levels that we precalculate for filtering the cubemap for roughness
// in real-time.
const int kNumberOfRoughnessLevels = NUMBER_OF_MIPMAP_LEVELS;
// The number of importance samples to use for roughness filtering of the
// cubemap.
const int kNumberOfImportanceSamples = NUMBER_OF_IMPORTANCE_SAMPLES;
struct ImportanceSampleCacheEntry {
vec3 direction; // Direction to sample in tangent space
float contribution; // Weighted contribution of the sample's radiance
float level; // The mipmap level to sample from the cubemap. Can be
// in-between integer levels for trilinear filtering.
};
struct ImportanceSampleCache {
int number_of_entries;
ImportanceSampleCacheEntry entries[kNumberOfImportanceSamples];
};
// This array's length is one less than the number of roughness levels since the
// first roughness level can be skipped.
uniform ImportanceSampleCache
u_ImportanceSampleCaches[kNumberOfRoughnessLevels - 1];
// The source radiance cubemap to be filtered.
uniform samplerCube u_Cubemap;
// The roughness level that we are filtering for.
uniform int u_RoughnessLevel;
in vec2 v_Position;
#ifdef PX_LOCATION
layout(location = PX_LOCATION) out vec4 o_FragColorPX;
#endif
#ifdef NX_LOCATION
layout(location = NX_LOCATION) out vec4 o_FragColorNX;
#endif
#ifdef PY_LOCATION
layout(location = PY_LOCATION) out vec4 o_FragColorPY;
#endif
#ifdef NY_LOCATION
layout(location = NY_LOCATION) out vec4 o_FragColorNY;
#endif
#ifdef PZ_LOCATION
layout(location = PZ_LOCATION) out vec4 o_FragColorPZ;
#endif
#ifdef NZ_LOCATION
layout(location = NZ_LOCATION) out vec4 o_FragColorNZ;
#endif
// Prefilters the radiance cubemap in direction `n` for the roughness level
// selected by u_RoughnessLevel, accumulating the precomputed importance
// samples (direction, mip level, weight) in a tangent frame built around `n`.
vec4 Filter(const vec3 n) {
  if (u_RoughnessLevel == 0) {
    // Roughness level 0 is just a straight copy.
    return vec4(textureLod(u_Cubemap, n, 0.0).rgb, 1.0);
  }

  // Build an orthonormal tangent-to-world basis around n; the `up` fallback
  // avoids a degenerate cross product when n is (nearly) the Z axis.
  vec3 up = abs(n.z) < 0.9999 ? vec3(0.0, 0.0, 1.0) : vec3(1.0, 0.0, 0.0);
  mat3 tangentToWorld;
  tangentToWorld[0] = normalize(cross(up, n));
  tangentToWorld[1] = cross(n, tangentToWorld[0]);
  tangentToWorld[2] = n;

  // XXX: This clamp should not be necessary, but is here due to a compiler
  // optimization bug affecting certain devices.
  //
  // Some drivers pre-calculate uniform expressions to reduce redundant
  // computations. Certain drivers will incorrectly ignore uniform control flow
  // and try to pre-calculate expressions that wouldn't be executed. So even
  // though we explicitly short-circuit if `u_RoughnessLevel' is 0, the compiled
  // shader still tries to calculate `u_ImportanceSampleCaches[u_RoughnessLevel
  // - 1]', with obviously bad results.
  ImportanceSampleCache cache = u_ImportanceSampleCaches[max(0, u_RoughnessLevel - 1)];

  // Weighted sum of the cached importance samples, each taken at its own
  // (possibly fractional) mip level for trilinear filtering.
  vec3 radiance = vec3(0.0);
  for (int i = 0; i < cache.number_of_entries; ++i) {
    ImportanceSampleCacheEntry entry = cache.entries[i];
    radiance +=
        textureLod(u_Cubemap, tangentToWorld * entry.direction, entry.level)
            .rgb *
        entry.contribution;
  }
  return vec4(radiance, 1.0);
}
// Writes one filtered sample per enabled cubemap face. v_Position is the
// full-screen-quad position in [-1, 1]; each face maps (u, v) to that face's
// outward direction, and the compile-time *_LOCATION defines select which
// face attachments this pass renders.
void main() {
  float u = v_Position.x;
  float v = v_Position.y;
#ifdef PX_LOCATION
  o_FragColorPX = Filter(normalize(vec3(+1, -v, -u)));
#endif
#ifdef NX_LOCATION
  o_FragColorNX = Filter(normalize(vec3(-1, -v, +u)));
#endif
#ifdef PY_LOCATION
  o_FragColorPY = Filter(normalize(vec3(+u, +1, +v)));
#endif
#ifdef NY_LOCATION
  o_FragColorNY = Filter(normalize(vec3(+u, -1, -v)));
#endif
#ifdef PZ_LOCATION
  o_FragColorPZ = Filter(normalize(vec3(+u, -v, +1)));
#endif
#ifdef NZ_LOCATION
  o_FragColorNZ = Filter(normalize(vec3(-u, -v, -1)));
#endif
}
#version 300 es
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// Full-screen-quad vertex shader: passes the clip-space position through and
// forwards its XY (in [-1, 1]) as v_Position for the fragment stage.
layout(location = 0) in vec4 a_Position;

out vec2 v_Position;

void main() {
  gl_Position = a_Position;
  v_Position = a_Position.xy;
}
This diff is collapsed.
#version 300 es
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// Lit-object vertex shader: computes view-space position and normal for the
// fragment stage and projects the vertex to clip space.
uniform mat4 u_ModelView;
uniform mat4 u_ModelViewProjection;

layout(location = 0) in vec4 a_Position;
layout(location = 1) in vec2 a_TexCoord;
layout(location = 2) in vec3 a_Normal;

out vec3 v_ViewPosition;
out vec3 v_ViewNormal;
out vec2 v_TexCoord;

void main() {
  v_ViewPosition = (u_ModelView * a_Position).xyz;
  // NOTE(review): the normal is transformed by u_ModelView directly rather
  // than its inverse-transpose, which is only correct when the model matrix
  // has uniform scale — confirm callers never apply non-uniform scaling.
  v_ViewNormal = normalize((u_ModelView * vec4(a_Normal, 0.0)).xyz);
  v_TexCoord = a_TexCoord;
  gl_Position = u_ModelViewProjection * a_Position;
}
#version 300 es
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
precision mediump float;
// The virtual scene as rendered to a texture via a framebuffer. This will be
// composed with the background image depending on which modes were set in
// DepthCompositionRenderer.setDepthModes.
uniform sampler2D u_VirtualSceneColorTexture;
#if USE_OCCLUSION
// The AR camera depth texture.
uniform sampler2D u_CameraDepthTexture;
// The depth texture for the virtual scene.
uniform sampler2D u_VirtualSceneDepthTexture;
// The near and far clipping planes, used to transform the virtual scene depth
// back into view space to compare with the camera depth texture.
uniform float u_ZNear;
uniform float u_ZFar;
// The aspect ratio of the screen. This is used during to create uniform
// blurring for occluded objects.
uniform float u_DepthAspectRatio;
#endif // USE_OCCLUSION
#if USE_OCCLUSION
in vec2 v_CameraTexCoord;
#endif // USE_OCCLUSION
in vec2 v_VirtualSceneTexCoord;
layout(location = 0) out vec4 o_FragColor;
#if USE_OCCLUSION
// Decodes the ARCore camera depth texture at `depthUv` into millimeters.
float Depth_GetCameraDepthInMillimeters(const sampler2D depthTexture,
                                        const vec2 depthUv) {
  // Depth is packed into the red and green components of its texture.
  // The texture is a normalized format, storing millimeters.
  vec3 packedDepthAndVisibility = texture(depthTexture, depthUv).xyz;
  // Low byte in R, high byte in G: mm = R*255 + G*255*256.
  return dot(packedDepthAndVisibility.xy, vec2(255.0, 256.0 * 255.0));
}
// Converts a nonlinear depth-buffer sample from the virtual scene back into
// view-space depth in millimeters, using the near/far planes to invert the
// standard perspective projection's depth mapping.
float Depth_GetVirtualSceneDepthMillimeters(const sampler2D depthTexture,
                                            const vec2 depthUv, float zNear,
                                            float zFar) {
  // Determine the depth of the virtual scene fragment in millimeters.
  const float kMetersToMillimeters = 1000.0;
  // This value was empirically chosen to correct errors with objects appearing
  // to phase through the floor. In millimeters.
  const float kBias = -80.0;
  // Map the [0, 1] depth-buffer value to NDC [-1, 1], then invert the
  // perspective depth transform to recover linear view-space depth.
  float ndc = 2.0 * texture(depthTexture, depthUv).x - 1.0;
  return 2.0 * zNear * zFar / (zFar + zNear - ndc * (zFar - zNear)) *
             kMetersToMillimeters +
         kBias;
}
// Returns a value between 0.0 (completely visible) and 1.0 (completely
// occluded), representing how visible or occluded is the pixel in relation to
// the depth map.
// Soft visibility test of a virtual fragment (at assetDepthMm) against the
// camera depth map sampled at `depthUv`.
float Depth_GetOcclusion(const sampler2D depthTexture, const vec2 depthUv,
                         float assetDepthMm) {
  float depthMm = Depth_GetCameraDepthInMillimeters(depthTexture, depthUv);

  // Instead of a hard z-buffer test, allow the asset to fade into the
  // background along a 2 * kDepthTolerancePerMm * assetDepthMm
  // range centered on the background depth.
  const float kDepthTolerancePerMm = 0.01;
  // Linear ramp through the tolerance band, clamped so the result stays in
  // [0 = fully visible, 1 = fully occluded].
  return clamp(1.0 -
                   0.5 * (depthMm - assetDepthMm) /
                       (kDepthTolerancePerMm * assetDepthMm) +
                   0.5,
               0.0, 1.0);
}
// Computes a 5x5 weighted average of the occlusion value around `uv`,
// softening the silhouette where virtual content meets the real-world depth
// map. `u_DepthAspectRatio` keeps the blur footprint square on screen.
float Depth_GetBlurredOcclusionAroundUV(const sampler2D depthTexture,
const vec2 uv, float assetDepthMm) {
// Kernel used:
// 0   4   7   4   0
// 4   16  26  16  4
// 7   26  41  26  7
// 4   16  26  16  4
// 0   4   7   4   0
const float kKernelTotalWeights = 269.0;
float sum = 0.0;
// Blur radius in texture space; the y component is scaled by the aspect
// ratio so the kernel is isotropic in screen space.
const float kOcclusionBlurAmount = 0.01;
vec2 blurriness =
vec2(kOcclusionBlurAmount, kOcclusionBlurAmount * u_DepthAspectRatio);
// Weight-4 taps: the eight "knight's move" offsets of the kernel.
float current = 0.0;
current += Depth_GetOcclusion(
depthTexture, uv + vec2(-1.0, -2.0) * blurriness, assetDepthMm);
current += Depth_GetOcclusion(
depthTexture, uv + vec2(+1.0, -2.0) * blurriness, assetDepthMm);
current += Depth_GetOcclusion(
depthTexture, uv + vec2(-1.0, +2.0) * blurriness, assetDepthMm);
current += Depth_GetOcclusion(
depthTexture, uv + vec2(+1.0, +2.0) * blurriness, assetDepthMm);
current += Depth_GetOcclusion(
depthTexture, uv + vec2(-2.0, +1.0) * blurriness, assetDepthMm);
current += Depth_GetOcclusion(
depthTexture, uv + vec2(+2.0, +1.0) * blurriness, assetDepthMm);
current += Depth_GetOcclusion(
depthTexture, uv + vec2(-2.0, -1.0) * blurriness, assetDepthMm);
current += Depth_GetOcclusion(
depthTexture, uv + vec2(+2.0, -1.0) * blurriness, assetDepthMm);
sum += current * 4.0;
// Weight-7 taps: the four axis-aligned offsets at distance 2.
current = 0.0;
current += Depth_GetOcclusion(
depthTexture, uv + vec2(-2.0, -0.0) * blurriness, assetDepthMm);
current += Depth_GetOcclusion(
depthTexture, uv + vec2(+2.0, +0.0) * blurriness, assetDepthMm);
current += Depth_GetOcclusion(
depthTexture, uv + vec2(+0.0, +2.0) * blurriness, assetDepthMm);
current += Depth_GetOcclusion(
depthTexture, uv + vec2(-0.0, -2.0) * blurriness, assetDepthMm);
sum += current * 7.0;
// Weight-16 taps: the four diagonal neighbors.
current = 0.0;
current += Depth_GetOcclusion(
depthTexture, uv + vec2(-1.0, -1.0) * blurriness, assetDepthMm);
current += Depth_GetOcclusion(
depthTexture, uv + vec2(+1.0, -1.0) * blurriness, assetDepthMm);
current += Depth_GetOcclusion(
depthTexture, uv + vec2(-1.0, +1.0) * blurriness, assetDepthMm);
current += Depth_GetOcclusion(
depthTexture, uv + vec2(+1.0, +1.0) * blurriness, assetDepthMm);
sum += current * 16.0;
// Weight-26 taps: the four axis-aligned immediate neighbors.
current = 0.0;
current += Depth_GetOcclusion(
depthTexture, uv + vec2(+0.0, +1.0) * blurriness, assetDepthMm);
current += Depth_GetOcclusion(
depthTexture, uv + vec2(-0.0, -1.0) * blurriness, assetDepthMm);
current += Depth_GetOcclusion(
depthTexture, uv + vec2(-1.0, -0.0) * blurriness, assetDepthMm);
current += Depth_GetOcclusion(
depthTexture, uv + vec2(+1.0, +0.0) * blurriness, assetDepthMm);
sum += current * 26.0;
// Weight-41 center tap, then normalize by the total kernel weight.
sum += Depth_GetOcclusion(depthTexture, uv, assetDepthMm) * 41.0;
return sum / kKernelTotalWeights;
}
#endif // USE_OCCLUSION
// Composites the virtual scene over the camera image, attenuating fragments
// that the real-world depth map says are behind physical geometry.
void main() {
  o_FragColor = texture(u_VirtualSceneColorTexture, v_VirtualSceneTexCoord);
#if USE_OCCLUSION
  if (o_FragColor.a == 0.0) {
    // There's no sense in calculating occlusion for a fully transparent pixel.
    return;
  }
  float assetDepthMm = Depth_GetVirtualSceneDepthMillimeters(
      u_VirtualSceneDepthTexture, v_VirtualSceneTexCoord, u_ZNear, u_ZFar);
  float occlusion = Depth_GetBlurredOcclusionAroundUV(
      u_CameraDepthTexture, v_CameraTexCoord, assetDepthMm);
  // If the above blur operation is too expensive, you can replace it with the
  // following lines.
  /* float occlusion = Depth_GetOcclusion(u_CameraDepthTexture,
                          v_CameraTexCoord, assetDepthMm); */
  // The virtual object mask is blurred, we make the falloff steeper to simulate
  // erosion operator. This is needed to make the fully occluded virtual object
  // invisible.
  float objectMaskEroded = pow(occlusion, 10.0);
  // occlusionTransition equal to 1 means fully occluded object. This operation
  // boosts occlusion near the edges of the virtual object, but does not affect
  // occlusion within the object.
  float occlusionTransition =
      clamp(occlusion * (2.0 - objectMaskEroded), 0.0, 1.0);
  // Clips occlusion if we want to partially show fully occluded object.
  const float kMaxOcclusion = 1.0;
  occlusionTransition = min(occlusionTransition, kMaxOcclusion);
  // BUG FIX: previously this multiplied by `1.0 - occlusion`, leaving the
  // edge-boosted, clamped occlusionTransition computed above entirely unused
  // (dead code). Apply the transition value as intended.
  o_FragColor *= 1.0 - occlusionTransition;
#endif  // USE_OCCLUSION
}
#version 300 es
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Pass-through vertex shader for the full-screen occlusion composite: the
// quad position is forwarded unchanged (already in clip space), along with
// the texture coordinates each fragment needs.
layout(location = 0) in vec4 a_Position;
layout(location = 1) in vec2 a_CameraTexCoord;
layout(location = 2) in vec2 a_VirtualSceneTexCoord;
out vec2 v_VirtualSceneTexCoord;
#if USE_OCCLUSION
// Camera-image coordinates are only needed when occlusion is enabled.
out vec2 v_CameraTexCoord;
#endif
void main() {
  v_VirtualSceneTexCoord = a_VirtualSceneTexCoord;
#if USE_OCCLUSION
  v_CameraTexCoord = a_CameraTexCoord;
#endif
  gl_Position = a_Position;
}
#version 300 es
/*
 * Copyright 2017 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
precision highp float;
// Grid texture: the R channel carries the dot pattern, G the line pattern.
uniform sampler2D u_Texture;
uniform vec4 u_GridControl;  // x: dotThreshold, y: lineThreshold,
                             // z: lineFadeShrink, w: occlusionShrink
in vec3 v_TexCoordAlpha;
layout(location = 0) out vec4 o_FragColor;
void main() {
  vec4 control = texture(u_Texture, v_TexCoordAlpha.xy);
  float dotScale = v_TexCoordAlpha.z;
  // Line intensity shrinks with the plane's alpha, floored at zero.
  float lineFade =
      max(0.0, u_GridControl.z * v_TexCoordAlpha.z - (u_GridControl.z - 1.0));
  // Pick the strongest visible feature at this texel: dots dominate, then
  // grid lines, then a faint base fill.
  float alpha;
  if (control.r * dotScale > u_GridControl.x) {
    alpha = 1.0;
  } else if (control.g > u_GridControl.y) {
    alpha = lineFade;
  } else {
    alpha = 0.1 * lineFade;
  }
  o_FragColor = vec4(alpha * v_TexCoordAlpha.z);
}
#version 300 es
/*
 * Copyright 2017 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Vertex shader for rendering detected planes: positions are given as
// (x, z, alpha) in the plane's local frame (y is always 0), and texture
// coordinates are derived by projecting the world position onto a basis
// built from the plane normal.
uniform mat4 u_Model;
uniform mat4 u_ModelViewProjection;
uniform mat2 u_PlaneUvMatrix;
uniform vec3 u_Normal;
layout(location = 0) in vec3 a_XZPositionAlpha; // (x, z, alpha)
out vec3 v_TexCoordAlpha;
void main() {
vec4 local_pos = vec4(a_XZPositionAlpha.x, 0.0, a_XZPositionAlpha.y, 1.0);
vec4 world_pos = u_Model * local_pos;
// Construct two vectors that are orthogonal to the normal.
// This arbitrary choice is not co-linear with either horizontal
// or vertical plane normals.
const vec3 arbitrary = vec3(1.0, 1.0, 0.0);
vec3 vec_u = normalize(cross(u_Normal, arbitrary));
vec3 vec_v = normalize(cross(u_Normal, vec_u));
// Project vertices in world frame onto vec_u and vec_v to get stable UVs,
// then apply the plane's UV transform; alpha passes through unchanged.
vec2 uv = vec2(dot(world_pos.xyz, vec_u), dot(world_pos.xyz, vec_v));
v_TexCoordAlpha = vec3(u_PlaneUvMatrix * uv, a_XZPositionAlpha.z);
gl_Position = u_ModelViewProjection * local_pos;
}
#version 300 es
/*
 * Copyright 2017 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Trivial fragment shader: paints every fragment with a single uniform color
// (used e.g. for the point cloud).
precision mediump float;
uniform vec4 u_Color;
out vec4 o_FragColor;
void main() {
o_FragColor = u_Color;
}
#version 300 es
/*
 * Copyright 2017 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Point-cloud vertex shader: projects each point into clip space and sets a
// uniform screen-space point size. Only xyz of the attribute is used; w is
// forced to 1.0.
uniform mat4 u_ModelViewProjection;
uniform float u_PointSize;
layout(location = 0) in vec4 a_Position;
void main() {
gl_Position = u_ModelViewProjection * vec4(a_Position.xyz, 1.0);
gl_PointSize = u_PointSize;
}
/*
 * Copyright 2023 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// GLSL ES 1.00 fragment shader (no #version directive): outputs the color
// interpolated from the vertex stage unchanged.
precision mediump float;
varying vec4 v_Color;
void main() {
gl_FragColor = v_Color;
}
/*
 * Copyright 2023 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// GLSL ES 1.00 vertex shader for uniformly-colored points: projects each
// vertex, sets the raster point size, and forwards the uniform color to the
// fragment stage.
uniform mat4 u_ModelViewProjection;
uniform vec4 u_Color;
uniform float u_PointSize;
attribute vec4 a_Position;
varying vec4 v_Color;
void main() {
  gl_Position = u_ModelViewProjection * vec4(a_Position.xyz, 1.0);
  gl_PointSize = u_PointSize;
  v_Color = u_Color;
}
package com.example.thetrek;
import android.content.Context;
import org.tensorflow.contrib.android.TensorFlowInferenceInterface;
/**
 * Thin wrapper around a frozen TensorFlow human-activity-recognition (HAR)
 * model bundled in the APK assets. Feeds a single window of sensor data to
 * the LSTM graph and returns the softmax class probabilities.
 */
public class HARClassifier {

    static {
        // Native runtime backing TensorFlowInferenceInterface.
        System.loadLibrary("tensorflow_inference");
    }

    private static final String MODEL_FILE = "file:///android_asset/frozen_HAR.pb";
    private static final String INPUT_NODE = "LSTM_1_input";
    private static final String OUTPUT_NODE = "Dense_2/Softmax";
    // Derived from OUTPUT_NODE so the node name lives in exactly one place
    // (previously the literal was duplicated in both constants).
    private static final String[] OUTPUT_NODES = {OUTPUT_NODE};
    // One window of 100 time steps x 12 sensor features.
    private static final long[] INPUT_SIZE = {1, 100, 12};
    // Number of activity classes produced by the softmax layer.
    private static final int OUTPUT_SIZE = 7;

    // Final: the interface is created once in the constructor and never
    // reassigned.
    private final TensorFlowInferenceInterface inferenceInterface;

    public HARClassifier(final Context context) {
        inferenceInterface = new TensorFlowInferenceInterface(context.getAssets(), MODEL_FILE);
    }

    /**
     * Runs one inference pass.
     *
     * @param data flattened sensor window of length 100 * 12, matching
     *     INPUT_SIZE — TODO confirm callers always supply exactly 1200 floats
     * @return softmax probabilities in label order:
     *     Biking, Downstairs, Jogging, Sitting, Standing, Upstairs, Walking
     */
    public float[] predictProbabilities(float[] data) {
        float[] result = new float[OUTPUT_SIZE];
        inferenceInterface.feed(INPUT_NODE, data, INPUT_SIZE);
        inferenceInterface.run(OUTPUT_NODES);
        inferenceInterface.fetch(OUTPUT_NODE, result);
        return result;
    }
}
package com.example.thetrek;
import androidx.appcompat.app.AppCompatActivity;
import android.content.Intent;
import android.database.Cursor;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import com.example.thetrek.db.LocationDAO;
import com.example.thetrek.db.LocationsDbHelper;
import com.example.thetrek.db.RewardDbHelper;
import com.example.thetrek.db.RewardStrings;
import com.example.thetrek.geospatial.GeospatialActivity;
import com.example.thetrek.screens.findPlaces.FindPlacesActivity;
import com.example.thetrek.screens.getPhysical.GetPhysicalActivity;
import com.example.thetrek.screens.locationsHandler.AddLocations;
import com.example.thetrek.screens.locationsHandler.Location;
import com.example.thetrek.screens.locationsHandler.LocationsCrud;
import com.example.thetrek.screens.mobileUsage.MobileUsageActivity;
import com.example.thetrek.screens.rewards.Reward;
import com.example.thetrek.screens.rewards.RewardsActivity;
import java.util.ArrayList;
import java.util.List;
/**
 * Home dashboard: every button navigates to one feature screen. Also opens
 * the locations DAO used by {@link #getLocationDataFromDatabase()}.
 */
public class HomeActivity extends AppCompatActivity {

    private LocationDAO locationDAO;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_home);
        // getSupportActionBar() is null when the theme provides no action
        // bar; guard instead of risking an NPE on startup.
        if (getSupportActionBar() != null) {
            getSupportActionBar().hide();
        }
        locationDAO = new LocationDAO(this);
        locationDAO.open();

        // All home-screen buttons do the same thing — start another activity —
        // so bind them through one helper instead of eight copies of the same
        // anonymous listener. The previous eager, unused read of the whole
        // locations table at startup was removed as dead code.
        bindNavigation(R.id.btn_homeActivity_findPlaces, FindPlacesActivity.class);
        bindNavigation(R.id.btn_activityRecognition, MainActivityRecognition.class);
        bindNavigation(R.id.btn_recorder, MainActivityRec.class);
        bindNavigation(R.id.btn_homeActivity_mobileUsage, MobileUsageActivity.class);
        bindNavigation(R.id.btn_predictor, SuggestActivity.class);
        bindNavigation(R.id.btn_homeActivity_getPhysical, GeospatialActivity.class);
        bindNavigation(R.id.btn_homeActivity_rewards, RewardsActivity.class);
        // NOTE(review): an earlier (commented-out) version routed to
        // AddLocations when the database was empty; if that flow is revived,
        // branch on getLocationDataFromDatabase().isEmpty() here.
        bindNavigation(R.id.btn_homeActivity_locationsHandler, LocationsCrud.class);
    }

    /** Wires the button with {@code buttonId} to launch {@code destination} on click. */
    private void bindNavigation(int buttonId, Class<?> destination) {
        Button button = (Button) findViewById(buttonId);
        button.setOnClickListener(v -> {
            Intent intent = new Intent(v.getContext(), destination);
            v.getContext().startActivity(intent);
        });
    }

    /**
     * Reads every saved location row into memory.
     *
     * @return all rows as {@link Location} objects; empty list when the table
     *     has no rows or the query returned no cursor
     */
    private List<Location> getLocationDataFromDatabase() {
        List<Location> locationList = new ArrayList<>();
        Cursor cursor = locationDAO.getAllLocations();
        if (cursor == null) {
            return locationList;
        }
        // try/finally so the cursor is closed even if a row read throws.
        try {
            if (cursor.moveToFirst()) {
                int idIndex = cursor.getColumnIndex(LocationsDbHelper.COLUMN_ID);
                int latitudeIndex = cursor.getColumnIndex(LocationsDbHelper.COLUMN_LATITUDE);
                int longitudeIndex = cursor.getColumnIndex(LocationsDbHelper.COLUMN_LONGITUDE);
                int isVisitedIndex = cursor.getColumnIndex(LocationsDbHelper.COLUMN_IS_VISITED);
                int locationNameIndex = cursor.getColumnIndex(LocationsDbHelper.COLUMN_NAME);
                int elevationIndex = cursor.getColumnIndex(LocationsDbHelper.COLUMN_ELEVATION);
                do {
                    int id = cursor.getInt(idIndex);
                    double latitude = cursor.getDouble(latitudeIndex);
                    double longitude = cursor.getDouble(longitudeIndex);
                    // Stored as an integer flag: 1 == visited.
                    boolean isVisited = cursor.getInt(isVisitedIndex) == 1;
                    String locationName = cursor.getString(locationNameIndex);
                    double elevation = cursor.getDouble(elevationIndex);
                    locationList.add(new Location(
                            id, latitude, longitude, isVisited, locationName, elevation));
                } while (cursor.moveToNext());
            }
        } finally {
            cursor.close();
        }
        return locationList;
    }
}
\ No newline at end of file
package com.example.thetrek;
//import android.content.Intent;
//import android.database.Cursor;
//import android.os.Bundle;
//import android.view.View;
//import android.widget.Button;
//import android.widget.EditText;
//import android.widget.TextView;
//import android.widget.Toast;
//
//import androidx.appcompat.app.AlertDialog;
import androidx.appcompat.app.AppCompatActivity;
//import com.example.thetrek.Sql.DBHelper;
/**
 * Placeholder login activity.
 *
 * NOTE(review): this class previously contained a full email/password login
 * flow backed by DBHelper, but the entire implementation was commented out
 * and the live login path now goes through MainActivityLogin. The dead
 * commented-out code has been removed — recover it from version control if
 * the flow is ever revived. The class itself is kept because other screens
 * (e.g. SignUp) still navigate to it via Intent.
 */
public class Login extends AppCompatActivity {
}
\ No newline at end of file
//package com.example.thetrek;
//
//import android.content.Intent;
//import android.os.Bundle;
//import android.view.View;
//import android.widget.Button;
//
//import androidx.appcompat.app.AppCompatActivity;
//import androidx.appcompat.widget.Toolbar;
//
//import com.example.thetrek.Sql.DBHelper;
//
//public class MainActivity extends AppCompatActivity {
// Button login,Reg;
// Toolbar toolbar;
// DBHelper dbHelper;
//
// @Override
// public void onBackPressed() {
// MainActivity.this.finish();
// }
//
// @Override
// protected void onCreate(Bundle savedInstanceState) {
//
// super.onCreate(savedInstanceState);
// setContentView(R.layout.activity_main1);
// dbHelper = new DBHelper(this);
// login =(Button) findViewById(R.id.btnSubmit_login);
//
// login.setOnClickListener(new View.OnClickListener() {
// @Override
// public void onClick(View view) {
// Intent intent = new Intent(MainActivity.this, MainActivityLogin.class);
// startActivity(intent);
// }
// });
// Reg = findViewById(R.id.createAcc);
// Reg.setOnClickListener(new View.OnClickListener() {
// @Override
// public void onClick(View view) {
// Intent intent = new Intent(MainActivity.this,SignUp.class);
// startActivity(intent);
// }
// });
//
// }}
package com.example.thetrek;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.Toolbar;
import com.example.thetrek.Sql.DBHelper;
/**
 * App entry screen: offers login and registration, and runs the background
 * usage-monitor service for as long as this activity exists.
 */
public class MainActivity extends AppCompatActivity {
    Button login, Reg;
    Toolbar toolbar;
    DBHelper dbHelper;

    @Override
    public void onBackPressed() {
        // Back from the entry screen exits the app instead of navigating.
        MainActivity.this.finish();
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main1);
        dbHelper = new DBHelper(this);

        // Start monitoring mobile usage while the app is alive.
        startService(new Intent(this, UsageMonitorService.class));

        login = (Button) findViewById(R.id.btnSubmit_login);
        login.setOnClickListener(view ->
                startActivity(new Intent(MainActivity.this, MainActivityLogin.class)));

        Reg = findViewById(R.id.createAcc);
        Reg.setOnClickListener(view ->
                startActivity(new Intent(MainActivity.this, SignUp.class)));
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Stop the usage monitor when the entry screen is torn down.
        stopService(new Intent(this, UsageMonitorService.class));
    }
}
package com.example.thetrek;
import androidx.appcompat.app.AppCompatActivity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import com.example.thetrek.screens.mobileUsage.ScreenMonitoringService;
/**
 * Login screen: starts the screen-monitoring service and forwards the user
 * to the home dashboard on submit.
 */
public class MainActivityLogin extends AppCompatActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_login);
        // getSupportActionBar() returns null when the theme supplies no
        // action bar; the previous unconditional .hide() would NPE there.
        if (getSupportActionBar() != null) {
            getSupportActionBar().hide();
        }
        startService(new Intent(this, ScreenMonitoringService.class));
        Button loginButton = (Button) findViewById(R.id.btnSubmit_login);
        loginButton.setOnClickListener(v -> {
            // NOTE(review): no credential check is performed here — the
            // button navigates straight to HomeActivity.
            Intent intent = new Intent(v.getContext(), HomeActivity.class);
            v.getContext().startActivity(intent);
        });
    }
}
\ No newline at end of file
This diff is collapsed.
package com.example.thetrek;
import android.content.Intent;
import android.os.Bundle;
import androidx.appcompat.app.AppCompatActivity;
/**
 * Headless activity (no layout is set) whose only job is to keep the
 * usage-monitor service alive for its own lifetime.
 */
public class MonitorActivity extends AppCompatActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        Intent monitorIntent = new Intent(this, UsageMonitorService.class);
        startService(monitorIntent);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        Intent monitorIntent = new Intent(this, UsageMonitorService.class);
        stopService(monitorIntent);
    }
}
package com.example.thetrek;
import android.content.Context;
import android.os.Environment;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.util.ArrayList;
/**
 * Writes one recorded sensor session (accelerometer, gyroscope, linear
 * acceleration) to a CSV file under HAR_Recordings on external storage.
 * The three streams are truncated to their common length before writing.
 */
public class RecordingDB {
    private static final String TAG = "RecordingDB";
    // CSV header column names.
    private final String ACC_X = "acc_x";
    private final String ACC_Y = "acc_y";
    private final String ACC_Z = "acc_z";
    private final String GYRO_X = "gyro_x";
    private final String GYRO_Y = "gyro_y";
    private final String GYRO_Z = "gyro_z";
    private final String LA_X = "la_x";
    private final String LA_Y = "la_y";
    private final String LA_Z = "la_z";
    private final String LABEL_TEXT = "Activity";
    private final String COMMA_SEP = ",";
    private final Context context;
    private final ArrayList<String> acc_data;
    private final ArrayList<String> gyro_data;
    private final ArrayList<String> la_data;
    private final String label;

    public RecordingDB(Context context, ArrayList<String> acc_data, ArrayList<String> gyro_data, ArrayList<String> la_data, String label) {
        this.context = context;
        this.acc_data = acc_data;
        this.gyro_data = gyro_data;
        this.la_data = la_data;
        this.label = label;
    }

    /**
     * Saves the session as {@code filename}.csv and returns the number of
     * rows written (the shortest of the three streams).
     */
    public int saveData(String filename) {
        int min = Math.min(acc_data.size(), Math.min(gyro_data.size(), la_data.size()));
        // BUG FIX: the previous trim loop used `i <= Math.abs(size - min)`,
        // which removed one element too many and left the list with min - 1
        // entries — the write loop below then threw IndexOutOfBoundsException.
        trimToSize(acc_data, min);
        trimToSize(gyro_data, min);
        trimToSize(la_data, min);
        File outputFile = new File(Environment.getExternalStorageDirectory() + File.separator + "HAR_Recordings" + File.separator + filename + ".csv");
        // try-with-resources guarantees the writer is closed (and the file
        // flushed) even if writing a row throws.
        try (PrintWriter writer = new PrintWriter(outputFile)) {
            writer.println(ACC_X + COMMA_SEP + ACC_Y + COMMA_SEP + ACC_Z + COMMA_SEP +
                    GYRO_X + COMMA_SEP + GYRO_Y + COMMA_SEP + GYRO_Z + COMMA_SEP +
                    LA_X + COMMA_SEP + LA_Y + COMMA_SEP + LA_Z + COMMA_SEP + LABEL_TEXT);
            for (int i = 0; i < min; i++) {
                // Samples are stored as "[x, y, z]" strings; strip brackets so
                // each triple contributes three CSV columns.
                String a_data = acc_data.get(i).replace("[", "").replace("]", "");
                String g_data = gyro_data.get(i).replace("[", "").replace("]", "");
                String l_data = la_data.get(i).replace("[", "").replace("]", "");
                writer.println(a_data + COMMA_SEP + g_data + COMMA_SEP + l_data + COMMA_SEP + label);
            }
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
        return min;
    }

    /** Removes trailing elements until {@code list} holds exactly {@code size} entries. */
    private static void trimToSize(ArrayList<String> list, int size) {
        while (list.size() > size) {
            list.remove(list.size() - 1);
        }
    }
}
package com.example.thetrek;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;
import androidx.appcompat.app.AppCompatActivity;
import com.example.thetrek.Sql.DBHelper;
/**
 * Registration screen: collects name/number/email/password, persists them via
 * DBHelper, and routes to the login screens.
 */
public class SignUp extends AppCompatActivity {
    EditText name, number, email, pass;
    TextView login;
    DBHelper dbHelper;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_sign_up);

        name = findViewById(R.id.textName);
        number = findViewById(R.id.textNumber);
        email = findViewById(R.id.textEmail);
        pass = findViewById(R.id.textPass);
        Button signUpAcc = findViewById(R.id.btnSignUpAcc);
        dbHelper = new DBHelper(this);

        // Persist the new account, then hand off to the login screen.
        signUpAcc.setOnClickListener(view -> {
            String name1 = name.getText().toString();
            String number1 = number.getText().toString();
            String email1 = email.getText().toString();
            String pass1 = pass.getText().toString();
            boolean inserted = dbHelper.insetUserData(name1, number1, email1, pass1);
            if (inserted) {
                Toast.makeText(SignUp.this, "Data inserted", Toast.LENGTH_SHORT).show();
                startActivity(new Intent(SignUp.this, Login.class));
            } else {
                Toast.makeText(SignUp.this, "Failed To insert Data", Toast.LENGTH_SHORT).show();
            }
        });

        // Existing users jump straight to the login flow.
        login = findViewById(R.id.loginAcc);
        login.setOnClickListener(view ->
                startActivity(new Intent(SignUp.this, MainActivityLogin.class)));
    }
}
\ No newline at end of file
package com.example.thetrek.Sql;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
/**
 * SQLite helper for the single UserDetails table (userID/email is the
 * primary key).
 */
public class DBHelper extends SQLiteOpenHelper {
    public DBHelper(Context context) {
        super(context, "UserData", null, 1);
    }

    @Override
    public void onCreate(SQLiteDatabase DB) {
        // NOTE(review): "PASSWORD" and "NUMBER" are not real SQLite types;
        // SQLite accepts them via type affinity, but TEXT would state intent.
        DB.execSQL("create Table UserDetails(userID TEXT primary key,name TEXT,password PASSWORD,number NUMBER)");
    }

    @Override
    public void onUpgrade(SQLiteDatabase DB, int i, int i1) {
        // Destructive upgrade: existing user rows are discarded.
        DB.execSQL("drop Table if exists UserDetails");
        // BUG FIX: SQLiteOpenHelper does not call onCreate after onUpgrade,
        // so the dropped table must be recreated here or every query after a
        // version bump fails with "no such table".
        onCreate(DB);
    }

    /**
     * Inserts one user row keyed by email.
     *
     * SECURITY(review): the password is stored in plain text; it should be
     * hashed (bcrypt/scrypt/argon2) before persisting.
     *
     * @return true when the row was inserted, false on conflict or error
     */
    public Boolean insetUserData(String name, String number, String email, String password) {
        SQLiteDatabase DB = this.getWritableDatabase();
        ContentValues contentValues = new ContentValues();
        contentValues.put("userID", email);
        contentValues.put("name", name);
        contentValues.put("password", password);
        contentValues.put("number", number);
        // insert() returns -1 on failure.
        return DB.insert("UserDetails", null, contentValues) != -1;
    }

    /** Returns a cursor over all user rows (SQLite table names are ASCII case-insensitive). */
    public Cursor getData() {
        // Read-only query: a readable handle is sufficient.
        SQLiteDatabase DB = this.getReadableDatabase();
        return DB.rawQuery("Select * from Userdetails ", null);
    }
}
\ No newline at end of file
package com.example.thetrek;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;
import androidx.appcompat.app.AppCompatActivity;
import com.android.volley.Request;
import com.android.volley.RequestQueue;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.android.volley.toolbox.StringRequest;
import com.android.volley.toolbox.Volley;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.HashMap;
import java.util.Map;
public class SuggestActivity extends AppCompatActivity {
EditText age,gender,weather, Place_change, Time;
Button predict;
TextView result;
String url = "https://researchsuggestapp-2056b0768919.herokuapp.com/predict";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_suggest);
age = findViewById(R.id.age);
gender = findViewById(R.id.gender);
weather = findViewById(R.id.weather);
Place_change = findViewById(R.id.Place_change);
Time = findViewById(R.id.Time);
predict = findViewById(R.id.predict);
result = findViewById(R.id.result);
predict.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
// hit the API -> Volley
StringRequest stringRequest = new StringRequest(Request.Method.POST, url,
new Response.Listener<String>() {
@Override
public void onResponse(String response) {
try {
JSONObject jsonObject = new JSONObject(response);
String data = jsonObject.getString("Suguest");
if(data.equals("1")){
result.setText("\uD83C\uDF1F \"You Need to Move!\" \uD83C\uDF1F");
}else{
result.setText("\uD83C\uDF1F \"No Need to Move.\" \uD83C\uDF1F");
}
} catch (JSONException e) {
e.printStackTrace();
}
}
},
new Response.ErrorListener() {
@Override
public void onErrorResponse(VolleyError error) {
Toast.makeText(SuggestActivity.this, error.getMessage(), Toast.LENGTH_SHORT).show();
}
}){
@Override
protected Map<String,String> getParams(){
Map<String,String> params = new HashMap<String,String>();
params.put("age",age.getText().toString());
params.put("gender",gender.getText().toString());
params.put("weather",weather.getText().toString());
params.put("Place_change", Place_change.getText().toString());
params.put("Time",Time.getText().toString());
return params;
}
};
RequestQueue queue = Volley.newRequestQueue(SuggestActivity.this);
queue.add(stringRequest);
}
});
}
}
\ No newline at end of file
package com.example.thetrek;
import android.app.Notification;
import android.app.NotificationChannel;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.app.Service;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Handler;
import android.os.IBinder;
import androidx.core.app.NotificationCompat;
public class UsageMonitorService extends Service {
private static final String TAG = "UsageMonitorService";
private static final int NOTIFICATION_ID = 1;
private static final String CHANNEL_ID = "UsageMonitorChannel";
private static final long USAGE_THRESHOLD = 2 * 60 * 1000; // 2 minutes in milliseconds
private boolean isScreenOn = true;
private long usageStartTime = 0;
private Handler handler;
private Runnable checkUsageRunnable;
@Override
public void onCreate() {
    super.onCreate();
    // Explicit main-looper handler: the no-arg Handler() constructor is
    // deprecated because it silently binds to the calling thread's looper.
    // A Service's onCreate runs on the main thread, so behavior is unchanged.
    handler = new Handler(android.os.Looper.getMainLooper());
    checkUsageRunnable = new Runnable() {
        @Override
        public void run() {
            if (isScreenOn) {
                if (usageStartTime == 0) {
                    // Screen just became active: open a new usage window.
                    usageStartTime = System.currentTimeMillis();
                } else {
                    long currentTime = System.currentTimeMillis();
                    long usageTime = currentTime - usageStartTime;
                    if (usageTime > USAGE_THRESHOLD) {
                        sendNotification();
                        // BUG FIX: restart the window after alerting, so the
                        // notification fires once per threshold period instead
                        // of once per second forever after.
                        usageStartTime = currentTime;
                    }
                }
            } else {
                // Screen off: no usage is accruing.
                usageStartTime = 0;
            }
            handler.postDelayed(this, 1000); // Re-check every second.
        }
    };
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
createNotificationChannel();
startForeground(NOTIFICATION_ID, createNotification());
IntentFilter filter = new IntentFilter(Intent.ACTION_SCREEN_ON);
filter.addAction(Intent.ACTION_SCREEN_OFF);
registerReceiver(screenReceiver, filter);
handler.post(checkUsageRunnable);
return START_STICKY;
}
@Override
public void onDestroy() {
super.onDestroy();
unregisterReceiver(screenReceiver);
handler.removeCallbacks(checkUsageRunnable);
}
private void createNotificationChannel() {
NotificationChannel channel = new NotificationChannel(
CHANNEL_ID,
"Usage Monitor Channel",
NotificationManager.IMPORTANCE_DEFAULT
);
NotificationManager notificationManager = getSystemService(NotificationManager.class);
notificationManager.createNotificationChannel(channel);
}
private Notification createNotification() {
Intent intent = new Intent(this, MainActivity.class);
PendingIntent pendingIntent = PendingIntent.getActivity(this, 0, intent, 0);
return new NotificationCompat.Builder(this, CHANNEL_ID)
.setContentTitle("Mobile Usage Monitor")
.setContentText("Monitoring your mobile usage...")
.setSmallIcon(R.mipmap.ic_launcher2)
.setContentIntent(pendingIntent)
.build();
}
private void sendNotification() {
NotificationCompat.Builder builder = new NotificationCompat.Builder(this, CHANNEL_ID)
.setSmallIcon(R.mipmap.ic_launcher1)
.setContentTitle("Mobile Usage Alert")
.setContentText("📱🕒 You've been glued to your phone for over 2 minutes! 🚶‍♂️ It's time to stretch those legs and get moving! 🏃‍♀️💨")
.setPriority(NotificationCompat.PRIORITY_DEFAULT);
NotificationManager notificationManager = getSystemService(NotificationManager.class);
notificationManager.notify(NOTIFICATION_ID, builder.build());
}
private final BroadcastReceiver screenReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
if (intent.getAction() != null) {
if (intent.getAction().equals(Intent.ACTION_SCREEN_ON)) {
isScreenOn = true;
} else if (intent.getAction().equals(Intent.ACTION_SCREEN_OFF)) {
isScreenOn = false;
}
}
}
};
@Override
public IBinder onBind(Intent intent) {
return null;
}
}
/*
* Copyright 2017 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.thetrek.common.helpers;
import android.Manifest;
import android.app.Activity;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.provider.Settings;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
/** Utility methods for requesting and checking the runtime camera permission. */
public final class CameraPermissionHelper {
  private static final int CAMERA_PERMISSION_CODE = 0;
  private static final String CAMERA_PERMISSION = Manifest.permission.CAMERA;

  /** Returns whether the camera permission has already been granted. */
  public static boolean hasCameraPermission(Activity activity) {
    int state = ContextCompat.checkSelfPermission(activity, CAMERA_PERMISSION);
    return state == PackageManager.PERMISSION_GRANTED;
  }

  /** Asks the user to grant the camera permission via the system dialog. */
  public static void requestCameraPermission(Activity activity) {
    String[] permissions = new String[] {CAMERA_PERMISSION};
    ActivityCompat.requestPermissions(activity, permissions, CAMERA_PERMISSION_CODE);
  }

  /** Returns whether a rationale UI should be shown before re-requesting the permission. */
  public static boolean shouldShowRequestPermissionRationale(Activity activity) {
    return ActivityCompat.shouldShowRequestPermissionRationale(activity, CAMERA_PERMISSION);
  }

  /** Opens this app's system settings page so the user can grant the permission manually. */
  public static void launchPermissionSettings(Activity activity) {
    Uri packageUri = Uri.fromParts("package", activity.getPackageName(), null);
    Intent intent = new Intent(Settings.ACTION_APPLICATION_DETAILS_SETTINGS, packageUri);
    activity.startActivity(intent);
  }
}
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.thetrek.common.helpers;
import android.content.Context;
import android.content.SharedPreferences;
/** Persists and exposes the depth-based occlusion options via {@code SharedPreferences}. */
public class DepthSettings {
  public static final String SHARED_PREFERENCES_ID = "SHARED_PREFERENCES_OCCLUSION_OPTIONS";
  public static final String SHARED_PREFERENCES_SHOW_DEPTH_ENABLE_DIALOG_OOBE =
      "show_depth_enable_dialog_oobe";
  public static final String SHARED_PREFERENCES_USE_DEPTH_FOR_OCCLUSION = "use_depth_for_occlusion";

  // In-memory copies of the current settings; the occlusion flag is backed by preferences,
  // the visualization flag is session-only.
  private boolean depthColorVisualizationEnabled = false;
  private boolean useDepthForOcclusion = false;
  private SharedPreferences sharedPreferences;

  /** Loads the persisted occlusion setting; must be called before any getter/setter. */
  public void onCreate(Context context) {
    sharedPreferences = context.getSharedPreferences(SHARED_PREFERENCES_ID, Context.MODE_PRIVATE);
    useDepthForOcclusion =
        sharedPreferences.getBoolean(SHARED_PREFERENCES_USE_DEPTH_FOR_OCCLUSION, false);
  }

  /** Returns whether depth-based occlusion is currently enabled. */
  public boolean useDepthForOcclusion() {
    return useDepthForOcclusion;
  }

  /** Enables or disables depth-based occlusion and persists the new value. */
  public void setUseDepthForOcclusion(boolean enable) {
    if (enable == useDepthForOcclusion) {
      return; // Already in the requested state.
    }
    useDepthForOcclusion = enable;
    sharedPreferences
        .edit()
        .putBoolean(SHARED_PREFERENCES_USE_DEPTH_FOR_OCCLUSION, useDepthForOcclusion)
        .apply();
  }

  /** Returns whether the depth map visualization replaces the camera feed. */
  public boolean depthColorVisualizationEnabled() {
    return depthColorVisualizationEnabled;
  }

  /** Sets the (non-persisted) depth visualization flag. */
  public void setDepthColorVisualizationEnabled(boolean depthColorVisualizationEnabled) {
    this.depthColorVisualizationEnabled = depthColorVisualizationEnabled;
  }

  /**
   * Returns true exactly once per device: whether the one-time "enable depth?" prompt should
   * be shown. The first call flips the stored flag so subsequent calls return false.
   */
  public boolean shouldShowDepthEnableDialog() {
    boolean showDialog =
        sharedPreferences.getBoolean(SHARED_PREFERENCES_SHOW_DEPTH_ENABLE_DIALOG_OOBE, true);
    if (showDialog) {
      // Mark the prompt as seen; the user can revisit the option in the settings menu.
      sharedPreferences
          .edit()
          .putBoolean(SHARED_PREFERENCES_SHOW_DEPTH_ENABLE_DIALOG_OOBE, false)
          .apply();
    }
    return showDialog;
  }
}
/*
* Copyright 2017 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.thetrek.common.helpers;
import android.app.Activity;
import android.content.Context;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.display.DisplayManager;
import android.hardware.display.DisplayManager.DisplayListener;
import android.view.Display;
import android.view.Surface;
import android.view.WindowManager;
import com.google.ar.core.Session;
/**
 * Tracks display rotation changes for an ARCore session. 180-degree rotations do not trigger
 * onSurfaceChanged(), so this helper also listens to DisplayManager events to catch them.
 */
public final class DisplayRotationHelper implements DisplayListener {
  // True when either the surface size or the display rotation changed since the last
  // updateSessionIfNeeded() call.
  private boolean viewportChanged;
  private int viewportWidth;
  private int viewportHeight;
  private final Display display;
  private final DisplayManager displayManager;
  private final CameraManager cameraManager;

  /**
   * Creates the helper without registering any listener yet.
   *
   * @param context the Android {@link Context}.
   */
  public DisplayRotationHelper(Context context) {
    displayManager = (DisplayManager) context.getSystemService(Context.DISPLAY_SERVICE);
    cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
    WindowManager windowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
    display = windowManager.getDefaultDisplay();
  }

  /** Starts listening for display events. Call from {@link Activity#onResume()}. */
  public void onResume() {
    displayManager.registerDisplayListener(this, null);
  }

  /** Stops listening for display events. Call from {@link Activity#onPause()}. */
  public void onPause() {
    displayManager.unregisterDisplayListener(this);
  }

  /**
   * Records new surface dimensions for the next {@link #updateSessionIfNeeded(Session)} call.
   * Call from the GL renderer's onSurfaceChanged() callback.
   *
   * @param width the updated width of the surface.
   * @param height the updated height of the surface.
   */
  public void onSurfaceChanged(int width, int height) {
    viewportWidth = width;
    viewportHeight = height;
    viewportChanged = true;
  }

  /**
   * Pushes any pending display-geometry change into the session and clears the pending flag.
   * Call before every {@link Session#update()}.
   *
   * @param session the {@link Session} to update when the geometry changed.
   */
  public void updateSessionIfNeeded(Session session) {
    if (!viewportChanged) {
      return;
    }
    session.setDisplayGeometry(display.getRotation(), viewportWidth, viewportHeight);
    viewportChanged = false;
  }

  /**
   * Returns the viewport aspect ratio, corrected for the rotation between the display and the
   * camera sensor orientation.
   */
  public float getCameraSensorRelativeViewportAspectRatio(String cameraId) {
    int relativeRotation = getCameraSensorToDisplayRotation(cameraId);
    if (relativeRotation == 90 || relativeRotation == 270) {
      // Sensor is sideways relative to the display: swap the axes.
      return (float) viewportHeight / (float) viewportWidth;
    }
    if (relativeRotation == 0 || relativeRotation == 180) {
      return (float) viewportWidth / (float) viewportHeight;
    }
    throw new RuntimeException("Unhandled rotation: " + relativeRotation);
  }

  /**
   * Returns the rotation of the back-facing camera relative to the display, as one of
   * 0, 90, 180, 270 degrees.
   */
  public int getCameraSensorToDisplayRotation(String cameraId) {
    CameraCharacteristics characteristics;
    try {
      characteristics = cameraManager.getCameraCharacteristics(cameraId);
    } catch (CameraAccessException e) {
      throw new RuntimeException("Unable to determine display orientation", e);
    }
    // Camera sensor orientation (degrees) minus current display orientation, normalized
    // into [0, 360).
    int sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
    int displayOrientation = toDegrees(display.getRotation());
    return (sensorOrientation - displayOrientation + 360) % 360;
  }

  /** Converts a {@link Surface} rotation constant to degrees. */
  private int toDegrees(int rotation) {
    if (rotation == Surface.ROTATION_0) {
      return 0;
    }
    if (rotation == Surface.ROTATION_90) {
      return 90;
    }
    if (rotation == Surface.ROTATION_180) {
      return 180;
    }
    if (rotation == Surface.ROTATION_270) {
      return 270;
    }
    throw new RuntimeException("Unknown rotation " + rotation);
  }

  @Override
  public void onDisplayAdded(int displayId) {}

  @Override
  public void onDisplayRemoved(int displayId) {}

  @Override
  public void onDisplayChanged(int displayId) {
    // Covers 180-degree flips that never reach onSurfaceChanged().
    viewportChanged = true;
  }
}
/*
* Copyright 2023 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.thetrek.common.helpers;
import android.content.Context;
import android.content.SharedPreferences;
/**
 * Stores the Electronic Image Stabilization (EIS) preference persistently via
 * {@code android.content.SharedPreferences}.
 */
public class EisSettings {
  public static final String SHARED_PREFERENCE_ID = "SHARED_PREFERENCE_EIS_OPTIONS";
  public static final String SHARED_PREFERENCE_EIS_ENABLED = "eis_enabled";

  // In-memory copy of the persisted flag; defaults to disabled.
  private boolean eisEnabled = false;
  private SharedPreferences sharedPreferences;

  /** Loads the saved EIS preference; must be called before the getter/setter. */
  public void onCreate(Context context) {
    sharedPreferences = context.getSharedPreferences(SHARED_PREFERENCE_ID, Context.MODE_PRIVATE);
    eisEnabled = sharedPreferences.getBoolean(SHARED_PREFERENCE_EIS_ENABLED, false);
  }

  /** Returns the saved EIS state. */
  public boolean isEisEnabled() {
    return eisEnabled;
  }

  /** Updates and persists the EIS preference; no-op when the value is unchanged. */
  public void setEisEnabled(boolean enable) {
    if (enable == eisEnabled) {
      return;
    }
    eisEnabled = enable;
    sharedPreferences
        .edit()
        .putBoolean(SHARED_PREFERENCE_EIS_ENABLED, eisEnabled)
        .apply();
  }
}
/*
* Copyright 2017 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.thetrek.common.helpers;
import android.app.Activity;
import android.view.View;
/** Puts an activity into sticky-immersive full screen mode. */
public final class FullScreenHelper {
  // Sticky immersive flag set, per
  // https://developer.android.com/training/system-ui/immersive.html#sticky
  private static final int IMMERSIVE_STICKY_FLAGS =
      View.SYSTEM_UI_FLAG_LAYOUT_STABLE
          | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
          | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
          | View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
          | View.SYSTEM_UI_FLAG_FULLSCREEN
          | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY;

  /**
   * Applies the fullscreen flags when the window gains focus. Expected to be called from
   * {@link Activity#onWindowFocusChanged(boolean hasFocus)}.
   *
   * @param activity the Activity on which the full screen mode will be set.
   * @param hasFocus the flag passed through from the
   *     {@link Activity#onWindowFocusChanged(boolean hasFocus)} callback.
   */
  public static void setFullScreenOnWindowFocusChanged(Activity activity, boolean hasFocus) {
    if (!hasFocus) {
      return; // Only (re)apply the flags when the window regains focus.
    }
    View decorView = activity.getWindow().getDecorView();
    decorView.setSystemUiVisibility(IMMERSIVE_STICKY_FLAGS);
  }
}
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.thetrek.common.helpers;
import android.content.Context;
import android.content.SharedPreferences;
/** Manages the Instant Placement option setting and shared preferences. */
public class InstantPlacementSettings {
  public static final String SHARED_PREFERENCES_ID = "SHARED_PREFERENCES_INSTANT_PLACEMENT_OPTIONS";
  public static final String SHARED_PREFERENCES_INSTANT_PLACEMENT_ENABLED =
      "instant_placement_enabled";
  // In-memory copy of the persisted flag.
  // NOTE(review): the field initializer defaults to true, but onCreate() reads the
  // preference with a default of false, so after onCreate() the effective default is
  // false. Confirm which default is intended before relying on the pre-onCreate value.
  private boolean instantPlacementEnabled = true;
  private SharedPreferences sharedPreferences;
  /** Initializes the current settings based on the saved value. */
  public void onCreate(Context context) {
    sharedPreferences = context.getSharedPreferences(SHARED_PREFERENCES_ID, Context.MODE_PRIVATE);
    instantPlacementEnabled =
        sharedPreferences.getBoolean(SHARED_PREFERENCES_INSTANT_PLACEMENT_ENABLED, false);
  }
  /** Retrieves whether Instant Placement is enabled. */
  public boolean isInstantPlacementEnabled() {
    return instantPlacementEnabled;
  }
  /** Enables or disables Instant Placement and persists the new value. */
  public void setInstantPlacementEnabled(boolean enable) {
    if (enable == instantPlacementEnabled) {
      return; // No change.
    }
    // Updates the stored default settings.
    instantPlacementEnabled = enable;
    SharedPreferences.Editor editor = sharedPreferences.edit();
    editor.putBoolean(SHARED_PREFERENCES_INSTANT_PLACEMENT_ENABLED, instantPlacementEnabled);
    editor.apply();
  }
}
/*
* Copyright 2022 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.thetrek.common.helpers;
import android.Manifest;
import android.app.Activity;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.provider.Settings;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
/** Utility methods for requesting and checking the fine-location runtime permission. */
public final class LocationPermissionHelper {
  private static final int LOCATION_PERMISSION_CODE = 1;
  private static final String LOCATION_PERMISSION = Manifest.permission.ACCESS_FINE_LOCATION;

  /** Returns whether the fine-location permission has already been granted. */
  public static boolean hasFineLocationPermission(Activity activity) {
    int state = ContextCompat.checkSelfPermission(activity, LOCATION_PERMISSION);
    return state == PackageManager.PERMISSION_GRANTED;
  }

  /** Asks the user to grant the fine-location permission via the system dialog. */
  public static void requestFineLocationPermission(Activity activity) {
    String[] permissions = new String[] {LOCATION_PERMISSION};
    ActivityCompat.requestPermissions(activity, permissions, LOCATION_PERMISSION_CODE);
  }

  /** Returns whether the given permission-result array includes the location permission. */
  public static boolean hasFineLocationPermissionsResponseInResult(String[] permissions) {
    for (int i = 0; i < permissions.length; i++) {
      if (LOCATION_PERMISSION.equals(permissions[i])) {
        return true;
      }
    }
    return false;
  }

  /** Returns whether a rationale UI should be shown before re-requesting the permission. */
  public static boolean shouldShowRequestPermissionRationale(Activity activity) {
    return ActivityCompat.shouldShowRequestPermissionRationale(activity, LOCATION_PERMISSION);
  }

  /** Opens this app's system settings page so the user can grant the permission manually. */
  public static void launchPermissionSettings(Activity activity) {
    Uri packageUri = Uri.fromParts("package", activity.getPackageName(), null);
    Intent intent = new Intent(Settings.ACTION_APPLICATION_DETAILS_SETTINGS, packageUri);
    activity.startActivity(intent);
  }
}
/*
* Copyright 2017 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.thetrek.common.helpers;
import android.app.Activity;
import android.view.View;
import android.widget.TextView;
import com.google.android.material.snackbar.BaseTransientBottomBar;
import com.google.android.material.snackbar.Snackbar;
/**
 * Helper to manage the sample snackbar. Hides the Android boilerplate code, and exposes simpler
 * methods. Methods are safe to call from any thread: the actual show/dismiss work is always
 * posted to the UI thread via {@link Activity#runOnUiThread}.
 */
public final class SnackbarHelper {
  // Semi-transparent dark grey (ARGB 0xbf323232) used as the snackbar background.
  private static final int BACKGROUND_COLOR = 0xbf323232;
  // Currently visible snackbar; null when nothing is showing.
  private Snackbar messageSnackbar;
  // HIDE: no dismiss button; SHOW: dismiss button; FINISH: dismiss button + finish the activity.
  private enum DismissBehavior { HIDE, SHOW, FINISH };
  private int maxLines = 2;
  // Last message shown via showMessage(), used to suppress duplicate re-shows.
  private String lastMessage = "";
  // Optional anchor view passed to Snackbar.make(); null means use android.R.id.content.
  private View snackbarView;
  public boolean isShowing() {
    return messageSnackbar != null;
  }
  /** Shows a snackbar with a given message. */
  public void showMessage(Activity activity, String message) {
    // Skip empty messages and avoid re-showing the identical message that is already up.
    if (!message.isEmpty() && (!isShowing() || !lastMessage.equals(message))) {
      lastMessage = message;
      show(activity, message, DismissBehavior.HIDE);
    }
  }
  /** Shows a snackbar with a given message, and a dismiss button. */
  public void showMessageWithDismiss(Activity activity, String message) {
    show(activity, message, DismissBehavior.SHOW);
  }
  /** Shows a snackbar with a given message for Snackbar.LENGTH_SHORT milliseconds */
  public void showMessageForShortDuration(Activity activity, String message) {
    show(activity, message, DismissBehavior.SHOW, Snackbar.LENGTH_SHORT);
  }
  /** Shows a snackbar with a given message for Snackbar.LENGTH_LONG milliseconds */
  public void showMessageForLongDuration(Activity activity, String message) {
    show(activity, message, DismissBehavior.SHOW, Snackbar.LENGTH_LONG);
  }
  /**
   * Shows a snackbar with a given error message. When dismissed, will finish the activity. Useful
   * for notifying errors, where no further interaction with the activity is possible.
   */
  public void showError(Activity activity, String errorMessage) {
    show(activity, errorMessage, DismissBehavior.FINISH);
  }
  /**
   * Hides the currently showing snackbar, if there is one. Safe to call from any thread. Safe to
   * call even if snackbar is not shown.
   */
  public void hide(Activity activity) {
    if (!isShowing()) {
      return;
    }
    lastMessage = "";
    // Capture the current snackbar and null the field *before* posting the dismiss, so
    // isShowing() is immediately false and a concurrent show() can create a new one.
    Snackbar messageSnackbarToHide = messageSnackbar;
    messageSnackbar = null;
    activity.runOnUiThread(
        new Runnable() {
          @Override
          public void run() {
            messageSnackbarToHide.dismiss();
          }
        });
  }
  public void setMaxLines(int lines) {
    maxLines = lines;
  }
  /**
   * Sets the view that will be used to find a suitable parent view to hold the Snackbar view.
   *
   * <p>To use the root layout ({@link android.R.id.content}), pass in {@code null}.
   *
   * @param snackbarView the view to pass to {@link
   *     Snackbar#make(…)} which will be used to find a
   *     suitable parent, which is a {@link androidx.coordinatorlayout.widget.CoordinatorLayout}, or
   *     the window decor's content view, whichever comes first.
   */
  public void setParentView(View snackbarView) {
    this.snackbarView = snackbarView;
  }
  // Convenience overload: indefinite duration.
  private void show(Activity activity, String message, DismissBehavior dismissBehavior) {
    show(activity, message, dismissBehavior, Snackbar.LENGTH_INDEFINITE);
  }
  // Builds and shows the snackbar on the UI thread, wiring up the optional dismiss action
  // and the optional finish-on-dismiss callback.
  private void show(
      final Activity activity,
      final String message,
      final DismissBehavior dismissBehavior,
      int duration) {
    activity.runOnUiThread(
        new Runnable() {
          @Override
          public void run() {
            messageSnackbar =
                Snackbar.make(
                    snackbarView == null
                        ? activity.findViewById(android.R.id.content)
                        : snackbarView,
                    message,
                    duration);
            messageSnackbar.getView().setBackgroundColor(BACKGROUND_COLOR);
            // A "Dismiss" action only makes sense for indefinite snackbars that are not
            // auto-hidden by showMessage().
            if (dismissBehavior != DismissBehavior.HIDE && duration == Snackbar.LENGTH_INDEFINITE) {
              messageSnackbar.setAction(
                  "Dismiss",
                  new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                      messageSnackbar.dismiss();
                    }
                  });
              if (dismissBehavior == DismissBehavior.FINISH) {
                // Error snackbars terminate the activity once dismissed.
                messageSnackbar.addCallback(
                    new BaseTransientBottomBar.BaseCallback<Snackbar>() {
                      @Override
                      public void onDismissed(Snackbar transientBottomBar, int event) {
                        super.onDismissed(transientBottomBar, event);
                        activity.finish();
                      }
                    });
              }
            }
            ((TextView)
                    messageSnackbar
                        .getView()
                        .findViewById(com.google.android.material.R.id.snackbar_text))
                .setMaxLines(maxLines);
            messageSnackbar.show();
          }
        });
  }
}
/*
* Copyright 2017 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.thetrek.common.helpers;
import android.content.Context;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnTouchListener;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
/**
 * Detects single taps with Android's GestureDetector on the UI thread and hands them to the
 * render thread through a bounded queue.
 */
public final class TapHelper implements OnTouchListener {
  private final GestureDetector gestureDetector;
  // Bounded hand-off queue between the UI thread (producer) and render thread (consumer).
  private final BlockingQueue<MotionEvent> tapQueue = new ArrayBlockingQueue<>(16);

  /**
   * Creates the tap helper.
   *
   * @param context the application's context.
   */
  public TapHelper(Context context) {
    GestureDetector.SimpleOnGestureListener listener =
        new GestureDetector.SimpleOnGestureListener() {
          @Override
          public boolean onSingleTapUp(MotionEvent e) {
            // offer() drops the tap silently when the queue is already full.
            tapQueue.offer(e);
            return true;
          }

          @Override
          public boolean onDown(MotionEvent e) {
            // Must return true so the detector keeps tracking the gesture.
            return true;
          }
        };
    gestureDetector = new GestureDetector(context, listener);
  }

  /**
   * Polls for a tap.
   *
   * @return a queued tap's MotionEvent, or null if no taps are queued.
   */
  public MotionEvent poll() {
    return tapQueue.poll();
  }

  @Override
  public boolean onTouch(View view, MotionEvent motionEvent) {
    return gestureDetector.onTouchEvent(motionEvent);
  }
}
/*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.thetrek.common.helpers;
import android.app.Activity;
import android.view.WindowManager;
import com.google.ar.core.Camera;
import com.google.ar.core.TrackingFailureReason;
import com.google.ar.core.TrackingState;
/**
 * Maps ARCore tracking failures to human-readable messages with suggested actions, and keeps
 * the screen awake while tracking is active.
 */
public final class TrackingStateHelper {
  private static final String INSUFFICIENT_FEATURES_MESSAGE =
      "Can't find anything. Aim device at a surface with more texture or color.";
  private static final String EXCESSIVE_MOTION_MESSAGE = "Moving too fast. Slow down.";
  private static final String INSUFFICIENT_LIGHT_MESSAGE =
      "Too dark. Try moving to a well-lit area.";
  private static final String INSUFFICIENT_LIGHT_ANDROID_S_MESSAGE =
      "Too dark. Try moving to a well-lit area."
          + " Also, make sure the Block Camera is set to off in system settings.";
  private static final String BAD_STATE_MESSAGE =
      "Tracking lost due to bad internal state. Please try restarting the AR experience.";
  private static final String CAMERA_UNAVAILABLE_MESSAGE =
      "Another app is using the camera. Tap on this app or try closing the other one.";
  private static final int ANDROID_S_SDK_VERSION = 31;

  private final Activity activity;
  // Last state seen, so the window flags are only touched on transitions.
  private TrackingState previousTrackingState;

  public TrackingStateHelper(Activity activity) {
    this.activity = activity;
  }

  /** Keep the screen unlocked while tracking, but allow it to lock when tracking stops. */
  public void updateKeepScreenOnFlag(TrackingState trackingState) {
    if (trackingState == previousTrackingState) {
      return; // No transition, nothing to do.
    }
    previousTrackingState = trackingState;
    if (trackingState == TrackingState.TRACKING) {
      activity.runOnUiThread(
          () -> activity.getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON));
    } else if (trackingState == TrackingState.PAUSED || trackingState == TrackingState.STOPPED) {
      activity.runOnUiThread(
          () -> activity.getWindow().clearFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON));
    }
  }

  /** Returns a user-facing description of the camera's current tracking failure, or "". */
  public static String getTrackingFailureReasonString(Camera camera) {
    TrackingFailureReason failure = camera.getTrackingFailureReason();
    switch (failure) {
      case NONE:
        return "";
      case EXCESSIVE_MOTION:
        return EXCESSIVE_MOTION_MESSAGE;
      case INSUFFICIENT_FEATURES:
        return INSUFFICIENT_FEATURES_MESSAGE;
      case INSUFFICIENT_LIGHT:
        // Android 12+ adds a system "Block Camera" toggle the user may have enabled.
        return android.os.Build.VERSION.SDK_INT < ANDROID_S_SDK_VERSION
            ? INSUFFICIENT_LIGHT_MESSAGE
            : INSUFFICIENT_LIGHT_ANDROID_S_MESSAGE;
      case BAD_STATE:
        return BAD_STATE_MESSAGE;
      case CAMERA_UNAVAILABLE:
        return CAMERA_UNAVAILABLE_MESSAGE;
    }
    return "Unknown tracking failure reason: " + failure;
  }
}
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.thetrek.common.samplerender;
import android.opengl.GLES30;
import android.util.Log;
import java.io.Closeable;
/** A framebuffer associated with a texture. */
/** A framebuffer associated with a texture. */
public class Framebuffer implements Closeable {
  private static final String TAG = Framebuffer.class.getSimpleName();

  // One-element array so the ID can be passed to glGenFramebuffers/glDeleteFramebuffers.
  private final int[] framebufferId = {0};
  private final Texture colorTexture;
  private final Texture depthTexture;
  // Current dimensions; -1 forces the first resize() call to allocate texture storage.
  private int width = -1;
  private int height = -1;

  /**
   * Constructs a {@link Framebuffer} which renders internally to a texture.
   *
   * <p>In order to render to the {@link Framebuffer}, use {@link SampleRender#draw(Mesh, Shader,
   * Framebuffer)}.
   *
   * @throws IllegalStateException if the framebuffer cannot be completed
   */
  public Framebuffer(SampleRender render, int width, int height) {
    try {
      colorTexture =
          new Texture(
              render,
              Texture.Target.TEXTURE_2D,
              Texture.WrapMode.CLAMP_TO_EDGE,
              /*useMipmaps=*/ false);
      depthTexture =
          new Texture(
              render,
              Texture.Target.TEXTURE_2D,
              Texture.WrapMode.CLAMP_TO_EDGE,
              /*useMipmaps=*/ false);
      // Set parameters of the depth texture so that it's readable by shaders.
      GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, depthTexture.getTextureId());
      GLError.maybeThrowGLException("Failed to bind depth texture", "glBindTexture");
      GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_COMPARE_MODE, GLES30.GL_NONE);
      GLError.maybeThrowGLException("Failed to set texture parameter", "glTexParameteri");
      GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MIN_FILTER, GLES30.GL_NEAREST);
      GLError.maybeThrowGLException("Failed to set texture parameter", "glTexParameteri");
      GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MAG_FILTER, GLES30.GL_NEAREST);
      GLError.maybeThrowGLException("Failed to set texture parameter", "glTexParameteri");
      // Set initial dimensions (allocates the texture storage).
      resize(width, height);
      // Create framebuffer object and bind to the color and depth textures.
      GLES30.glGenFramebuffers(1, framebufferId, 0);
      GLError.maybeThrowGLException("Framebuffer creation failed", "glGenFramebuffers");
      GLES30.glBindFramebuffer(GLES30.GL_FRAMEBUFFER, framebufferId[0]);
      GLError.maybeThrowGLException("Failed to bind framebuffer", "glBindFramebuffer");
      GLES30.glFramebufferTexture2D(
          GLES30.GL_FRAMEBUFFER,
          GLES30.GL_COLOR_ATTACHMENT0,
          GLES30.GL_TEXTURE_2D,
          colorTexture.getTextureId(),
          /*level=*/ 0);
      GLError.maybeThrowGLException(
          "Failed to bind color texture to framebuffer", "glFramebufferTexture2D");
      GLES30.glFramebufferTexture2D(
          GLES30.GL_FRAMEBUFFER,
          GLES30.GL_DEPTH_ATTACHMENT,
          GLES30.GL_TEXTURE_2D,
          depthTexture.getTextureId(),
          /*level=*/ 0);
      GLError.maybeThrowGLException(
          "Failed to bind depth texture to framebuffer", "glFramebufferTexture2D");
      int status = GLES30.glCheckFramebufferStatus(GLES30.GL_FRAMEBUFFER);
      if (status != GLES30.GL_FRAMEBUFFER_COMPLETE) {
        throw new IllegalStateException("Framebuffer construction not complete: code " + status);
      }
    } catch (Throwable t) {
      close();
      throw t;
    }
  }

  /**
   * Frees the framebuffer object and its backing textures. Safe to call multiple times, and safe
   * to call from the constructor's failure path before the textures have been assigned.
   */
  @Override
  public void close() {
    if (framebufferId[0] != 0) {
      GLES30.glDeleteFramebuffers(1, framebufferId, 0);
      GLError.maybeLogGLError(Log.WARN, TAG, "Failed to free framebuffer", "glDeleteFramebuffers");
      framebufferId[0] = 0;
    }
    // The texture fields are still null if a Texture constructor threw during construction;
    // guard so close() doesn't raise an NPE that would mask the original failure.
    if (colorTexture != null) {
      colorTexture.close();
    }
    if (depthTexture != null) {
      depthTexture.close();
    }
  }

  /** Resizes the framebuffer to the given dimensions. No-op if the dimensions are unchanged. */
  public void resize(int width, int height) {
    if (this.width == width && this.height == height) {
      return;
    }
    this.width = width;
    this.height = height;
    // Color texture
    GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, colorTexture.getTextureId());
    GLError.maybeThrowGLException("Failed to bind color texture", "glBindTexture");
    GLES30.glTexImage2D(
        GLES30.GL_TEXTURE_2D,
        /*level=*/ 0,
        GLES30.GL_RGBA,
        width,
        height,
        /*border=*/ 0,
        GLES30.GL_RGBA,
        GLES30.GL_UNSIGNED_BYTE,
        /*pixels=*/ null);
    GLError.maybeThrowGLException("Failed to specify color texture format", "glTexImage2D");
    // Depth texture
    GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, depthTexture.getTextureId());
    GLError.maybeThrowGLException("Failed to bind depth texture", "glBindTexture");
    GLES30.glTexImage2D(
        GLES30.GL_TEXTURE_2D,
        /*level=*/ 0,
        GLES30.GL_DEPTH_COMPONENT32F,
        width,
        height,
        /*border=*/ 0,
        GLES30.GL_DEPTH_COMPONENT,
        GLES30.GL_FLOAT,
        /*pixels=*/ null);
    GLError.maybeThrowGLException("Failed to specify depth texture format", "glTexImage2D");
  }

  /** Returns the color texture associated with this framebuffer. */
  public Texture getColorTexture() {
    return colorTexture;
  }

  /** Returns the depth texture associated with this framebuffer. */
  public Texture getDepthTexture() {
    return depthTexture;
  }

  /** Returns the width of the framebuffer. */
  public int getWidth() {
    return width;
  }

  /** Returns the height of the framebuffer. */
  public int getHeight() {
    return height;
  }

  /* package-private */
  int getFramebufferId() {
    return framebufferId[0];
  }
}
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.thetrek.common.samplerender;
import android.opengl.GLES30;
import android.opengl.GLException;
import android.opengl.GLU;
import android.util.Log;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/** Module for handling OpenGL errors. */
public class GLError {
/** Throws a {@link GLException} if a GL error occurred. */
public static void maybeThrowGLException(String reason, String api) {
List<Integer> errorCodes = getGlErrors();
if (errorCodes != null) {
throw new GLException(errorCodes.get(0), formatErrorMessage(reason, api, errorCodes));
}
}
/** Logs a message with the given logcat priority if a GL error occurred. */
public static void maybeLogGLError(int priority, String tag, String reason, String api) {
List<Integer> errorCodes = getGlErrors();
if (errorCodes != null) {
Log.println(priority, tag, formatErrorMessage(reason, api, errorCodes));
}
}
private static String formatErrorMessage(String reason, String api, List<Integer> errorCodes) {
StringBuilder builder = new StringBuilder(String.format("%s: %s: ", reason, api));
Iterator<Integer> iterator = errorCodes.iterator();
while (iterator.hasNext()) {
int errorCode = iterator.next();
builder.append(String.format("%s (%d)", GLU.gluErrorString(errorCode), errorCode));
if (iterator.hasNext()) {
builder.append(", ");
}
}
return builder.toString();
}
private static List<Integer> getGlErrors() {
int errorCode = GLES30.glGetError();
// Shortcut for no errors
if (errorCode == GLES30.GL_NO_ERROR) {
return null;
}
List<Integer> errorCodes = new ArrayList<>();
errorCodes.add(errorCode);
while (true) {
errorCode = GLES30.glGetError();
if (errorCode == GLES30.GL_NO_ERROR) {
break;
}
errorCodes.add(errorCode);
}
return errorCodes;
}
private GLError() {}
}
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.thetrek.common.samplerender;
import android.opengl.GLES30;
import android.util.Log;
import java.nio.Buffer;
/* package-private */
/* package-private */
// Thin wrapper around an OpenGL buffer object (VBO/EBO). Tracks both the logical size (entries
// in use) and the allocated capacity so set() can choose glBufferSubData over a reallocation.
class GpuBuffer {
  private static final String TAG = GpuBuffer.class.getSimpleName();

  // These values refer to the byte count of the corresponding Java datatypes.
  public static final int INT_SIZE = 4;
  public static final int FLOAT_SIZE = 4;

  // GL binding target (e.g. GL_ARRAY_BUFFER or GL_ELEMENT_ARRAY_BUFFER), fixed at construction.
  private final int target;
  private final int numberOfBytesPerEntry;
  // One-element array so the ID can be passed to glGenBuffers/glDeleteBuffers; 0 means freed.
  private final int[] bufferId = {0};
  // Number of entries currently in use (may be less than capacity after a smaller set()).
  private int size;
  // Number of entries the GPU allocation can hold without reallocating.
  private int capacity;

  // Creates the GL buffer object and optionally uploads initial data. `entries` may be null (or
  // empty, which is treated as null) to create an empty buffer.
  public GpuBuffer(int target, int numberOfBytesPerEntry, Buffer entries) {
    if (entries != null) {
      if (!entries.isDirect()) {
        throw new IllegalArgumentException("If non-null, entries buffer must be a direct buffer");
      }
      // Some GPU drivers will fail with out of memory errors if glBufferData or glBufferSubData is
      // called with a size of 0, so avoid this case.
      if (entries.limit() == 0) {
        entries = null;
      }
    }
    this.target = target;
    this.numberOfBytesPerEntry = numberOfBytesPerEntry;
    if (entries == null) {
      this.size = 0;
      this.capacity = 0;
    } else {
      this.size = entries.limit();
      this.capacity = entries.limit();
    }
    try {
      // Clear VAO to prevent unintended state change.
      GLES30.glBindVertexArray(0);
      GLError.maybeThrowGLException("Failed to unbind vertex array", "glBindVertexArray");
      GLES30.glGenBuffers(1, bufferId, 0);
      GLError.maybeThrowGLException("Failed to generate buffers", "glGenBuffers");
      GLES30.glBindBuffer(target, bufferId[0]);
      GLError.maybeThrowGLException("Failed to bind buffer object", "glBindBuffer");
      if (entries != null) {
        entries.rewind();
        GLES30.glBufferData(
            target, entries.limit() * numberOfBytesPerEntry, entries, GLES30.GL_DYNAMIC_DRAW);
      }
      // NOTE(review): this check sits outside the `entries != null` branch, so when no data is
      // uploaded it re-checks the preceding glBindBuffer call; likely intended to live inside the
      // branch — confirm before moving.
      GLError.maybeThrowGLException("Failed to populate buffer object", "glBufferData");
    } catch (Throwable t) {
      free();
      throw t;
    }
  }

  // Replaces the buffer contents. A null or empty `entries` just sets the logical size to 0
  // without touching the GPU allocation. Reuses the allocation via glBufferSubData when the new
  // data fits; reallocates (and grows capacity) otherwise.
  public void set(Buffer entries) {
    // Some GPU drivers will fail with out of memory errors if glBufferData or glBufferSubData is
    // called with a size of 0, so avoid this case.
    if (entries == null || entries.limit() == 0) {
      size = 0;
      return;
    }
    if (!entries.isDirect()) {
      throw new IllegalArgumentException("If non-null, entries buffer must be a direct buffer");
    }
    GLES30.glBindBuffer(target, bufferId[0]);
    GLError.maybeThrowGLException("Failed to bind vertex buffer object", "glBindBuffer");
    entries.rewind();
    if (entries.limit() <= capacity) {
      GLES30.glBufferSubData(target, 0, entries.limit() * numberOfBytesPerEntry, entries);
      GLError.maybeThrowGLException("Failed to populate vertex buffer object", "glBufferSubData");
      size = entries.limit();
    } else {
      GLES30.glBufferData(
          target, entries.limit() * numberOfBytesPerEntry, entries, GLES30.GL_DYNAMIC_DRAW);
      GLError.maybeThrowGLException("Failed to populate vertex buffer object", "glBufferData");
      size = entries.limit();
      capacity = entries.limit();
    }
  }

  // Deletes the GL buffer object. Safe to call multiple times; the ID is zeroed after deletion.
  public void free() {
    if (bufferId[0] != 0) {
      GLES30.glDeleteBuffers(1, bufferId, 0);
      GLError.maybeLogGLError(Log.WARN, TAG, "Failed to free buffer object", "glDeleteBuffers");
      bufferId[0] = 0;
    }
  }

  // Returns the native GL buffer ID (0 if freed).
  public int getBufferId() {
    return bufferId[0];
  }

  // Returns the number of entries currently in use.
  public int getSize() {
    return size;
  }
}
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.thetrek.common.samplerender;
import android.opengl.GLES30;
import java.io.Closeable;
import java.nio.IntBuffer;
/**
* A list of vertex indices stored GPU-side.
*
* <p>When constructing a {@link Mesh}, an {@link IndexBuffer} may be passed to describe the
* ordering of vertices when drawing each primitive.
*
* @see <a
* href="https://www.khronos.org/registry/OpenGL-Refpages/es3.0/html/glDrawElements.xhtml">glDrawElements</a>
*/
/**
 * A list of vertex indices stored GPU-side.
 *
 * <p>When constructing a {@link Mesh}, an {@link IndexBuffer} may be passed to describe the
 * ordering of vertices when drawing each primitive.
 *
 * @see <a
 *     href="https://www.khronos.org/registry/OpenGL-Refpages/es3.0/html/glDrawElements.xhtml">glDrawElements</a>
 */
public class IndexBuffer implements Closeable {
  // All GL interaction is delegated to this element-array GpuBuffer.
  private final GpuBuffer buffer;

  /**
   * Construct an {@link IndexBuffer} populated with initial data.
   *
   * <p>The GPU buffer is filled with the contents of the <i>direct</i> buffer {@code entries},
   * read from the start of the buffer regardless of its current cursor position; the cursor is
   * left in an undefined position afterwards.
   *
   * <p>A null {@code entries} produces an empty buffer.
   */
  public IndexBuffer(SampleRender render, IntBuffer entries) {
    this.buffer = new GpuBuffer(GLES30.GL_ELEMENT_ARRAY_BUFFER, GpuBuffer.INT_SIZE, entries);
  }

  /**
   * Populate with new data.
   *
   * <p>The previous contents are entirely replaced by the <i>direct</i> buffer {@code entries},
   * read from the start of the buffer regardless of its current cursor position; the cursor is
   * left in an undefined position afterwards. The GPU allocation grows automatically if needed.
   *
   * <p>A null {@code entries} empties the buffer.
   */
  public void set(IntBuffer entries) {
    this.buffer.set(entries);
  }

  /** Frees the underlying GPU buffer. */
  @Override
  public void close() {
    this.buffer.free();
  }

  /* package-private */
  int getBufferId() {
    return this.buffer.getBufferId();
  }

  /* package-private */
  int getSize() {
    return this.buffer.getSize();
  }
}
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.thetrek.common.samplerender;
import android.opengl.GLES30;
import android.util.Log;
import de.javagl.obj.Obj;
import de.javagl.obj.ObjData;
import de.javagl.obj.ObjReader;
import de.javagl.obj.ObjUtils;
import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
/**
* A collection of vertices, faces, and other attributes that define how to render a 3D object.
*
* <p>To render the mesh, use {@link SampleRender#draw()}.
*/
/**
 * A collection of vertices, faces, and other attributes that define how to render a 3D object.
 *
 * <p>To render the mesh, use {@link SampleRender#draw()}.
 */
public class Mesh implements Closeable {
  private static final String TAG = Mesh.class.getSimpleName();

  /**
   * The kind of primitive to render.
   *
   * <p>This determines how the data in {@link VertexBuffer}s are interpreted. See <a
   * href="https://www.khronos.org/opengl/wiki/Primitive">here</a> for more on how primitives
   * behave.
   */
  public enum PrimitiveMode {
    POINTS(GLES30.GL_POINTS),
    LINE_STRIP(GLES30.GL_LINE_STRIP),
    LINE_LOOP(GLES30.GL_LINE_LOOP),
    LINES(GLES30.GL_LINES),
    TRIANGLE_STRIP(GLES30.GL_TRIANGLE_STRIP),
    TRIANGLE_FAN(GLES30.GL_TRIANGLE_FAN),
    TRIANGLES(GLES30.GL_TRIANGLES);

    /* package-private */
    final int glesEnum;

    private PrimitiveMode(int glesEnum) {
      this.glesEnum = glesEnum;
    }
  }

  // One-element array so the ID can be passed to glGenVertexArrays/glDeleteVertexArrays;
  // 0 means the mesh has been freed.
  private final int[] vertexArrayId = {0};
  private final PrimitiveMode primitiveMode;
  // May be null, in which case lowLevelDraw() falls back to glDrawArrays.
  private final IndexBuffer indexBuffer;
  private final VertexBuffer[] vertexBuffers;

  /**
   * Construct a {@link Mesh}.
   *
   * <p>The data in the given {@link IndexBuffer} and {@link VertexBuffer}s does not need to be
   * finalized; they may be freely changed throughout the lifetime of a {@link Mesh} using their
   * respective {@code set()} methods.
   *
   * <p>The ordering of the {@code vertexBuffers} is significant. Their array indices will
   * correspond to their attribute locations, which must be taken into account in shader code. The
   * <a href="https://www.khronos.org/opengl/wiki/Layout_Qualifier_(GLSL)">layout qualifier</a> must
   * be used in the vertex shader code to explicitly associate attributes with these indices.
   */
  public Mesh(
      SampleRender render,
      PrimitiveMode primitiveMode,
      IndexBuffer indexBuffer,
      VertexBuffer[] vertexBuffers) {
    if (vertexBuffers == null || vertexBuffers.length == 0) {
      throw new IllegalArgumentException("Must pass at least one vertex buffer");
    }
    this.primitiveMode = primitiveMode;
    this.indexBuffer = indexBuffer;
    this.vertexBuffers = vertexBuffers;
    try {
      // Create vertex array
      GLES30.glGenVertexArrays(1, vertexArrayId, 0);
      GLError.maybeThrowGLException("Failed to generate a vertex array", "glGenVertexArrays");
      // Bind vertex array
      GLES30.glBindVertexArray(vertexArrayId[0]);
      GLError.maybeThrowGLException("Failed to bind vertex array object", "glBindVertexArray");
      if (indexBuffer != null) {
        GLES30.glBindBuffer(GLES30.GL_ELEMENT_ARRAY_BUFFER, indexBuffer.getBufferId());
      }
      for (int i = 0; i < vertexBuffers.length; ++i) {
        // Bind each vertex buffer to vertex array; the array index i becomes the attribute
        // location referenced by the vertex shader's layout qualifier.
        GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER, vertexBuffers[i].getBufferId());
        GLError.maybeThrowGLException("Failed to bind vertex buffer", "glBindBuffer");
        GLES30.glVertexAttribPointer(
            i, vertexBuffers[i].getNumberOfEntriesPerVertex(), GLES30.GL_FLOAT, false, 0, 0);
        GLError.maybeThrowGLException(
            "Failed to associate vertex buffer with vertex array", "glVertexAttribPointer");
        GLES30.glEnableVertexAttribArray(i);
        GLError.maybeThrowGLException(
            "Failed to enable vertex buffer", "glEnableVertexAttribArray");
      }
    } catch (Throwable t) {
      close();
      throw t;
    }
  }

  /**
   * Constructs a {@link Mesh} from the given Wavefront OBJ file.
   *
   * <p>The {@link Mesh} will be constructed with three attributes, indexed in the order of local
   * coordinates (location 0, vec3), texture coordinates (location 1, vec2), and vertex normals
   * (location 2, vec3).
   */
  public static Mesh createFromAsset(SampleRender render, String assetFileName) throws IOException {
    try (InputStream inputStream = render.getAssets().open(assetFileName)) {
      Obj obj = ObjUtils.convertToRenderable(ObjReader.read(inputStream));
      // Obtain the data from the OBJ, as direct buffers:
      IntBuffer vertexIndices = ObjData.getFaceVertexIndices(obj, /*numVerticesPerFace=*/ 3);
      FloatBuffer localCoordinates = ObjData.getVertices(obj);
      FloatBuffer textureCoordinates = ObjData.getTexCoords(obj, /*dimensions=*/ 2);
      FloatBuffer normals = ObjData.getNormals(obj);
      VertexBuffer[] vertexBuffers = {
        new VertexBuffer(render, 3, localCoordinates),
        new VertexBuffer(render, 2, textureCoordinates),
        new VertexBuffer(render, 3, normals),
      };
      IndexBuffer indexBuffer = new IndexBuffer(render, vertexIndices);
      return new Mesh(render, PrimitiveMode.TRIANGLES, indexBuffer, vertexBuffers);
    }
  }

  /** Frees the vertex array object. Safe to call multiple times. */
  @Override
  public void close() {
    if (vertexArrayId[0] != 0) {
      GLES30.glDeleteVertexArrays(1, vertexArrayId, 0);
      GLError.maybeLogGLError(
          Log.WARN, TAG, "Failed to free vertex array object", "glDeleteVertexArrays");
      // Zero the ID so a second close() is a no-op and lowLevelDraw() can detect a freed mesh,
      // consistent with Framebuffer, Texture, and GpuBuffer.
      vertexArrayId[0] = 0;
    }
  }

  /**
   * Draws the mesh. Don't call this directly unless you are doing low level OpenGL code; instead,
   * prefer {@link SampleRender#draw}.
   */
  public void lowLevelDraw() {
    if (vertexArrayId[0] == 0) {
      throw new IllegalStateException("Tried to draw a freed Mesh");
    }
    GLES30.glBindVertexArray(vertexArrayId[0]);
    GLError.maybeThrowGLException("Failed to bind vertex array object", "glBindVertexArray");
    if (indexBuffer == null) {
      // Sanity check for debugging: without an index buffer, all vertex buffers must agree on
      // the number of vertices to draw.
      int vertexCount = vertexBuffers[0].getNumberOfVertices();
      for (int i = 1; i < vertexBuffers.length; ++i) {
        int iterCount = vertexBuffers[i].getNumberOfVertices();
        if (iterCount != vertexCount) {
          throw new IllegalStateException(
              String.format(
                  "Vertex buffers have mismatching numbers of vertices ([0] has %d but [%d] has"
                      + " %d)",
                  vertexCount, i, iterCount));
        }
      }
      GLES30.glDrawArrays(primitiveMode.glesEnum, 0, vertexCount);
      GLError.maybeThrowGLException("Failed to draw vertex array object", "glDrawArrays");
    } else {
      GLES30.glDrawElements(
          primitiveMode.glesEnum, indexBuffer.getSize(), GLES30.GL_UNSIGNED_INT, 0);
      GLError.maybeThrowGLException(
          "Failed to draw vertex array object with indices", "glDrawElements");
    }
  }
}
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.thetrek.common.samplerender;
import android.content.res.AssetManager;
import android.opengl.GLES30;
import android.opengl.GLSurfaceView;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
/** A SampleRender context. */
/** A SampleRender context. */
public class SampleRender {
  private static final String TAG = SampleRender.class.getSimpleName();

  private final AssetManager assetManager;

  // Last known surface dimensions, updated from onSurfaceChanged and used as the viewport when
  // drawing to the default framebuffer. Initialized to 1x1 until the first surface change.
  // NOTE(review): written on the GL thread and read in useFramebuffer (also GL thread) — appears
  // single-threaded, but confirm no other thread reads these.
  private int viewportWidth = 1;
  private int viewportHeight = 1;

  /**
   * Constructs a SampleRender object and instantiates GLSurfaceView parameters.
   *
   * @param glSurfaceView Android GLSurfaceView
   * @param renderer Renderer implementation to receive callbacks
   * @param assetManager AssetManager for loading Android resources
   */
  public SampleRender(GLSurfaceView glSurfaceView, Renderer renderer, AssetManager assetManager) {
    this.assetManager = assetManager;
    glSurfaceView.setPreserveEGLContextOnPause(true);
    // OpenGL ES 3.x context with an RGBA8888 color buffer and a 16-bit depth buffer.
    glSurfaceView.setEGLContextClientVersion(3);
    glSurfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
    glSurfaceView.setRenderer(
        new GLSurfaceView.Renderer() {
          @Override
          public void onSurfaceCreated(GL10 gl, EGLConfig config) {
            // Blending is enabled once for the whole context before forwarding to the client.
            GLES30.glEnable(GLES30.GL_BLEND);
            GLError.maybeThrowGLException("Failed to enable blending", "glEnable");
            renderer.onSurfaceCreated(SampleRender.this);
          }

          @Override
          public void onSurfaceChanged(GL10 gl, int w, int h) {
            viewportWidth = w;
            viewportHeight = h;
            renderer.onSurfaceChanged(SampleRender.this, w, h);
          }

          @Override
          public void onDrawFrame(GL10 gl) {
            // Clear the default framebuffer to opaque black before each client frame.
            clear(/*framebuffer=*/ null, 0f, 0f, 0f, 1f);
            renderer.onDrawFrame(SampleRender.this);
          }
        });
    glSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
    glSurfaceView.setWillNotDraw(false);
  }

  /** Draw a {@link Mesh} with the specified {@link Shader}. */
  public void draw(Mesh mesh, Shader shader) {
    draw(mesh, shader, /*framebuffer=*/ null);
  }

  /**
   * Draw a {@link Mesh} with the specified {@link Shader} to the given {@link Framebuffer}.
   *
   * <p>The {@code framebuffer} argument may be null, in which case the default framebuffer is used.
   */
  public void draw(Mesh mesh, Shader shader, Framebuffer framebuffer) {
    useFramebuffer(framebuffer);
    shader.lowLevelUse();
    mesh.lowLevelDraw();
  }

  /**
   * Clear the given framebuffer.
   *
   * <p>The {@code framebuffer} argument may be null, in which case the default framebuffer is
   * cleared.
   */
  public void clear(Framebuffer framebuffer, float r, float g, float b, float a) {
    useFramebuffer(framebuffer);
    GLES30.glClearColor(r, g, b, a);
    GLError.maybeThrowGLException("Failed to set clear color", "glClearColor");
    // Depth writes must be enabled for glClear to affect the depth buffer.
    GLES30.glDepthMask(true);
    GLError.maybeThrowGLException("Failed to set depth write mask", "glDepthMask");
    GLES30.glClear(GLES30.GL_COLOR_BUFFER_BIT | GLES30.GL_DEPTH_BUFFER_BIT);
    GLError.maybeThrowGLException("Failed to clear framebuffer", "glClear");
  }

  /** Interface to be implemented for rendering callbacks. */
  public static interface Renderer {
    /**
     * Called by {@link SampleRender} when the GL render surface is created.
     *
     * <p>See {@link GLSurfaceView.Renderer#onSurfaceCreated}.
     */
    public void onSurfaceCreated(SampleRender render);

    /**
     * Called by {@link SampleRender} when the GL render surface dimensions are changed.
     *
     * <p>See {@link GLSurfaceView.Renderer#onSurfaceChanged}.
     */
    public void onSurfaceChanged(SampleRender render, int width, int height);

    /**
     * Called by {@link SampleRender} when a GL frame is to be rendered.
     *
     * <p>See {@link GLSurfaceView.Renderer#onDrawFrame}.
     */
    public void onDrawFrame(SampleRender render);
  }

  /* package-private */
  AssetManager getAssets() {
    return assetManager;
  }

  // Binds the given framebuffer (or the default framebuffer when null) and sets the viewport to
  // match its dimensions.
  private void useFramebuffer(Framebuffer framebuffer) {
    int framebufferId;
    int viewportWidth;
    int viewportHeight;
    if (framebuffer == null) {
      framebufferId = 0;
      viewportWidth = this.viewportWidth;
      viewportHeight = this.viewportHeight;
    } else {
      framebufferId = framebuffer.getFramebufferId();
      viewportWidth = framebuffer.getWidth();
      viewportHeight = framebuffer.getHeight();
    }
    GLES30.glBindFramebuffer(GLES30.GL_FRAMEBUFFER, framebufferId);
    GLError.maybeThrowGLException("Failed to bind framebuffer", "glBindFramebuffer");
    GLES30.glViewport(0, 0, viewportWidth, viewportHeight);
    GLError.maybeThrowGLException("Failed to set viewport dimensions", "glViewport");
  }
}
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.thetrek.common.samplerender;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLES11Ext;
import android.opengl.GLES30;
import android.util.Log;
import java.io.Closeable;
import java.io.IOException;
import java.nio.ByteBuffer;
/** A GPU-side texture. */
/** A GPU-side texture. */
public class Texture implements Closeable {
  private static final String TAG = Texture.class.getSimpleName();

  // One-element array so the ID can be passed to glGenTextures/glDeleteTextures; 0 means freed.
  private final int[] textureId = {0};
  private final Target target;

  /**
   * Describes the way the texture's edges are rendered.
   *
   * @see <a
   *     href="https://www.khronos.org/registry/OpenGL-Refpages/es3.0/html/glTexParameter.xhtml">GL_TEXTURE_WRAP_S</a>.
   */
  public enum WrapMode {
    CLAMP_TO_EDGE(GLES30.GL_CLAMP_TO_EDGE),
    MIRRORED_REPEAT(GLES30.GL_MIRRORED_REPEAT),
    REPEAT(GLES30.GL_REPEAT);

    /* package-private */
    final int glesEnum;

    private WrapMode(int glesEnum) {
      this.glesEnum = glesEnum;
    }
  }

  /**
   * Describes the target this texture is bound to.
   *
   * @see <a
   *     href="https://www.khronos.org/registry/OpenGL-Refpages/es3.0/html/glBindTexture.xhtml">glBindTexture</a>.
   */
  public enum Target {
    TEXTURE_2D(GLES30.GL_TEXTURE_2D),
    TEXTURE_EXTERNAL_OES(GLES11Ext.GL_TEXTURE_EXTERNAL_OES),
    TEXTURE_CUBE_MAP(GLES30.GL_TEXTURE_CUBE_MAP);

    final int glesEnum;

    private Target(int glesEnum) {
      this.glesEnum = glesEnum;
    }
  }

  /**
   * Describes the color format of the texture.
   *
   * @see <a
   *     href="https://www.khronos.org/registry/OpenGL-Refpages/es3.0/html/glTexImage2D.xhtml">glTexImage2d</a>.
   */
  public enum ColorFormat {
    LINEAR(GLES30.GL_RGBA8),
    SRGB(GLES30.GL_SRGB8_ALPHA8);

    final int glesEnum;

    private ColorFormat(int glesEnum) {
      this.glesEnum = glesEnum;
    }
  }

  /**
   * Construct an empty {@link Texture}.
   *
   * <p>Since {@link Texture}s created in this way are not populated with data, this method is
   * mostly only useful for creating {@link Target.TEXTURE_EXTERNAL_OES} textures. See {@link
   * #createFromAsset} if you want a texture with data.
   */
  public Texture(SampleRender render, Target target, WrapMode wrapMode) {
    this(render, target, wrapMode, /*useMipmaps=*/ true);
  }

  /**
   * Construct an empty {@link Texture} with explicit mipmap control.
   *
   * <p>Generates a GL texture object and configures its filtering and wrap parameters; no pixel
   * data is uploaded.
   */
  public Texture(SampleRender render, Target target, WrapMode wrapMode, boolean useMipmaps) {
    this.target = target;
    GLES30.glGenTextures(1, textureId, 0);
    GLError.maybeThrowGLException("Texture creation failed", "glGenTextures");
    int minFilter = useMipmaps ? GLES30.GL_LINEAR_MIPMAP_LINEAR : GLES30.GL_LINEAR;
    try {
      GLES30.glBindTexture(target.glesEnum, textureId[0]);
      GLError.maybeThrowGLException("Failed to bind texture", "glBindTexture");
      GLES30.glTexParameteri(target.glesEnum, GLES30.GL_TEXTURE_MIN_FILTER, minFilter);
      GLError.maybeThrowGLException("Failed to set texture parameter", "glTexParameteri");
      GLES30.glTexParameteri(target.glesEnum, GLES30.GL_TEXTURE_MAG_FILTER, GLES30.GL_LINEAR);
      GLError.maybeThrowGLException("Failed to set texture parameter", "glTexParameteri");
      GLES30.glTexParameteri(target.glesEnum, GLES30.GL_TEXTURE_WRAP_S, wrapMode.glesEnum);
      GLError.maybeThrowGLException("Failed to set texture parameter", "glTexParameteri");
      GLES30.glTexParameteri(target.glesEnum, GLES30.GL_TEXTURE_WRAP_T, wrapMode.glesEnum);
      GLError.maybeThrowGLException("Failed to set texture parameter", "glTexParameteri");
    } catch (Throwable t) {
      close();
      throw t;
    }
  }

  /**
   * Create a texture from the given asset file name.
   *
   * @throws IOException if the asset cannot be opened or cannot be decoded as a bitmap
   */
  public static Texture createFromAsset(
      SampleRender render, String assetFileName, WrapMode wrapMode, ColorFormat colorFormat)
      throws IOException {
    Texture texture = new Texture(render, Target.TEXTURE_2D, wrapMode);
    Bitmap bitmap = null;
    try {
      // The following lines up to glTexImage2D could technically be replaced with
      // GLUtils.texImage2d, but this method does not allow for loading sRGB images.
      // Load and convert the bitmap and copy its contents to a direct ByteBuffer. Despite its name,
      // the ARGB_8888 config is actually stored in RGBA order.
      Bitmap decoded = BitmapFactory.decodeStream(render.getAssets().open(assetFileName));
      // decodeStream returns null (rather than throwing) when the data is not a decodable image;
      // surface that as an IOException instead of an NPE further down.
      if (decoded == null) {
        throw new IOException("Failed to decode bitmap from asset: " + assetFileName);
      }
      bitmap = convertBitmapToConfig(decoded, Bitmap.Config.ARGB_8888);
      ByteBuffer buffer = ByteBuffer.allocateDirect(bitmap.getByteCount());
      bitmap.copyPixelsToBuffer(buffer);
      buffer.rewind();
      GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, texture.getTextureId());
      GLError.maybeThrowGLException("Failed to bind texture", "glBindTexture");
      GLES30.glTexImage2D(
          GLES30.GL_TEXTURE_2D,
          /*level=*/ 0,
          colorFormat.glesEnum,
          bitmap.getWidth(),
          bitmap.getHeight(),
          /*border=*/ 0,
          GLES30.GL_RGBA,
          GLES30.GL_UNSIGNED_BYTE,
          buffer);
      GLError.maybeThrowGLException("Failed to populate texture data", "glTexImage2D");
      GLES30.glGenerateMipmap(GLES30.GL_TEXTURE_2D);
      GLError.maybeThrowGLException("Failed to generate mipmaps", "glGenerateMipmap");
    } catch (Throwable t) {
      texture.close();
      throw t;
    } finally {
      if (bitmap != null) {
        bitmap.recycle();
      }
    }
    return texture;
  }

  /** Frees the GL texture object. Safe to call multiple times. */
  @Override
  public void close() {
    if (textureId[0] != 0) {
      GLES30.glDeleteTextures(1, textureId, 0);
      GLError.maybeLogGLError(Log.WARN, TAG, "Failed to free texture", "glDeleteTextures");
      textureId[0] = 0;
    }
  }

  /** Retrieve the native texture ID. */
  public int getTextureId() {
    return textureId[0];
  }

  /* package-private */
  Target getTarget() {
    return target;
  }

  // Returns a bitmap in the requested config, recycling the input when a copy was needed.
  private static Bitmap convertBitmapToConfig(Bitmap bitmap, Bitmap.Config config) {
    // We use this method instead of BitmapFactory.Options.outConfig to support a minimum of Android
    // API level 24.
    if (bitmap.getConfig() == config) {
      return bitmap;
    }
    Bitmap result = bitmap.copy(config, /*isMutable=*/ false);
    bitmap.recycle();
    return result;
  }
}
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.thetrek.common.samplerender;
import android.opengl.GLES30;
import java.io.Closeable;
import java.nio.FloatBuffer;
/**
* A list of vertex attribute data stored GPU-side.
*
* <p>One or more {@link VertexBuffer}s are used when constructing a {@link Mesh} to describe vertex
* attribute data; for example, local coordinates, texture coordinates, vertex normals, etc.
*
* @see <a
* href="https://www.khronos.org/registry/OpenGL-Refpages/es3.0/html/glVertexAttribPointer.xhtml">glVertexAttribPointer</a>
*/
/**
 * A list of vertex attribute data stored GPU-side.
 *
 * <p>One or more {@link VertexBuffer}s are used when constructing a {@link Mesh} to describe vertex
 * attribute data; for example, local coordinates, texture coordinates, vertex normals, etc.
 *
 * @see <a
 *     href="https://www.khronos.org/registry/OpenGL-Refpages/es3.0/html/glVertexAttribPointer.xhtml">glVertexAttribPointer</a>
 */
public class VertexBuffer implements Closeable {
  private final GpuBuffer buffer;
  private final int numberOfEntriesPerVertex;

  /**
   * Construct a {@link VertexBuffer} populated with initial data.
   *
   * <p>The GPU buffer will be filled with the data in the <i>direct</i> buffer {@code entries},
   * starting from the beginning of the buffer (not the current cursor position). The cursor will be
   * left in an undefined position after this function returns.
   *
   * <p>The number of vertices in the buffer can be expressed as {@code entries.limit() /
   * numberOfEntriesPerVertex}. Thus, The size of the buffer must be divisible by {@code
   * numberOfEntriesPerVertex}.
   *
   * <p>The {@code entries} buffer may be null, in which case an empty buffer is constructed
   * instead.
   *
   * @throws IllegalArgumentException if {@code entries} is non-null and its size is not divisible
   *     by {@code numberOfEntriesPerVertex}
   */
  public VertexBuffer(SampleRender render, int numberOfEntriesPerVertex, FloatBuffer entries) {
    checkDivisibility(entries, numberOfEntriesPerVertex);
    this.numberOfEntriesPerVertex = numberOfEntriesPerVertex;
    buffer = new GpuBuffer(GLES30.GL_ARRAY_BUFFER, GpuBuffer.FLOAT_SIZE, entries);
  }

  /**
   * Populate with new data.
   *
   * <p>The entire buffer is replaced by the contents of the <i>direct</i> buffer {@code entries}
   * starting from the beginning of the buffer, not the current cursor position. The cursor will be
   * left in an undefined position after this function returns.
   *
   * <p>The GPU buffer is reallocated automatically if necessary.
   *
   * <p>The {@code entries} buffer may be null, in which case the buffer will become empty.
   * Otherwise, the size of {@code entries} must be divisible by the number of entries per vertex
   * specified during construction.
   *
   * @throws IllegalArgumentException if {@code entries} is non-null and its size is not divisible
   *     by the number of entries per vertex specified during construction
   */
  public void set(FloatBuffer entries) {
    checkDivisibility(entries, numberOfEntriesPerVertex);
    buffer.set(entries);
  }

  // Shared precondition for the constructor and set(): a non-null buffer must hold a whole
  // number of vertices.
  private static void checkDivisibility(FloatBuffer entries, int numberOfEntriesPerVertex) {
    if (entries != null && entries.limit() % numberOfEntriesPerVertex != 0) {
      throw new IllegalArgumentException(
          "If non-null, vertex buffer data must be divisible by the number of data points per"
              + " vertex");
    }
  }

  /** Frees the underlying GPU buffer. */
  @Override
  public void close() {
    buffer.free();
  }

  /* package-private */
  int getBufferId() {
    return buffer.getBufferId();
  }

  /* package-private */
  int getNumberOfEntriesPerVertex() {
    return numberOfEntriesPerVertex;
  }

  /* package-private */
  int getNumberOfVertices() {
    return buffer.getSize() / numberOfEntriesPerVertex;
  }
}
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment