Commit b15f897a authored by Shalitha Deshan Jayasekara

Merge branch 'master' into 'IT18523256_GunarathnaK.A.G.I.P.T'

# Conflicts:
#   .idea/misc.xml
parents e7d98e20 3a84bd31
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# code editor file
.idea
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# Default ignored files
/workspace.xml
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.7" project-jdk-type="Python SDK" />
<component name="PyCharmProfessionalAdvertiser">
<option name="shown" value="true" />
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/2021-210.iml" filepath="$PROJECT_DIR$/.idea/2021-210.iml" />
</modules>
</component>
</project>
\ No newline at end of file
# This file is used by the build system to adjust CSS and JS output to support the specified browsers below.
# For additional information regarding the format and rule options, please see:
# https://github.com/browserslist/browserslist#queries
# For the full list of supported browsers by the Angular framework, please see:
# https://angular.io/guide/browser-support
# You can see what browsers were selected by your queries by running:
# npx browserslist
last 1 Chrome version
last 1 Firefox version
last 2 Edge major versions
last 2 Safari major versions
last 2 iOS major versions
Firefox ESR
not IE 11 # Angular supports IE 11 only as an opt-in. To opt-in, remove the 'not' prefix on this line.
# Editor configuration, see https://editorconfig.org
root = true
[*]
charset = utf-8
indent_style = space
indent_size = 2
insert_final_newline = true
trim_trailing_whitespace = true
[*.ts]
quote_type = single
[*.md]
max_line_length = off
trim_trailing_whitespace = false
{
"root": true,
"ignorePatterns": ["projects/**/*"],
"overrides": [
{
"files": ["*.ts"],
"parserOptions": {
"project": ["tsconfig.json", "e2e/tsconfig.json"],
"createDefaultProgram": true
},
"extends": [
"plugin:@angular-eslint/ng-cli-compat",
"plugin:@angular-eslint/ng-cli-compat--formatting-add-on",
"plugin:@angular-eslint/template/process-inline-templates"
],
"rules": {
"@angular-eslint/component-class-suffix": [
"error",
{
"suffixes": ["Page", "Component"]
}
],
"@angular-eslint/component-selector": [
"error",
{
"type": "element",
"prefix": "app",
"style": "kebab-case"
}
],
"@angular-eslint/directive-selector": [
"error",
{
"type": "attribute",
"prefix": "app",
"style": "camelCase"
}
]
}
},
{
"files": ["*.html"],
"extends": ["plugin:@angular-eslint/template/recommended"],
"rules": {}
}
]
}
# Specifies intentionally untracked files to ignore when using Git
# http://git-scm.com/docs/gitignore
*~
*.sw[mnpcod]
.tmp
*.tmp
*.tmp.*
*.sublime-project
*.sublime-workspace
.DS_Store
Thumbs.db
UserInterfaceState.xcuserstate
$RECYCLE.BIN/
*.log
log.txt
npm-debug.log*
/.idea
/.ionic
/.sass-cache
/.sourcemaps
/.versions
/.vscode
/coverage
/dist
/node_modules
/platforms
/plugins
/www
{
"$schema": "./node_modules/@angular/cli/lib/config/schema.json",
"version": 1,
"defaultProject": "app",
"newProjectRoot": "projects",
"projects": {
"app": {
"root": "",
"sourceRoot": "src",
"projectType": "application",
"prefix": "app",
"schematics": {},
"architect": {
"build": {
"builder": "@angular-devkit/build-angular:browser",
"options": {
"outputPath": "www",
"index": "src/index.html",
"main": "src/main.ts",
"polyfills": "src/polyfills.ts",
"tsConfig": "tsconfig.app.json",
"assets": [
{
"glob": "**/*",
"input": "src/assets",
"output": "assets"
},
{
"glob": "**/*.svg",
"input": "node_modules/ionicons/dist/ionicons/svg",
"output": "./svg"
}
],
"styles": ["src/theme/variables.scss", "src/global.scss"],
"scripts": [],
"aot": false,
"vendorChunk": true,
"extractLicenses": false,
"buildOptimizer": false,
"sourceMap": true,
"optimization": false,
"namedChunks": true
},
"configurations": {
"production": {
"fileReplacements": [
{
"replace": "src/environments/environment.ts",
"with": "src/environments/environment.prod.ts"
}
],
"optimization": true,
"outputHashing": "all",
"sourceMap": false,
"namedChunks": false,
"aot": true,
"extractLicenses": true,
"vendorChunk": false,
"buildOptimizer": true,
"budgets": [
{
"type": "initial",
"maximumWarning": "2mb",
"maximumError": "5mb"
}
]
},
"ci": {
"progress": false
}
}
},
"serve": {
"builder": "@angular-devkit/build-angular:dev-server",
"options": {
"browserTarget": "app:build"
},
"configurations": {
"production": {
"browserTarget": "app:build:production"
},
"ci": {
"progress": false
}
}
},
"extract-i18n": {
"builder": "@angular-devkit/build-angular:extract-i18n",
"options": {
"browserTarget": "app:build"
}
},
"test": {
"builder": "@angular-devkit/build-angular:karma",
"options": {
"main": "src/test.ts",
"polyfills": "src/polyfills.ts",
"tsConfig": "tsconfig.spec.json",
"karmaConfig": "karma.conf.js",
"styles": [],
"scripts": [],
"assets": [
{
"glob": "favicon.ico",
"input": "src/",
"output": "/"
},
{
"glob": "**/*",
"input": "src/assets",
"output": "/assets"
}
]
},
"configurations": {
"ci": {
"progress": false,
"watch": false
}
}
},
"lint": {
"builder": "@angular-eslint/builder:lint",
"options": {
"lintFilePatterns": [
"src/**/*.ts",
"src/**/*.html"
]
}
},
"e2e": {
"builder": "@angular-devkit/build-angular:protractor",
"options": {
"protractorConfig": "e2e/protractor.conf.js",
"devServerTarget": "app:serve"
},
"configurations": {
"production": {
"devServerTarget": "app:serve:production"
},
"ci": {
"devServerTarget": "app:serve:ci"
}
}
},
"ionic-cordova-build": {
"builder": "@ionic/angular-toolkit:cordova-build",
"options": {
"browserTarget": "app:build"
},
"configurations": {
"production": {
"browserTarget": "app:build:production"
}
}
},
"ionic-cordova-serve": {
"builder": "@ionic/angular-toolkit:cordova-serve",
"options": {
"cordovaBuildTarget": "app:ionic-cordova-build",
"devServerTarget": "app:serve"
},
"configurations": {
"production": {
"cordovaBuildTarget": "app:ionic-cordova-build:production",
"devServerTarget": "app:serve:production"
}
}
}
}
}
},
"cli": {
"defaultCollection": "@ionic/angular-toolkit"
},
"schematics": {
"@ionic/angular-toolkit:component": {
"styleext": "scss"
},
"@ionic/angular-toolkit:page": {
"styleext": "scss"
}
}
}
import { CapacitorConfig } from '@capacitor/cli';
const config: CapacitorConfig = {
appId: 'io.ionic.starter',
appName: 'ClientApp',
webDir: 'www',
bundledWebRuntime: false
};
export default config;
// @ts-check
// Protractor configuration file, see link for more information
// https://github.com/angular/protractor/blob/master/lib/config.ts
const { SpecReporter, StacktraceOption } = require('jasmine-spec-reporter');
/**
* @type { import("protractor").Config }
*/
exports.config = {
allScriptsTimeout: 11000,
specs: [
'./src/**/*.e2e-spec.ts'
],
capabilities: {
browserName: 'chrome'
},
directConnect: true,
SELENIUM_PROMISE_MANAGER: false,
baseUrl: 'http://localhost:4200/',
framework: 'jasmine',
jasmineNodeOpts: {
showColors: true,
defaultTimeoutInterval: 30000,
print: function() {}
},
onPrepare() {
require('ts-node').register({
project: require('path').join(__dirname, './tsconfig.json')
});
jasmine.getEnv().addReporter(new SpecReporter({
spec: {
displayStacktrace: StacktraceOption.PRETTY
}
}));
}
};
import { AppPage } from './app.po';
describe('new App', () => {
let page: AppPage;
beforeEach(() => {
page = new AppPage();
});
it('should be blank', () => {
page.navigateTo();
expect(page.getParagraphText()).toContain('Start with Ionic UI Components');
});
});
import { browser, by, element } from 'protractor';
export class AppPage {
navigateTo() {
return browser.get('/');
}
getParagraphText() {
return element(by.deepCss('app-root ion-content')).getText();
}
}
{
"extends": "../tsconfig.json",
"compilerOptions": {
"outDir": "../out-tsc/e2e",
"module": "commonjs",
"target": "es2018",
"types": [
"jasmine",
"node"
]
}
}
{
"name": "ClientApp",
"integrations": {
"capacitor": {}
},
"type": "angular"
}
// Karma configuration file, see link for more information
// https://karma-runner.github.io/1.0/config/configuration-file.html
module.exports = function (config) {
config.set({
basePath: '',
frameworks: ['jasmine', '@angular-devkit/build-angular'],
plugins: [
require('karma-jasmine'),
require('karma-chrome-launcher'),
require('karma-jasmine-html-reporter'),
require('karma-coverage'),
require('@angular-devkit/build-angular/plugins/karma')
],
client: {
jasmine: {
// you can add configuration options for Jasmine here
// the possible options are listed at https://jasmine.github.io/api/edge/Configuration.html
// for example, you can disable the random execution with `random: false`
// or set a specific seed with `seed: 4321`
},
clearContext: false // leave Jasmine Spec Runner output visible in browser
},
jasmineHtmlReporter: {
suppressAll: true // removes the duplicated traces
},
coverageReporter: {
dir: require('path').join(__dirname, './coverage/ngv'),
subdir: '.',
reporters: [
{ type: 'html' },
{ type: 'text-summary' }
]
},
reporters: ['progress', 'kjhtml'],
port: 9876,
colors: true,
logLevel: config.LOG_INFO,
autoWatch: true,
browsers: ['Chrome'],
singleRun: false,
restartOnFileChange: true
});
};
{
"name": "ClientApp",
"version": "0.0.1",
"author": "Ionic Framework",
"homepage": "https://ionicframework.com/",
"scripts": {
"ng": "ng",
"start": "ng serve",
"build": "ng build",
"test": "ng test",
"lint": "ng lint",
"e2e": "ng e2e"
},
"private": true,
"dependencies": {
"@angular/common": "~12.1.1",
"@angular/compiler": "~12.1.1",
"@angular/core": "~12.1.1",
"@angular/forms": "~12.1.1",
"@angular/platform-browser": "~12.1.1",
"@angular/platform-browser-dynamic": "~12.1.1",
"@angular/router": "~12.1.1",
"@capacitor/app": "1.0.2",
"@capacitor/core": "3.2.0",
"@capacitor/haptics": "1.0.2",
"@capacitor/keyboard": "1.0.2",
"@capacitor/status-bar": "1.0.2",
"@ionic/angular": "^5.5.2",
"rxjs": "~6.6.0",
"tslib": "^2.2.0",
"zone.js": "~0.11.4"
},
"devDependencies": {
"@angular-devkit/build-angular": "~12.1.1",
"@angular-eslint/builder": "~12.0.0",
"@angular-eslint/eslint-plugin": "~12.0.0",
"@angular-eslint/eslint-plugin-template": "~12.0.0",
"@angular-eslint/template-parser": "~12.0.0",
"@angular/cli": "~12.1.1",
"@angular/compiler": "~12.1.1",
"@angular/compiler-cli": "~12.1.1",
"@angular/language-service": "~12.0.1",
"@capacitor/cli": "3.2.0",
"@ionic/angular-toolkit": "^4.0.0",
"@types/jasmine": "~3.6.0",
"@types/jasminewd2": "~2.0.3",
"@types/node": "^12.11.1",
"@typescript-eslint/eslint-plugin": "4.16.1",
"@typescript-eslint/parser": "4.16.1",
"eslint": "^7.6.0",
"eslint-plugin-import": "2.22.1",
"eslint-plugin-jsdoc": "30.7.6",
"eslint-plugin-prefer-arrow": "1.2.2",
"jasmine-core": "~3.8.0",
"jasmine-spec-reporter": "~5.0.0",
"karma": "~6.3.2",
"karma-chrome-launcher": "~3.1.0",
"karma-coverage": "~2.0.3",
"karma-coverage-istanbul-reporter": "~3.0.2",
"karma-jasmine": "~4.0.0",
"karma-jasmine-html-reporter": "^1.5.0",
"protractor": "~7.0.0",
"ts-node": "~8.3.0",
"typescript": "~4.2.4"
},
"description": "An Ionic project"
}
import { NgModule } from '@angular/core';
import { PreloadAllModules, RouterModule, Routes } from '@angular/router';
const routes: Routes = [
{
path: 'home',
loadChildren: () => import('./home/home.module').then( m => m.HomePageModule)
},
{
path: '',
redirectTo: 'home',
pathMatch: 'full'
},
];
@NgModule({
imports: [
RouterModule.forRoot(routes, { preloadingStrategy: PreloadAllModules })
],
exports: [RouterModule]
})
export class AppRoutingModule { }
<ion-app>
<ion-router-outlet></ion-router-outlet>
</ion-app>
import { CUSTOM_ELEMENTS_SCHEMA } from '@angular/core';
import { TestBed, waitForAsync } from '@angular/core/testing';
import { AppComponent } from './app.component';
describe('AppComponent', () => {
beforeEach(waitForAsync(() => {
TestBed.configureTestingModule({
declarations: [AppComponent],
schemas: [CUSTOM_ELEMENTS_SCHEMA],
}).compileComponents();
}));
it('should create the app', () => {
const fixture = TestBed.createComponent(AppComponent);
const app = fixture.debugElement.componentInstance;
expect(app).toBeTruthy();
});
// TODO: add more tests!
});
import { Component } from '@angular/core';
@Component({
selector: 'app-root',
templateUrl: 'app.component.html',
styleUrls: ['app.component.scss'],
})
export class AppComponent {
constructor() {}
}
import { NgModule } from '@angular/core';
import { BrowserModule } from '@angular/platform-browser';
import { RouteReuseStrategy } from '@angular/router';
import { IonicModule, IonicRouteStrategy } from '@ionic/angular';
import { AppComponent } from './app.component';
import { AppRoutingModule } from './app-routing.module';
@NgModule({
declarations: [AppComponent],
entryComponents: [],
imports: [BrowserModule, IonicModule.forRoot(), AppRoutingModule],
providers: [{ provide: RouteReuseStrategy, useClass: IonicRouteStrategy }],
bootstrap: [AppComponent],
})
export class AppModule {}
import { NgModule } from '@angular/core';
import { RouterModule, Routes } from '@angular/router';
import { HomePage } from './home.page';
const routes: Routes = [
{
path: '',
component: HomePage,
}
];
@NgModule({
imports: [RouterModule.forChild(routes)],
exports: [RouterModule]
})
export class HomePageRoutingModule {}
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { IonicModule } from '@ionic/angular';
import { FormsModule } from '@angular/forms';
import { HomePage } from './home.page';
import { HomePageRoutingModule } from './home-routing.module';
@NgModule({
imports: [
CommonModule,
FormsModule,
IonicModule,
HomePageRoutingModule
],
declarations: [HomePage]
})
export class HomePageModule {}
#container {
text-align: center;
position: absolute;
left: 0;
right: 0;
top: 50%;
transform: translateY(-50%);
}
#container strong {
font-size: 20px;
line-height: 26px;
}
#container p {
font-size: 16px;
line-height: 22px;
color: #8c8c8c;
margin: 0;
}
#container a {
text-decoration: none;
}
\ No newline at end of file
import { ComponentFixture, TestBed, waitForAsync } from '@angular/core/testing';
import { IonicModule } from '@ionic/angular';
import { HomePage } from './home.page';
describe('HomePage', () => {
let component: HomePage;
let fixture: ComponentFixture<HomePage>;
beforeEach(waitForAsync(() => {
TestBed.configureTestingModule({
declarations: [ HomePage ],
imports: [IonicModule.forRoot()]
}).compileComponents();
fixture = TestBed.createComponent(HomePage);
component = fixture.componentInstance;
fixture.detectChanges();
}));
it('should create', () => {
expect(component).toBeTruthy();
});
});
import { Component } from '@angular/core';
@Component({
selector: 'app-home',
templateUrl: 'home.page.html',
styleUrls: ['home.page.scss'],
})
export class HomePage {
constructor() {}
}
<svg width="350" height="140" xmlns="http://www.w3.org/2000/svg" style="background:#f6f7f9"><g fill="none" fill-rule="evenodd"><path fill="#F04141" style="mix-blend-mode:multiply" d="M61.905-34.23l96.194 54.51-66.982 54.512L22 34.887z"/><circle fill="#10DC60" style="mix-blend-mode:multiply" cx="155.5" cy="135.5" r="57.5"/><path fill="#3880FF" style="mix-blend-mode:multiply" d="M208.538 9.513l84.417 15.392L223.93 93.93z"/><path fill="#FFCE00" style="mix-blend-mode:multiply" d="M268.625 106.557l46.332-26.75 46.332 26.75v53.5l-46.332 26.75-46.332-26.75z"/><circle fill="#7044FF" style="mix-blend-mode:multiply" cx="299.5" cy="9.5" r="38.5"/><rect fill="#11D3EA" style="mix-blend-mode:multiply" transform="rotate(-60 148.47 37.886)" x="143.372" y="-7.056" width="10.196" height="89.884" rx="5.098"/><path d="M-25.389 74.253l84.86 8.107c5.498.525 9.53 5.407 9.004 10.905a10 10 0 0 1-.057.477l-12.36 85.671a10.002 10.002 0 0 1-11.634 8.42l-86.351-15.226c-5.44-.959-9.07-6.145-8.112-11.584l13.851-78.551a10 10 0 0 1 10.799-8.219z" fill="#7044FF" style="mix-blend-mode:multiply"/><circle fill="#0CD1E8" style="mix-blend-mode:multiply" cx="273.5" cy="106.5" r="20.5"/></g></svg>
export const environment = {
production: true
};
// This file can be replaced during build by using the `fileReplacements` array.
// `ng build --prod` replaces `environment.ts` with `environment.prod.ts`.
// The list of file replacements can be found in `angular.json`.
export const environment = {
production: false
};
/*
* For easier debugging in development mode, you can import the following file
* to ignore zone related error stack frames such as `zone.run`, `zoneDelegate.invokeTask`.
*
* This import should be commented out in production mode because it will have a negative impact
* on performance if an error is thrown.
*/
// import 'zone.js/dist/zone-error'; // Included with Angular CLI.
/*
* App Global CSS
* ----------------------------------------------------------------------------
* Put style rules here that you want to apply globally. These styles are for
* the entire app and not just one component. Additionally, this file can be
* used as an entry point to import other CSS/Sass files to be included in the
* output CSS.
* For more information on global stylesheets, visit the documentation:
* https://ionicframework.com/docs/layout/global-stylesheets
*/
/* Core CSS required for Ionic components to work properly */
@import "~@ionic/angular/css/core.css";
/* Basic CSS for apps built with Ionic */
@import "~@ionic/angular/css/normalize.css";
@import "~@ionic/angular/css/structure.css";
@import "~@ionic/angular/css/typography.css";
@import '~@ionic/angular/css/display.css';
/* Optional CSS utils that can be commented out */
@import "~@ionic/angular/css/padding.css";
@import "~@ionic/angular/css/float-elements.css";
@import "~@ionic/angular/css/text-alignment.css";
@import "~@ionic/angular/css/text-transformation.css";
@import "~@ionic/angular/css/flex-utils.css";
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<title>Ionic App</title>
<base href="/" />
<meta name="color-scheme" content="light dark" />
<meta name="viewport" content="viewport-fit=cover, width=device-width, initial-scale=1.0, minimum-scale=1.0, maximum-scale=1.0, user-scalable=no" />
<meta name="format-detection" content="telephone=no" />
<meta name="msapplication-tap-highlight" content="no" />
<link rel="icon" type="image/png" href="assets/icon/favicon.png" />
<!-- add to homescreen for ios -->
<meta name="apple-mobile-web-app-capable" content="yes" />
<meta name="apple-mobile-web-app-status-bar-style" content="black" />
</head>
<body>
<app-root></app-root>
</body>
</html>
import { enableProdMode } from '@angular/core';
import { platformBrowserDynamic } from '@angular/platform-browser-dynamic';
import { AppModule } from './app/app.module';
import { environment } from './environments/environment';
if (environment.production) {
enableProdMode();
}
platformBrowserDynamic().bootstrapModule(AppModule)
.catch(err => console.log(err));
/**
* This file includes polyfills needed by Angular and is loaded before the app.
* You can add your own extra polyfills to this file.
*
* This file is divided into 2 sections:
* 1. Browser polyfills. These are applied before loading ZoneJS and are sorted by browsers.
* 2. Application imports. Files imported after ZoneJS that should be loaded before your main
* file.
*
* The current setup is for so-called "evergreen" browsers; the last versions of browsers that
* automatically update themselves. This includes Safari >= 10, Chrome >= 55 (including Opera),
* Edge >= 13 on the desktop, and iOS 10 and Chrome on mobile.
*
* Learn more in https://angular.io/guide/browser-support
*/
/***************************************************************************************************
* BROWSER POLYFILLS
*/
/** IE11 requires the following for NgClass support on SVG elements */
// import 'classlist.js'; // Run `npm install --save classlist.js`.
/**
* Web Animations `@angular/platform-browser/animations`
* Only required if AnimationBuilder is used within the application and using IE/Edge or Safari.
* Standard animation support in Angular DOES NOT require any polyfills (as of Angular 6.0).
*/
// import 'web-animations-js'; // Run `npm install --save web-animations-js`.
/**
* By default, zone.js will patch all possible macroTask and DomEvents
* user can disable parts of macroTask/DomEvents patch by setting following flags
* because those flags need to be set before `zone.js` being loaded, and webpack
* will put import in the top of bundle, so user need to create a separate file
* in this directory (for example: zone-flags.ts), and put the following flags
* into that file, and then add the following code before importing zone.js.
* import './zone-flags';
*
* The flags allowed in zone-flags.ts are listed here.
*
* The following flags will work for all browsers.
*
* (window as any).__Zone_disable_requestAnimationFrame = true; // disable patch requestAnimationFrame
* (window as any).__Zone_disable_on_property = true; // disable patch onProperty such as onclick
* (window as any).__zone_symbol__UNPATCHED_EVENTS = ['scroll', 'mousemove']; // disable patch specified eventNames
*
* in IE/Edge developer tools, the addEventListener will also be wrapped by zone.js
* with the following flag, it will bypass `zone.js` patch for IE/Edge
*
* (window as any).__Zone_enable_cross_context_check = true;
*
*/
import './zone-flags';
/***************************************************************************************************
* Zone JS is required by default for Angular itself.
*/
import 'zone.js/dist/zone'; // Included with Angular CLI.
/***************************************************************************************************
* APPLICATION IMPORTS
*/
// This file is required by karma.conf.js and loads recursively all the .spec and framework files
import 'zone.js/dist/zone-testing';
import { getTestBed } from '@angular/core/testing';
import {
BrowserDynamicTestingModule,
platformBrowserDynamicTesting
} from '@angular/platform-browser-dynamic/testing';
declare const require: {
context(path: string, deep?: boolean, filter?: RegExp): {
keys(): string[];
<T>(id: string): T;
};
};
// First, initialize the Angular testing environment.
getTestBed().initTestEnvironment(
BrowserDynamicTestingModule,
platformBrowserDynamicTesting()
);
// Then we find all the tests.
const context = require.context('./', true, /\.spec\.ts$/);
// And load the modules.
context.keys().map(context);
// Ionic Variables and Theming. For more info, please see:
// http://ionicframework.com/docs/theming/
/** Ionic CSS Variables **/
:root {
/** primary **/
--ion-color-primary: #3880ff;
--ion-color-primary-rgb: 56, 128, 255;
--ion-color-primary-contrast: #ffffff;
--ion-color-primary-contrast-rgb: 255, 255, 255;
--ion-color-primary-shade: #3171e0;
--ion-color-primary-tint: #4c8dff;
/** secondary **/
--ion-color-secondary: #3dc2ff;
--ion-color-secondary-rgb: 61, 194, 255;
--ion-color-secondary-contrast: #ffffff;
--ion-color-secondary-contrast-rgb: 255, 255, 255;
--ion-color-secondary-shade: #36abe0;
--ion-color-secondary-tint: #50c8ff;
/** tertiary **/
--ion-color-tertiary: #5260ff;
--ion-color-tertiary-rgb: 82, 96, 255;
--ion-color-tertiary-contrast: #ffffff;
--ion-color-tertiary-contrast-rgb: 255, 255, 255;
--ion-color-tertiary-shade: #4854e0;
--ion-color-tertiary-tint: #6370ff;
/** success **/
--ion-color-success: #2dd36f;
--ion-color-success-rgb: 45, 211, 111;
--ion-color-success-contrast: #ffffff;
--ion-color-success-contrast-rgb: 255, 255, 255;
--ion-color-success-shade: #28ba62;
--ion-color-success-tint: #42d77d;
/** warning **/
--ion-color-warning: #ffc409;
--ion-color-warning-rgb: 255, 196, 9;
--ion-color-warning-contrast: #000000;
--ion-color-warning-contrast-rgb: 0, 0, 0;
--ion-color-warning-shade: #e0ac08;
--ion-color-warning-tint: #ffca22;
/** danger **/
--ion-color-danger: #eb445a;
--ion-color-danger-rgb: 235, 68, 90;
--ion-color-danger-contrast: #ffffff;
--ion-color-danger-contrast-rgb: 255, 255, 255;
--ion-color-danger-shade: #cf3c4f;
--ion-color-danger-tint: #ed576b;
/** dark **/
--ion-color-dark: #222428;
--ion-color-dark-rgb: 34, 36, 40;
--ion-color-dark-contrast: #ffffff;
--ion-color-dark-contrast-rgb: 255, 255, 255;
--ion-color-dark-shade: #1e2023;
--ion-color-dark-tint: #383a3e;
/** medium **/
--ion-color-medium: #92949c;
--ion-color-medium-rgb: 146, 148, 156;
--ion-color-medium-contrast: #ffffff;
--ion-color-medium-contrast-rgb: 255, 255, 255;
--ion-color-medium-shade: #808289;
--ion-color-medium-tint: #9d9fa6;
/** light **/
--ion-color-light: #f4f5f8;
--ion-color-light-rgb: 244, 245, 248;
--ion-color-light-contrast: #000000;
--ion-color-light-contrast-rgb: 0, 0, 0;
--ion-color-light-shade: #d7d8da;
--ion-color-light-tint: #f5f6f9;
}
@media (prefers-color-scheme: dark) {
/*
* Dark Colors
* -------------------------------------------
*/
body {
--ion-color-primary: #428cff;
--ion-color-primary-rgb: 66,140,255;
--ion-color-primary-contrast: #ffffff;
--ion-color-primary-contrast-rgb: 255,255,255;
--ion-color-primary-shade: #3a7be0;
--ion-color-primary-tint: #5598ff;
--ion-color-secondary: #50c8ff;
--ion-color-secondary-rgb: 80,200,255;
--ion-color-secondary-contrast: #ffffff;
--ion-color-secondary-contrast-rgb: 255,255,255;
--ion-color-secondary-shade: #46b0e0;
--ion-color-secondary-tint: #62ceff;
--ion-color-tertiary: #6a64ff;
--ion-color-tertiary-rgb: 106,100,255;
--ion-color-tertiary-contrast: #ffffff;
--ion-color-tertiary-contrast-rgb: 255,255,255;
--ion-color-tertiary-shade: #5d58e0;
--ion-color-tertiary-tint: #7974ff;
--ion-color-success: #2fdf75;
--ion-color-success-rgb: 47,223,117;
--ion-color-success-contrast: #000000;
--ion-color-success-contrast-rgb: 0,0,0;
--ion-color-success-shade: #29c467;
--ion-color-success-tint: #44e283;
--ion-color-warning: #ffd534;
--ion-color-warning-rgb: 255,213,52;
--ion-color-warning-contrast: #000000;
--ion-color-warning-contrast-rgb: 0,0,0;
--ion-color-warning-shade: #e0bb2e;
--ion-color-warning-tint: #ffd948;
--ion-color-danger: #ff4961;
--ion-color-danger-rgb: 255,73,97;
--ion-color-danger-contrast: #ffffff;
--ion-color-danger-contrast-rgb: 255,255,255;
--ion-color-danger-shade: #e04055;
--ion-color-danger-tint: #ff5b71;
--ion-color-dark: #f4f5f8;
--ion-color-dark-rgb: 244,245,248;
--ion-color-dark-contrast: #000000;
--ion-color-dark-contrast-rgb: 0,0,0;
--ion-color-dark-shade: #d7d8da;
--ion-color-dark-tint: #f5f6f9;
--ion-color-medium: #989aa2;
--ion-color-medium-rgb: 152,154,162;
--ion-color-medium-contrast: #000000;
--ion-color-medium-contrast-rgb: 0,0,0;
--ion-color-medium-shade: #86888f;
--ion-color-medium-tint: #a2a4ab;
--ion-color-light: #222428;
--ion-color-light-rgb: 34,36,40;
--ion-color-light-contrast: #ffffff;
--ion-color-light-contrast-rgb: 255,255,255;
--ion-color-light-shade: #1e2023;
--ion-color-light-tint: #383a3e;
}
/*
* iOS Dark Theme
* -------------------------------------------
*/
.ios body {
--ion-background-color: #000000;
--ion-background-color-rgb: 0,0,0;
--ion-text-color: #ffffff;
--ion-text-color-rgb: 255,255,255;
--ion-color-step-50: #0d0d0d;
--ion-color-step-100: #1a1a1a;
--ion-color-step-150: #262626;
--ion-color-step-200: #333333;
--ion-color-step-250: #404040;
--ion-color-step-300: #4d4d4d;
--ion-color-step-350: #595959;
--ion-color-step-400: #666666;
--ion-color-step-450: #737373;
--ion-color-step-500: #808080;
--ion-color-step-550: #8c8c8c;
--ion-color-step-600: #999999;
--ion-color-step-650: #a6a6a6;
--ion-color-step-700: #b3b3b3;
--ion-color-step-750: #bfbfbf;
--ion-color-step-800: #cccccc;
--ion-color-step-850: #d9d9d9;
--ion-color-step-900: #e6e6e6;
--ion-color-step-950: #f2f2f2;
--ion-item-background: #000000;
--ion-card-background: #1c1c1d;
}
.ios ion-modal {
--ion-background-color: var(--ion-color-step-100);
--ion-toolbar-background: var(--ion-color-step-150);
--ion-toolbar-border-color: var(--ion-color-step-250);
}
/*
* Material Design Dark Theme
* -------------------------------------------
*/
.md body {
--ion-background-color: #121212;
--ion-background-color-rgb: 18,18,18;
--ion-text-color: #ffffff;
--ion-text-color-rgb: 255,255,255;
--ion-border-color: #222222;
--ion-color-step-50: #1e1e1e;
--ion-color-step-100: #2a2a2a;
--ion-color-step-150: #363636;
--ion-color-step-200: #414141;
--ion-color-step-250: #4d4d4d;
--ion-color-step-300: #595959;
--ion-color-step-350: #656565;
--ion-color-step-400: #717171;
--ion-color-step-450: #7d7d7d;
--ion-color-step-500: #898989;
--ion-color-step-550: #949494;
--ion-color-step-600: #a0a0a0;
--ion-color-step-650: #acacac;
--ion-color-step-700: #b8b8b8;
--ion-color-step-750: #c4c4c4;
--ion-color-step-800: #d0d0d0;
--ion-color-step-850: #dbdbdb;
--ion-color-step-900: #e7e7e7;
--ion-color-step-950: #f3f3f3;
--ion-item-background: #1e1e1e;
--ion-toolbar-background: #1f1f1f;
--ion-tab-bar-background: #1f1f1f;
--ion-card-background: #1e1e1e;
}
}
/**
* Prevents Angular change detection from
* running with certain Web Component callbacks
*/
// eslint-disable-next-line no-underscore-dangle
(window as any).__Zone_disable_customElements = true;
/* To learn more about this file see: https://angular.io/config/tsconfig. */
{
"extends": "./tsconfig.json",
"compilerOptions": {
"outDir": "./out-tsc/app",
"types": []
},
"files": [
"src/main.ts",
"src/polyfills.ts"
],
"include": [
"src/**/*.d.ts"
]
}
/* To learn more about this file see: https://angular.io/config/tsconfig. */
{
"compileOnSave": false,
"compilerOptions": {
"baseUrl": "./",
"outDir": "./dist/out-tsc",
"sourceMap": true,
"declaration": false,
"downlevelIteration": true,
"experimentalDecorators": true,
"moduleResolution": "node",
"importHelpers": true,
"target": "es2015",
"module": "es2020",
"lib": ["es2018", "dom"]
},
"angularCompilerOptions": {
"enableI18nLegacyMessageIdFormat": false,
"strictInjectionParameters": true,
"strictInputAccessModifiers": true,
"strictTemplates": true
}
}
/* To learn more about this file see: https://angular.io/config/tsconfig. */
{
"extends": "./tsconfig.json",
"compilerOptions": {
"outDir": "./out-tsc/spec",
"types": [
"jasmine"
]
},
"files": [
"src/test.ts",
"src/polyfills.ts"
],
"include": [
"src/**/*.spec.ts",
"src/**/*.d.ts"
]
}
import json
import os
from flask import Flask, request
from flask_cors import CORS, cross_origin
import mysql.connector
mydb = mysql.connector.connect(
host="localhost",
user="root",
password="",
database="dog_research"
)
mycursor = mydb.cursor()
app = Flask(__name__)
cors = CORS(app)
uploadPath = "uploads"
@app.route('/predict', methods=['POST'])
@cross_origin()
def uploadImage():
print(os.getcwd())
f = request.files['file']
splits = f.filename.split("/")
print(splits[len(splits) - 1])
# os.remove(os.path.join(uploadPath, "upload.jpg"))
f.save(os.path.join(uploadPath, "upload.jpg"))
# Flask view functions must return a response
return {"task": "Uploaded"}
@app.route("/signup", methods=['POST'])
def signup():
data = request.get_json()
print(data)
sql = "INSERT INTO users (email, password) VALUES (%s, %s)"
val = (data['email'], data['password'])
mycursor.execute(sql, val)
mydb.commit()
mycursor.execute("SELECT id FROM users ORDER BY id DESC LIMIT 1")
# myresult = mycursor.fetchall()
id = 0
for (id) in mycursor:
# id = row['id']
id = id
print(id)
return {"id": id[0]}
@app.route("/login", methods=['POST'])
def login():
data = request.get_json()
print(data)
try:
# Use a parameterized query to avoid SQL injection
mycursor.execute(
"SELECT id FROM users WHERE email = %s AND password = %s",
(data['email'], data['password']))
row = mycursor.fetchone()
print(row)
return {"id": row[0]}
except Exception:
return {"id": 0}
@app.route("/addPet", methods=['POST'])
def addPet():
data = request.get_json()
print(data)
sql = "INSERT INTO pets (name, age, weight, gender, breed) VALUES (%s, %s, %s, %s, %s)"
val = (data['name'], data['age'], data['weight'], data['gender'], data['breed'])
mycursor.execute(sql, val)
mydb.commit()
mycursor.execute("SELECT id FROM pets ORDER BY id DESC LIMIT 1")
# myresult = mycursor.fetchall()
id = 0
for (id) in mycursor:
# id = row['id']
id = id
print(id)
return {"id": id[0]}
@app.route("/addQuestion", methods=['POST'])
def addQuestion():
data = request.get_json()
print(data)
sql = "INSERT INTO questions (question, description, tag, votes) VALUES (%s, %s, %s, %s)"
val = (data['question'], data['description'], data['tag'], data['votes'])
mycursor.execute(sql, val)
mydb.commit()
mycursor.execute("SELECT id FROM questions ORDER BY id DESC LIMIT 1")
# myresult = mycursor.fetchall()
id = 0
for (id) in mycursor:
# id = row['id']
id = id
print(id)
return {"id": id[0]}
@app.route("/setQuestionVote", methods=['POST'])
def setQuestionVote():
data = request.get_json()
print(data)
sql = "UPDATE questions SET votes = " + str(data['votes']) + " WHERE id =" + str(data['id'])
mycursor.execute(sql)
mydb.commit()
return {"task": "Updated"}
@app.route("/addReview", methods=['POST'])
def addReview():
data = request.get_json()
print(data)
sql = "INSERT INTO reviews (review, post_id) VALUES (%s, %s)"
val = (data['review'], data['post_id'])
mycursor.execute(sql, val)
mydb.commit()
mycursor.execute("SELECT id FROM reviews ORDER BY id DESC LIMIT 1")
# myresult = mycursor.fetchall()
id = 0
for (id) in mycursor:
# id = row['id']
id = id
print(id)
return {"id": id[0]}
@app.route("/getPets", methods=['GET', 'POST'])
def getPets():
try:
mycursor.execute("SELECT * FROM pets")
myresult = mycursor.fetchall()
return json.dumps(myresult)
except:
return {"id": 0}
@app.route("/getQuestions", methods=['GET', 'POST'])
def getQuestions():
try:
mycursor.execute("SELECT * FROM questions")
myresult = mycursor.fetchall()
return json.dumps(myresult)
except:
return {"id": 0}
@app.route("/getReviews", methods=['GET', 'POST'])
def getReviews():
try:
mycursor.execute("SELECT * FROM reviews")
myresult = mycursor.fetchall()
return json.dumps(myresult)
except:
return {"id": 0}
if __name__ == '__main__':
app.run(debug=True, port=5005, host="0.0.0.0")
import numpy
from flask import Flask
from flask import request
from flask_cors import CORS, cross_origin
import Ontology_scripts
app = Flask(__name__)
cors = CORS(app)
@app.route("/predict", methods =['POST'])
@cross_origin()
def predict():
data = request.get_json()
print(data['title'])
print(data['content'])
prediction = Ontology_scripts.analyzePost(data['title'], data['content'], data['reviews'])
return {"pred": str(prediction)}
if __name__ == '__main__':
app.run(debug=True, port=5002, host="0.0.0.0")
:( = True Negati : Positi = 2647.8 : 1.0
:) = True Positi : Negati = 1272.4 : 1.0
follower = True Positi : Negati = 26.6 : 1.0
bam = True Positi : Negati = 26.3 : 1.0
sad = True Negati : Positi = 25.0 : 1.0
x15 = True Negati : Positi = 23.0 : 1.0
followed = True Negati : Positi = 19.8 : 1.0
appreciate = True Positi : Negati = 19.0 : 1.0
community = True Positi : Negati = 18.3 : 1.0
ugh = True Negati : Positi = 16.3 : 1.0
glad = True Positi : Negati = 15.8 : 1.0
goodnight = True Positi : Negati = 15.0 : 1.0
welcome = True Positi : Negati = 14.6 : 1.0
perfect = True Positi : Negati = 13.0 : 1.0
kill = True Negati : Positi = 13.0 : 1.0
What this tells you is the ratio of occurrences in negative to positive reviews, or vice versa, for each word. So here we can see that the term ":(" appears 2,647.8 times as often in negative reviews as it does in positive reviews.
\ No newline at end of file
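The table above is the output of `classifier.show_most_informative_features(15)` in the training script further below. As a minimal, self-contained sketch (using a tiny made-up feature set instead of the project's `twitter_samples` corpus), the same kind of ratio table can be produced like this:

```python
# Minimal sketch: train an NLTK Naive Bayes classifier on a tiny, made-up
# dataset and print its most informative features, i.e. the ratio table above.
from nltk import NaiveBayesClassifier

train_data = [
    ({":(": True, "sad": True}, "Negative"),
    ({":(": True, "ugh": True}, "Negative"),
    ({":)": True, "glad": True}, "Positive"),
    ({":)": True, "welcome": True}, "Positive"),
]

classifier = NaiveBayesClassifier.train(train_data)

# Each row reports how much more often a feature occurs with one label than
# the other, e.g. ':( = True    Negative : Positive = N : 1.0'.
classifier.show_most_informative_features(5)
```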
import nltk, re, string, random
from nltk.corpus import twitter_samples
from nltk.tag import pos_tag
from nltk.stem.wordnet import WordNetLemmatizer
from nltk.corpus import stopwords
from nltk import FreqDist
from nltk import classify
from nltk import NaiveBayesClassifier
from nltk.tokenize import word_tokenize
from flask import Flask
positive_tweets = twitter_samples.strings('positive_tweets.json')
negative_tweets = twitter_samples.strings('negative_tweets.json')
text = twitter_samples.strings('tweets.20150430-223406.json')
tweet_tokens = twitter_samples.tokenized('positive_tweets.json')
def lemmatize_sentence(tokens):
lemmatizer = WordNetLemmatizer()
lemmatized_sentence = []
for word, tag in pos_tag(tokens):
if tag.startswith('NN'):
pos = 'n'
elif tag.startswith('VB'):
pos = 'v'
else:
pos = 'a'
lemmatized_sentence.append(lemmatizer.lemmatize(word, pos))
return lemmatized_sentence
def remove_noise(tweet_tokens, stop_words = ()):
cleaned_tokens = []
for token, tag in pos_tag(tweet_tokens):
token = re.sub('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+#]|[!*\(\),]|' \
'(?:%[0-9a-fA-F][0-9a-fA-F]))+','', token)
token = re.sub("(@[A-Za-z0-9_]+)","", token)
if tag.startswith("NN"):
pos = 'n'
elif tag.startswith('VB'):
pos = 'v'
else:
pos = 'a'
lemmatizer = WordNetLemmatizer()
token = lemmatizer.lemmatize(token, pos)
if len(token) > 0 and token not in string.punctuation and token.lower() not in stop_words:
cleaned_tokens.append(token.lower())
return cleaned_tokens
stop_words = stopwords.words('english')
positive_tweet_tokens = twitter_samples.tokenized('positive_tweets.json')
negative_tweet_tokens = twitter_samples.tokenized('negative_tweets.json')
positive_cleaned_tokens_list = []
negative_cleaned_tokens_list = []
for tokens in positive_tweet_tokens:
positive_cleaned_tokens_list.append(remove_noise(tokens, stop_words))
for tokens in negative_tweet_tokens:
negative_cleaned_tokens_list.append(remove_noise(tokens, stop_words))
def get_all_words(cleaned_tokens_list):
for tokens in cleaned_tokens_list:
for token in tokens:
yield token
all_pos_words = get_all_words(positive_cleaned_tokens_list)
freq_dist_pos = FreqDist(all_pos_words)
def get_words_for_model(cleaned_tokens_list):
for tweet_tokens in cleaned_tokens_list:
yield dict([token, True] for token in tweet_tokens)
positive_tokens_for_model = get_words_for_model(positive_cleaned_tokens_list)
negative_tokens_for_model = get_words_for_model(negative_cleaned_tokens_list)
positive_dataset = [(tweet_dict, "Positive")
for tweet_dict in positive_tokens_for_model]
negative_dataset = [(tweet_dict, "Negative")
for tweet_dict in negative_tokens_for_model]
dataset = positive_dataset + negative_dataset
random.shuffle(dataset)
train_data = dataset[:7000]
test_data = dataset[7000:]
classifier = NaiveBayesClassifier.train(train_data)
# Score a post: positive sentiment in the title, the content and each review adds points
def analyzePost(title, content, reviews):
points = 0
# A positive title is worth 2 points
title_tokens = remove_noise(word_tokenize(title))
titleSentiment = classifier.classify(dict([token, True] for token in title_tokens))
if titleSentiment == "Positive":
points += 2
# Positive content is worth 5 points
content_tokens = remove_noise(word_tokenize(content))
contentSentiment = classifier.classify(dict([token, True] for token in content_tokens))
if contentSentiment == "Positive":
points += 5
# Each review counts once, and each positive review adds one more point
points += len(reviews)
for review in reviews:
review_tokens = remove_noise(word_tokenize(review))
reviewSentiment = classifier.classify(dict([token, True] for token in review_tokens))
if reviewSentiment == "Positive":
points += 1
return points
# print(analyzePost("watch your dog's health", "bvjkdn dbfjksdn", ["gdfsdff"]))
# 2021-210
"BEST BARK" - Dog care and owner consultation system
# Ontology
### 2021-05-25
Learn Python
### 2021-06-05
Learn Natural Language Processing (NLP)
### 2021-06-10
Create Python workspace
### 2021-06-11
Update workspace
### 2021-08-11
Add the graph needed for the research paper
<!-- Write ontology scripts to module -->
import nltk, re, string, random
# Downloading words data
nltk.download('twitter_samples')
nltk.download('punkt')
nltk.download('wordnet')
nltk.download('averaged_perceptron_tagger')
nltk.download('stopwords')
# Importing required libraries and dependencies
from nltk.corpus import twitter_samples
from nltk.tag import pos_tag
from nltk.stem.wordnet import WordNetLemmatizer
from nltk.corpus import stopwords
from nltk import FreqDist
from nltk import classify
from nltk import NaiveBayesClassifier
from nltk.tokenize import word_tokenize
from flask import Flask
from flask import request
app = Flask(__name__)
positive_tweets = twitter_samples.strings('positive_tweets.json')
negative_tweets = twitter_samples.strings('negative_tweets.json')
text = twitter_samples.strings('tweets.20150430-223406.json')
tweet_tokens = twitter_samples.tokenized('positive_tweets.json')
# print(tweet_tokens[0])
# print(pos_tag(tweet_tokens[0]))
def lemmatize_sentence(tokens):
lemmatizer = WordNetLemmatizer()
lemmatized_sentence = []
# TAG words with NLTK POS tagger : https://www.nltk.org/book/ch05.html
for word, tag in pos_tag(tokens):
if tag.startswith('NN'):
pos = 'n'
elif tag.startswith('VB'):
pos = 'v'
else:
pos = 'a'
lemmatized_sentence.append(lemmatizer.lemmatize(word, pos))
return lemmatized_sentence
# print(tweet_tokens[0])
# print(lemmatize_sentence(tweet_tokens[0]))
def remove_noise(tweet_tokens, stop_words = ()):
cleaned_tokens = []
# Removing urls and other unnecessary words (noise)
for token, tag in pos_tag(tweet_tokens):
token = re.sub('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+#]|[!*\(\),]|'\
'(?:%[0-9a-fA-F][0-9a-fA-F]))+','', token)
token = re.sub("(@[A-Za-z0-9_]+)","", token)
if tag.startswith("NN"):
pos = 'n'
elif tag.startswith('VB'):
pos = 'v'
else:
pos = 'a'
# Word net lemmatizer : https://www.programcreek.com/python/example/81649/nltk.WordNetLemmatizer
lemmatizer = WordNetLemmatizer()
token = lemmatizer.lemmatize(token, pos)
# If the lemmatized tokens are not punctuation and they are not stop words -> add those tokens to the end of cleaned tokens
if len(token) > 0 and token not in string.punctuation and token.lower() not in stop_words:
cleaned_tokens.append(token.lower())
return cleaned_tokens
# Starting NLP
stop_words = stopwords.words('english')
# print(remove_noise(tweet_tokens[0], stop_words))
positive_tweet_tokens = twitter_samples.tokenized('positive_tweets.json')
negative_tweet_tokens = twitter_samples.tokenized('negative_tweets.json')
positive_cleaned_tokens_list = []
negative_cleaned_tokens_list = []
# Remove noise with above function remove_noise()
for tokens in positive_tweet_tokens:
positive_cleaned_tokens_list.append(remove_noise(tokens, stop_words))
for tokens in negative_tweet_tokens:
negative_cleaned_tokens_list.append(remove_noise(tokens, stop_words))
# print(positive_tweet_tokens[500])
# print(positive_cleaned_tokens_list[500])
# Turn 2D array of tokens in to single 1D array
def get_all_words(cleaned_tokens_list):
for tokens in cleaned_tokens_list:
for token in tokens:
yield token
all_pos_words = get_all_words(positive_cleaned_tokens_list)
freq_dist_pos = FreqDist(all_pos_words)
# print(freq_dist_pos.most_common(10))
def get_words_for_model(cleaned_tokens_list):
for tweet_tokens in cleaned_tokens_list:
yield dict([token, True] for token in tweet_tokens)
positive_tokens_for_model = get_words_for_model(positive_cleaned_tokens_list)
negative_tokens_for_model = get_words_for_model(negative_cleaned_tokens_list)
positive_dataset = [(tweet_dict, "Positive")
for tweet_dict in positive_tokens_for_model]
negative_dataset = [(tweet_dict, "Negative")
for tweet_dict in negative_tokens_for_model]
# Create a single data set with both negative and positive data sets prepared
dataset = positive_dataset + negative_dataset
# Shuffle the data set to mix positive and negative data to be split to train data and test data
random.shuffle(dataset)
# Split whole data set into train and test data
train_data = dataset[:9000]
test_data = dataset[9000:]
# train the NaiveBayesClassifier model with train_data
classifier = NaiveBayesClassifier.train(train_data)
print("Accuracy is:", classify.accuracy(classifier, test_data))
classifier.show_most_informative_features(15)
# print(classifier.show_most_informative_features(10))
custom_text = "This is a bad post. it gives out wrong idea to the people."
custom_tokens = remove_noise(word_tokenize(custom_text))
# Test print
print(classifier.classify(dict([token, True] for token in custom_tokens)))
# Flask API to be used in backend
@app.route("/NLP")
def hello():
# text as a http get request parameter
custom_tweet = request.args.get('text')
custom_tokens = remove_noise(word_tokenize(custom_tweet))
return classifier.classify(dict([token, True] for token in custom_tokens))
if __name__ == '__main__':
# Set the API port to 8083 and enable debug to view clear errors
app.run(debug=True, port=8083)
# 2021-210
"BEST BARK" - Dog care and owner consultation system
\ No newline at end of file
"BEST BARK" - Dog care and owner consultation system
**Main objective**
The main objective of the "BEST BARK" mobile application is to provide consultation to dog owners and improve their knowledge of raising dogs.
Main Research questions
Due to their busy lives, people find it hard to make time for their pet dogs. They also often cannot recognize the diseases their dogs are suffering from because of a lack of knowledge about them. Some of these diseases can be serious while others are not. The most common problem for dog owners is skin disease, which reduces a dog's appearance and can cause further side effects. Therefore, this research builds an efficient and convenient system that predicts diseases from the symptoms and behaviors of dogs, which is a real health benefit for the pets. When raising dogs, owners want them to be healthy and energetic, so proper exercise and diet plans are essential. Because of the owners' busy lives, however, most dogs do not get enough exercise or a nutritious diet, and as a result many domestic dogs do not lead a healthy, energetic life. This research therefore also creates a proper exercise and diet plan that helps keep dogs healthy. Finally, many problems arise for an owner during a dog's lifetime, and so far there is no system that gives quick answers to the questions dog owners face.
Individual research questions
Saluk M.M.B. - IT18124354
*What skin diseases should be diagnosed?
*How to get the pictures needed for this?
*What technology is used to train and test the dataset?
Weerasundara D.A. - IT18116984
*How to identify dog’s diseases using symptoms?
*What diseases are expected to be predicted in dogs?
*To whom is it important if a chatbot is designed to predict dog diseases?
*What technologies are needed to create a chatbot?
Jayasekara A.P.S.D. - IT18150926
*How to create a knowledge base?
*What technologies do I want to use?
*How to find the data I need?
*How to deploy our system?
Gunarathna K.A.G.I.P.T. - IT18523256
*How to find a dog's health value?
*What factors should be considered when creating a diet plan for dogs?
*What factors should be considered when creating an exercise plan for dogs?
*What technology should be used to implement this system?
Individual Objectives
Saluk M.M.B. - IT18124354
Detect a dog's skin diseases using image processing and report the causes of the disease.
In this part, the dog owner can check the dog's skin condition using image processing technology. When the user uploads an image of the dog's skin through the mobile phone, the system compares that image with the training dataset. After analysis, the system identifies whether it shows a disease or a normal condition. If it is a skin disease, the system generates a report with clear information about the disease, asking the user a round of questions to obtain the information needed to prepare the report. Finally, the system gives users the option, only if they wish, to upload the disease picture to a public database.
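As an aside, the image upload described above maps onto the Flask `/predict` upload route (port 5005) in the backend, which saves the incoming file as `uploads/upload.jpg`. A minimal usage sketch, assuming a local server and a sample file named `skin.jpg` (both assumptions, not part of the original submission), might look like this:

```python
# Hedged usage sketch: send a dog skin photo to the Flask upload endpoint
# (POST /predict on port 5005, which saves it as uploads/upload.jpg).
# The file name "skin.jpg" and the localhost URL are placeholders.
import requests

with open("skin.jpg", "rb") as image_file:
    response = requests.post(
        "http://localhost:5005/predict",
        files={"file": image_file},  # the backend reads request.files['file']
    )
print(response.status_code)
```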
Weerasundara D.A. - IT18116984
A chatbot that predicts a dog's disease from its symptoms using machine learning algorithms and decides whether a veterinary surgeon needs to be contacted.
When the user enters the dog's symptoms, the symptomatic words are recognized using natural language processing. From those symptomatic words, the machine learning algorithms predict the disease based on the datasets. The system reports the identified disease to the user and finally decides whether a veterinary surgeon needs to be contacted.
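For reference, the symptom-based prediction is served by the Flask API on port 5001, which converts the JSON body with `numpy.array()` and returns the argmax of the Keras model's output. A hedged sketch of how that route is intended to be called, with a placeholder feature vector following the 62-column order of `data.csv` (Age, label-encoded breed, then the symptom flags) and a localhost URL assumed, might look like this:

```python
# Hedged sketch: query the disease-prediction service (POST /predict, port 5001).
# The 62-element vector below is a placeholder, not a real case; its order must
# match the feature columns of data.csv.
import requests

features = [2, 0] + [0] * 60          # Age, encoded breed, 60 symptom flags
response = requests.post(
    "http://localhost:5001/predict",
    json=[features],                   # one sample, shaped like the training rows
)
print(response.json())                 # e.g. {"pred": "<class index>"}
```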
Jayasekara A.P.S.D. - IT18150926
Create a system that provides the most appropriate solution to a dog owner's day-to-day problems related to his or her dog.
Answers are provided through a knowledge base, using tags and other relevant information as well as input from other people using the system.
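Related to this, knowledge-base posts are scored by the ontology sentiment service on port 5002, which runs `Ontology_scripts.analyzePost()` over the title, content and reviews. A minimal usage sketch with a made-up post, assuming a local server, could be:

```python
# Hedged sketch: score a knowledge-base post with the ontology sentiment service
# (POST /predict on port 5002). The post below is made up for illustration.
import requests

post = {
    "title": "Watch your dog's health",
    "content": "Regular walks keep my dog happy and fit.",
    "reviews": ["Great advice", "Very helpful"],
}
response = requests.post("http://localhost:5002/predict", json=post)
print(response.json())  # e.g. {"pred": "9"}
```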
Gunarathna K.A.G.I.P.T. - IT18523256
Get the best training schedules for dogs using machine learning.
When the user enters the dog's health details, the system calculates the BFI (Body Fat Index) and shows the result. Using that result, the system notifies the user about the dog's risk level. The system then creates an exercise plan and a diet plan using machine learning.
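As a usage note for the health-details step above, the backend already exposes an `/addPet` route (port 5005) that stores name, age, weight, gender and breed and returns the new row id. A minimal sketch of calling it, with placeholder values and a localhost URL assumed (the BFI calculation and plan generation themselves are not shown here), could look like this:

```python
# Hedged sketch: register a dog's details with the backend (POST /addPet, port 5005).
# All values below are placeholders for illustration only.
import requests

pet = {
    "name": "Rex",
    "age": 4,
    "weight": 23,
    "gender": "male",
    "breed": "labrador retriever",
}
response = requests.post("http://localhost:5005/addPet", json=pet)
print(response.json())  # e.g. {"id": 1}
```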
Other necessary information
import numpy
import tensorflow as tf
from flask import Flask
from flask import request
from flask_cors import CORS
from tensorflow.keras.models import load_model
import keras
import predict_disease
app = Flask(__name__)
cors = CORS(app)
@app.route("/predict", methods =['POST'])
def predict():
data = numpy.array(request.get_json())
print(data)
prediction = predict_disease.predict(data)
return {"pred": str(numpy.argmax(prediction))}
if __name__ == '__main__':
app.run(debug=True, port=5001, host="0.0.0.0")
import numpy
import pandas as pd
import tensorflow as tf
from keras.models import load_model
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
from sklearn import metrics
from sklearn import preprocessing
def loadModel(model):
# The trained network is saved to model.h5 by trainModel(), so simply reload it
model = load_model('model.h5')
return model
def trainModel(model, datasetFilePath):
model = tf.keras.models.Sequential()
model.add(tf.keras.layers.Flatten())
model.add(tf.keras.layers.Dense(25, activation=tf.nn.relu))
model.add(tf.keras.layers.Dense(25, activation=tf.nn.relu))
model.add(tf.keras.layers.Dense(7, activation=tf.nn.softmax))
model.compile(optimizer='adam',
loss='sparse_categorical_crossentropy',
metrics=['accuracy'])
model_df = pd.read_csv(datasetFilePath)
leDiseases = preprocessing.LabelEncoder()
leDiseases.fit(model_df['Diseases'])
print(leDiseases.classes_)
model_df['Diseases'] = leDiseases.transform(model_df['Diseases'])
# print(dataDf.describe())
leBreeds = preprocessing.LabelEncoder()
leBreeds.fit(model_df['Breeds'])
model_df['Breeds'] = leBreeds.transform(model_df['Breeds'])
model_df = model_df.fillna(0)  # fill missing values in the frame used below
print(model_df.shape)
X = model_df.values[:,0:62]
y = model_df.values[:,62]
print(y)
x_train,x_test,y_train,y_test = train_test_split(X,y, stratify=y,test_size=0.2)
x_train = tf.keras.utils.normalize(x_train, axis=1)
x_test = tf.keras.utils.normalize(x_test, axis=1)
history = model.fit(x_train, y_train, epochs=15, validation_data=(x_test, y_test))
print("History: ", history)
val_loss, val_acc = model.evaluate(x_test, y_test)
print(val_loss)
print(val_acc)
model.save('model.h5')
print(history.history)
# summarize history for accuracy
# Keras reports the metric under 'accuracy' / 'val_accuracy'
plt.plot(history.history['accuracy'])
plt.plot(history.history['val_accuracy'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
plt.show()
# summarize history for loss
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
plt.show()
# predict_classes was removed in newer Keras; take the argmax of the softmax output
y_pred = numpy.argmax(model.predict(x_test), axis=1)
print(y_test)
print(y_pred)
cnf_matrix = metrics.confusion_matrix(y_test, y_pred)
print("===================")
print("Confusion matrix")
print("===================")
print(cnf_matrix)
return model
def predict(model, data):
return model.predict(numpy.array(data))
model = tf.keras.models.Sequential()
trainModel(model, "data.csv")
Age,Breeds,anorexia,high_fever,itching,pus_discharge,fever,dripping_urine,cough,vomiting,unable_to_swallowing,scratching,scratching_the_affected_ear_on_the_floor,blood_in_urine,eye_discharge,lethargy,patchy_hair_loss,rubbing_the_affected_ear_on_the_floor,strain_to_urinate,nasal_discharge,bloody_diarrhea,increased_salivation,crusty_skin_ulcers,frequently_rotating_movement,diarrhea,frequently_urination,mild_fever,drooling,skin_redness,rash,painful,pale_gums,licking_genital_area,abdominal_pain,seizures,inflammation_of_skin,hyperemic_gums,bloating,paralysis,unpleasant_odor,redness_on_the_ear,whining_when_urination,weight_loss,hypersensitivity,stiff_joints,muscle_twitching,weakness,odd_behavior,thickening_of_skin,abnormal_bleeding,dehydration,sores_on_the_abdomen,enlarged_lymph_nodes,sores_on_the_legs,increased_capillary_refill_time,difficulty_breathing,sores_on_the_ears,pain,sores_on_the_chest,sores_on_the_elbows,partial_paralysis,complete_paralysis,Diseases
2,german sheppherd,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,Parvovirus
2,german sheppherd,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,Parvovirus
2,german sheppherd,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,Parvovirus
10,mongrel,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,Rabies
10,mongrel,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,Rabies
10,mongrel,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,Rabies
6,mongrel,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,1,0,0,0,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,1,0,0,1,0,1,0,0,1,0,1,1,0,0,Canine Scabies
4,labrador retriever,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,1,0,1,0,0,1,0,1,1,0,0,Canine Scabies
6,german sheppherd,0,0,0,1,1,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,Otitis Media
6,german sheppherd,0,0,0,1,1,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,Otitis Media
6,german sheppherd,0,0,0,1,1,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,Otitis Media
3,labrador retriever,1,0,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,1,0,1,0,0,0,0,0,0,0,Tick Fever
4,german sheppherd,1,0,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,Tick Fever
7,mongrel,1,0,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,1,0,1,0,0,0,0,0,0,0,Tick Fever
6,german sheppherd,1,0,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,Tick Fever
9,mongrel,1,0,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,1,0,1,0,0,0,0,0,0,0,Tick Fever
11,pomeranian,1,0,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,Tick Fever
7,mongrel,1,0,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,1,0,1,0,0,0,0,0,0,0,Tick Fever
5,german sheppherd,1,0,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,Tick Fever
8,mongrel,1,0,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,1,0,1,0,0,0,0,0,0,0,Tick Fever
3,mongrel,1,0,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,Tick Fever
8,german sheppherd,1,0,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,1,0,1,0,0,0,0,0,0,0,Tick Fever
5,pomeranian,1,0,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,Tick Fever
6,german sheppherd,1,0,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,1,0,1,0,0,0,0,0,0,0,Tick Fever
3,labrador retriever,1,0,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,Tick Fever
7,german sheppherd,1,0,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,1,0,1,0,0,0,0,0,0,0,Tick Fever
5,german sheppherd,1,0,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,Tick Fever
4,mongrel,0,0,0,0,1,1,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,Urinary tract infections (UTIs)
3,mongrel,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,Urinary tract infections (UTIs)
6,german sheppherd,1,0,0,0,1,0,1,1,0,0,0,0,1,1,0,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,1,1,Canine Distemper
4,german sheppherd,1,0,0,0,1,0,1,0,0,0,0,0,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,Canine Distemper
7,mongrel,1,0,0,0,1,0,1,0,0,0,0,0,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,Canine Distemper
10,pomeranian,1,0,0,0,1,0,1,0,0,0,0,0,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,1,0,Canine Distemper
8,mongrel,1,0,0,0,1,0,1,0,0,0,0,0,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,1,1,Canine Distemper
8,labrador retriever,1,0,0,0,1,0,1,0,0,0,0,0,1,1,0,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,Canine Distemper
7,pomeranian,1,0,0,0,1,0,1,0,0,0,0,0,1,1,0,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,Canine Distemper
5,mongrel,1,0,0,0,1,0,1,0,0,0,0,0,1,1,0,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,1,0,Canine Distemper
6,german sheppherd,1,0,0,0,1,0,1,0,0,0,0,0,1,1,0,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,1,1,Canine Distemper
9,mongrel,1,0,0,0,1,0,1,1,0,0,0,0,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,Canine Distemper
4,mongrel,1,0,0,0,1,0,1,1,0,0,0,0,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,Canine Distemper
6,pomeranian,1,0,0,0,1,0,1,1,0,0,0,0,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,1,0,Canine Distemper
7,german sheppherd,1,0,0,0,1,0,1,1,0,0,0,0,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,1,1,Canine Distemper
8,mongrel,1,0,0,0,1,0,1,1,0,0,0,0,1,1,0,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,Canine Distemper
7,pomeranian,1,0,0,0,1,0,1,1,0,0,0,0,1,1,0,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,Canine Distemper
9,german sheppherd,1,0,0,0,1,0,1,1,0,0,0,0,1,1,0,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,1,0,Canine Distemper
import numpy
import pandas as pd
import tensorflow as tf
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
from sklearn import preprocessing
dataDf = pd.read_csv("data.csv")
leDiseases = preprocessing.LabelEncoder()
leDiseases.fit(dataDf['Diseases'])
print(leDiseases.classes_)
dataDf['Diseases'] = leDiseases.transform(dataDf['Diseases'])
# print(dataDf.describe())
leBreeds = preprocessing.LabelEncoder()
leBreeds.fit(dataDf['Breeds'])
dataDf['Breeds'] = leBreeds.transform(dataDf['Breeds'])
dataDf = dataDf.fillna(0)
model = tf.keras.models.Sequential()
def trainModel(model, datasetFilePath):
model.add(tf.keras.layers.Flatten())
model.add(tf.keras.layers.Dense(256, activation=tf.nn.relu))
model.add(tf.keras.layers.Dense(128, activation=tf.nn.relu))
model.add(tf.keras.layers.Dense(7, activation=tf.nn.softmax))
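# 7 output units: one per distinct value in the Diseases column of data.csv
# (Parvovirus, Rabies, Canine Scabies, Otitis Media, Tick Fever, Urinary tract
# infections (UTIs), Canine Distemper), matching what leDiseases.classes_ prints above.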
model.compile(optimizer='adam',
loss='sparse_categorical_crossentropy',
metrics=['accuracy'])
model_df = pd.read_csv(datasetFilePath)  # note: unused below; training uses the globally encoded dataDf
print(dataDf.shape)
X = dataDf[['Age','Breeds','anorexia','high_fever','itching','pus_discharge','fever','dripping_urine','cough','vomiting','unable_to_swallowing','scratching','scratching_the_affected_ear_on_the_floor','blood_in_urine','eye_discharge','lethargy','patchy_hair_loss','rubbing_the_affected_ear_on_the_floor','strain_to_urinate','nasal_discharge','bloody_diarrhea','increased_salivation','crusty_skin_ulcers','frequently_rotating_movement','diarrhea','frequently_urination','mild_fever','drooling','skin_redness','rash','painful','pale_gums','licking_genital_area','abdominal_pain','seizures','inflammation_of_skin','hyperemic_gums','bloating','paralysis','unpleasant_odor','redness_on_the_ear','whining_when_urination','weight_loss','hypersensitivity','stiff_joints','muscle_twitching','weakness','odd_behavior','thickening_of_skin','abnormal_bleeding','dehydration','sores_on_the_abdomen','enlarged_lymph_nodes','sores_on_the_legs','increased_capillary_refill_time','difficulty_breathing','sores_on_the_ears','pain','sores_on_the_chest','sores_on_the_elbows','partial_paralysis','complete_paralysis']]
y = dataDf[['Diseases']]
# print(y)
x_train,x_test,y_train,y_test = train_test_split(X,y, stratify=y,test_size=0.2)
# x_train = tf.keras.utils.normalize(x_train, axis=1)
# x_test = tf.keras.utils.normalize(x_test, axis=1)
print("==================")
print(x_train)
print("==================")
print(y_train)
print("==================")
history = model.fit(x_train, y_train, epochs=50, validation_data=(x_test, y_test))
val_loss, val_acc = model.evaluate(x_test, y_test)
print("Loss: ",val_loss)
print("Accuracy: ",val_acc*100, "%")
model.save('model.h5')
print(history.history)
# summarize history for accuracy
plt.plot(history.history['accuracy'])  # note: older tf.keras versions logged this key as 'acc'
plt.plot(history.history['val_accuracy'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
plt.show()
# summarize history for loss
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
plt.show()
return model
def predict(model, data):
return model.predict(numpy.array(data))
trainModel(model, "data.csv")
import numpy
import tensorflow as tf
from flask import Flask
from flask import request
from flask_cors import CORS
from tensorflow.keras.models import load_model
import keras
app = Flask(__name__)
cors = CORS(app)
@app.route("/predict", methods =['POST'])
def predict():
model = tf.keras.models.Sequential()  # note: this hand-built model is unused; load_model() below replaces it
model.add(tf.keras.layers.Flatten())
model.add(tf.keras.layers.Dense(25, activation=tf.nn.relu))
model.add(tf.keras.layers.Dense(25, activation=tf.nn.relu))
model.add(tf.keras.layers.Dense(7, activation=tf.nn.softmax))
model.compile(optimizer='adam',
loss='sparse_categorical_crossentropy',
metrics=['accuracy'])
model = load_model('model.h5')
data = numpy.array(request.get_json())
print(data)
prediction = model.predict(data)
return {"pred": str(numpy.argmax(prediction))}
if __name__ == '__main__':
app.run(debug=True, port=5001)
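# A minimal client sketch for the /predict endpoint above. Run it from a
# separate process while the service is up; the feature row is hypothetical
# and must have the same 61 already-encoded columns used in training.
import requests

features = [[2, 0] + [0] * 59]  # one hypothetical input row (61 values)
resp = requests.post("http://localhost:5001/predict", json=features)
print(resp.json())  # e.g. {"pred": "3"} -- the predicted class index as a string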
disease_labels = ["Canine Scabies","Canine demodicosis","Fungal dermatitis","Allergic dermatitis","Ehrlichiosis","Canine babesiosis","Immune Mediated Haemolytic Anaemia","Acquired Thrombocytopenia","Hypoglycemia","Hypocalcemia","Canine parvo virus infection","Canine corona virus infection","Sphingomyelinase","Rabies","Bacterial prostatitis","Balanoposthitis","Canine brucellosis","Canine Cirrhosis","Bang's Disease","Candidiasis","Canine distemper","Canine influenza","Canine lymphoma","Lyme disease","Kidney disease","Kennel Cough","Diarrhoea","Gastritis","Gastroenteritis","Hepatitis","Giardia infection","Canine herpes virus","Galucoma","Gastric ulcer","Hepatic Encephalopathy","Hepatic lipidosis","Pneumonia","Diabetes","Ear infections","Heartworm","Anemia","Canine Leptospirosis","HEATSTROKE","Canine Hip Dysplasia","Canine Elbow Dysplasia"]
print(len(disease_labels))
import numpy
import tensorflow as tf
from tensorflow.keras.models import load_model
model = tf.keras.models.Sequential()  # note: this hand-built model is unused; load_model('model.h5') below replaces it
model.add(tf.keras.layers.Flatten())
model.add(tf.keras.layers.Dense(256, activation=tf.nn.relu))
model.add(tf.keras.layers.Dense(128, activation=tf.nn.relu))
model.add(tf.keras.layers.Dense(11, activation=tf.nn.softmax))
model.compile(optimizer='adam',
loss='sparse_categorical_crossentropy',
metrics=['accuracy'])
model = load_model('model.h5')
def predict(data):
return model.predict(numpy.array(data))
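# Note: model.h5 (saved by the training script above) expects the 61-column
# feature vector from data.csv, so rows passed to predict() need that width.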
# Test prediction
print(predict([[1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0]]))
print(numpy.argmax(predict([[1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0]])))
print(disease_labels[numpy.argmax(predict([[1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0]]))])
import tensorflow as tf
import numpy
from tensorflow.keras.models import load_model
model = tf.keras.models.Sequential()  # note: this hand-built model is unused; load_model('model.h5') below replaces it
model.add(tf.keras.layers.Flatten())
model.add(tf.keras.layers.Dense(25, activation=tf.nn.relu))
model.add(tf.keras.layers.Dense(25, activation=tf.nn.relu))
model.add(tf.keras.layers.Dense(7, activation=tf.nn.softmax))
model.compile(optimizer='adam',
loss='sparse_categorical_crossentropy',
metrics=['accuracy'])
model = load_model('model.h5')
prediction = model.predict(numpy.array([[6,1,1,1,1,1,104,1,0,1,1,1,0,1,1,1,0,0,0,1,1,0,0,0,1,1,1,1,6,1,1,1,1,1,104,1,0,1,1,1,0,1,1,1,0,0,0,1,1,0,0,0,1,1,1,1,1,1,1,1,1,1]]))
print(prediction)
.idea
data
model_saved.h5
\ No newline at end of file
# importing libraries
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D
from keras.layers import Activation, Dropout, Flatten, Dense
from keras import backend as K
import matplotlib.pyplot as plt
img_width, img_height = 350, 350
train_data_dir = 'data/train'
validation_data_dir = 'data/test'
nb_train_samples = 168
nb_validation_samples = 20
epochs = 20
batch_size = 20
if K.image_data_format() == 'channels_first':
input_shape = (3, img_width, img_height)
else:
input_shape = (img_width, img_height, 3)
model = Sequential()
model.add(Conv2D(32, (2, 2), input_shape = input_shape))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size =(2, 2)))
model.add(Conv2D(32, (2, 2)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size =(2, 2)))
model.add(Conv2D(64, (2, 2)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size =(2, 2)))
model.add(Flatten())
model.add(Dense(64))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(2))
model.add(Activation('softmax'))
model.compile(loss ='sparse_categorical_crossentropy',
optimizer ='adam',
metrics =['accuracy'])
train_datagen = ImageDataGenerator(
rescale = 1. / 255,
shear_range = 0.2,
zoom_range = 0.2,
horizontal_flip = True)
test_datagen = ImageDataGenerator(rescale = 1. / 255)
train_generator = train_datagen.flow_from_directory(train_data_dir,
target_size =(img_width, img_height),
batch_size = batch_size, class_mode ='sparse')
validation_generator = test_datagen.flow_from_directory(
validation_data_dir,
target_size =(img_width, img_height),
batch_size = batch_size, class_mode ='sparse')
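# Expected directory layout for flow_from_directory (a sketch; the mange folder
# name is an assumption -- only data/test/Ringworm appears elsewhere in this repo):
#
#   data/
#     train/
#       Mange/
#       Ringworm/
#     test/
#       Mange/
#       Ringworm/
#
# Keras assigns class indices in alphanumeric folder order, which is how the
# prediction code later maps index 0 -> "Sarcoptic & demodectic mange" and
# index 1 -> "Ring worms".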
history = model.fit_generator(train_generator,
steps_per_epoch = nb_train_samples // batch_size,
epochs = epochs, validation_data = validation_generator,
validation_steps = nb_validation_samples // batch_size)
model.save_weights('model_saved.h5')
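# Note: save_weights() stores only the layer weights (no architecture), which
# is why the prediction script below rebuilds the same Sequential model before
# calling load_weights('model_saved.h5').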
print(history.history)
# summarize history for accuracy
plt.plot(history.history['accuracy'])
plt.plot(history.history['val_accuracy'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
plt.show()
# summarize history for loss
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
plt.show()
import keras
import numpy as np
import tensorflow
from keras import backend as K
from keras.layers import Activation
from keras.layers import Conv2D, MaxPooling2D
from keras.layers import Dense, Dropout, Flatten
from keras.models import Sequential
print(tensorflow.__version__)
print(keras.__version__)
def predictImageClass(image):
K.clear_session()
# tensorflow.reset_default_graph()
nb_train_samples = 112
nb_validation_samples = 20
epochs = 10
batch_size = 2
img_width, img_height = 350, 350
if K.image_data_format() == 'channels_first':
input_shape = (3, img_width, img_height)
else:
input_shape = (img_width, img_height, 3)
modelI = Sequential()
modelI.add(Conv2D(32, (2, 2), input_shape=input_shape))
modelI.add(Activation('relu'))
modelI.add(MaxPooling2D(pool_size=(2, 2)))
modelI.add(Conv2D(32, (2, 2)))
modelI.add(Activation('relu'))
modelI.add(MaxPooling2D(pool_size=(2, 2)))
modelI.add(Conv2D(64, (2, 2)))
modelI.add(Activation('relu'))
modelI.add(MaxPooling2D(pool_size=(2, 2)))
modelI.add(Flatten())
modelI.add(Dense(64))
modelI.add(Activation('relu'))
modelI.add(Dropout(0.5))
modelI.add(Dense(2))
modelI.add(Activation('softmax'))
modelI.compile(loss='sparse_categorical_crossentropy',
optimizer='adam',
metrics=['accuracy'])
modelI.load_weights('model_saved.h5')
sess = keras.backend.get_session()
# image = request.args.get('image', default = 1, type = str)
print("------------------")
print(image)
print("------------------")
img = tensorflow.read_file(image)
img = tensorflow.image.decode_jpeg(img, channels=3)
img.set_shape([None, None, 3])
img = tensorflow.image.resize_images(img, (350, 350))
img = img.eval(session=sess) # convert to numpy array
img = np.expand_dims(img, 0) # make 'batch' of 1
pred = modelI.predict(img)
# pred = labels["label_names"][np.argmax(pred)]
print(pred)
y = ["Sarcoptic & demodectic mange", "Ring worms"]
print(np.argmax(pred))
print(y[np.argmax(pred)])
return {
"pred": y[np.argmax(pred)],
"accuracy": str(pred[0][np.argmax(pred)])
}
# predictImageClass("data/test/Ringworm/Ringworm1.jpg")
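# The preprocessing above relies on TF 1.x-era APIs (tensorflow.read_file,
# resize_images, the Keras backend session). A minimal alternative sketch for
# TF 2.x eager mode that produces the same (1, 350, 350, 3) batch; the function
# name and defaults are illustrative, not part of the original code:
def load_image_batch(path, size=(350, 350)):
    img = tensorflow.io.read_file(path)
    img = tensorflow.image.decode_jpeg(img, channels=3)
    img = tensorflow.image.resize(img, size)
    return np.expand_dims(img.numpy(), 0)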