Commit dbfdbd28 authored by Sachindu

API created

parent 22b1a4ea
# Default ignored files
/shelf/
/workspace.xml
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
  <component name="NewModuleRootManager">
    <content url="file://$MODULE_DIR$">
      <excludeFolder url="file://$MODULE_DIR$/venv" />
    </content>
    <orderEntry type="jdk" jdkName="Python 3.10" jdkType="Python SDK" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
  <component name="PyDocumentationSettings">
    <option name="format" value="PLAIN" />
    <option name="myDocStringFormat" value="Plain" />
  </component>
</module>
\ No newline at end of file
<component name="InspectionProjectProfileManager">
  <settings>
    <option name="USE_PROJECT_PROFILE" value="false" />
    <version value="1.0" />
  </settings>
</component>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.10" project-jdk-type="Python SDK" />
  <component name="PyCharmProfessionalAdvertiser">
    <option name="shown" value="true" />
  </component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectModuleManager">
    <modules>
      <module fileurl="file://$PROJECT_DIR$/.idea/ChatbotBE.iml" filepath="$PROJECT_DIR$/.idea/ChatbotBE.iml" />
    </modules>
  </component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="VcsDirectoryMappings">
    <mapping directory="$PROJECT_DIR$" vcs="Git" />
  </component>
</project>
\ No newline at end of file
web: gunicorn mysite.wsgi:application --log-file -
\ No newline at end of file
from django.contrib import admin
from .models import Chat
admin.site.register(Chat)
from django.apps import AppConfig
class ApiConfig(AppConfig):
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'api'
# Generated by Django 4.1 on 2022-08-25 06:14
from django.db import migrations, models
class Migration(migrations.Migration):

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            # creates the Chat model defined in api/models.py (the generated
            # name 'Note' was a leftover from an earlier model name)
            name='Chat',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('body', models.TextField()),
                ('updated', models.DateTimeField(auto_now=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
            ],
            options={
                'ordering': ['-updated'],
            },
        ),
    ]
from django.db import models
class Chat(models.Model):
    body = models.TextField()
    updated = models.DateTimeField(auto_now=True)
    created = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return self.body[0:50]

    class Meta:
        ordering = ['-updated']
from rest_framework.serializers import ModelSerializer
from .models import Chat
class ChatSerializer(ModelSerializer):
    class Meta:
        model = Chat
        fields = '__all__'
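ChatSerializer exposes every Chat field; the views below do not reference it yet. A minimal sketch of what it would produce, assuming a Chat row created in a Django shell (field values are illustrative):

# Illustrative only; run inside `python manage.py shell` so Django is configured.
chat = Chat.objects.create(body="Hello VetBot")
print(ChatSerializer(chat).data)
# -> {'id': 1, 'body': 'Hello VetBot', 'updated': '...', 'created': '...'}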
from django.test import TestCase
# Create your tests here.
from django.urls import path
from . import views
urlpatterns = [
    path('chat/', views.getChatBot),
    path('predict/', views.getPredict),
]
from django.shortcuts import render
from rest_framework.decorators import api_view
from rest_framework.response import Response

from chat import get_response, get_sa


@api_view(['GET'])
def getChatBot(request):
    return Response("Welcome to Canis Care VetBot!")


@api_view(['POST'])
def getPredict(request):
    data = request.data
    message = data['message']
    output = get_response(message)
    sa = get_sa(message)
    # bot reply and sentiment polarity, joined with a comma for the client
    result = output + "," + str(sa)
    return Response(result)
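For context, the POST body must carry a 'message' key, and the view returns the reply and the sentiment value as one comma-joined string. A hypothetical client call against a local dev server (host and port are assumptions, not part of this commit):

# Hypothetical client; assumes the Django dev server runs on localhost:8000.
import requests

r = requests.post(
    "http://127.0.0.1:8000/predict/",             # route from api/urls.py
    json={"message": "My dog keeps scratching"},  # getPredict reads data['message']
)
print(r.json())  # e.g. 'May be Ringworm or ...,0.0' -> reply text, polarity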
import random
import json

import torch
from textblob import TextBlob

from model import NeuralNet
from nltk_utils import bag_of_words, tokenize

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

with open('intents.json', 'r') as json_data:
    intents = json.load(json_data)

FILE = "model.h5"
# map_location lets a checkpoint trained on a GPU load on a CPU-only host
data = torch.load(FILE, map_location=device)

input_size = data["input_size"]
hidden_size = data["hidden_size"]
output_size = data["output_size"]
all_words = data['all_words']
tags = data['tags']
model_state = data["model_state"]

model = NeuralNet(input_size, hidden_size, output_size).to(device)
model.load_state_dict(model_state)
model.eval()

bot_name = "VetBot"


def get_response(msg):
    sentence = tokenize(msg)
    X = bag_of_words(sentence, all_words)
    X = X.reshape(1, X.shape[0])
    X = torch.from_numpy(X).to(device)

    output = model(X)
    _, predicted = torch.max(output, dim=1)
    tag = tags[predicted.item()]

    # only answer when the softmax confidence clears 0.75
    probs = torch.softmax(output, dim=1)
    prob = probs[0][predicted.item()]
    if prob.item() > 0.75:
        for intent in intents['intents']:
            if tag == intent["tag"]:
                return random.choice(intent['bot_response'])

    return "I didn't get that, try again."


def get_sa(msg):
    # sentiment polarity: a float in [-1.0, 1.0], negative to positive
    edu = TextBlob(msg)
    sa = edu.sentiment.polarity
    return sa


if __name__ == "__main__":
    print("Welcome to Canis Care VetBot!")
    while True:
        sentence = input("You: ")
        if sentence == "quit":
            break

        resp = get_response(sentence)
        print(resp)
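get_sa wraps TextBlob's lexicon-based sentiment analyzer, which returns a polarity float in [-1.0, 1.0]. A quick sketch of the kind of values it yields (exact numbers depend on the TextBlob version and lexicon):

# Illustrative polarity values; signs are reliable, magnitudes may vary.
from textblob import TextBlob
print(TextBlob("My dog is happy and playful").sentiment.polarity)   # > 0 (positive)
print(TextBlob("My dog is sick and miserable").sentiment.polarity)  # < 0 (negative)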
@@ -111,8 +111,8 @@
         "What are the precautions for Dandruff?",
         "What are the Dandruff precautions?",
         "What are some Dandruff preventative measures?",
-        "How can I prevent my dog from Dandruff?",
-        "How can I keep my dog from getting Dandruff?",
+        "How could I prevent my dog from Dandruff?",
+        "How could I keep my dog from getting Dandruff?",
         "Precautions for Dandruff?"
       ],
       "bot_response": [
@@ -486,6 +486,41 @@
         "Flea allergy, Food allergy, Inhalant or contact allergy",
         "Allergy to the normal bacterial flora and Yeast organisms of the skin"
       ]
+    },
+    {
+      "tag": "identify1",
+      "patterns": [
+        "Itchiness"
+      ],
+      "bot_response": ["May be Ringworm or Dandruff or Lupus or Canine Atopic Dermatitis or Mange or Folliculitis"]
+    },
+    {
+      "tag": "identify2",
+      "patterns": [
+        "Hair loss"
+      ],
+      "bot_response": ["May be Ringworm or Dandruff or Yeast Infections or Mange or Folliculitis"]
+    },
+    {
+      "tag": "identify3",
+      "patterns": [
+        "Scratching"
+      ],
+      "bot_response": ["May be Ringworm or Ticks and Fleas or Yeast Infections or Lupus or Canine Atopic Dermatitis"]
+    },
+    {
+      "tag": "identify4",
+      "patterns": [
+        "Redness"
+      ],
+      "bot_response": ["May be Canine Atopic Dermatitis or Mange or Folliculitis or Lupus"]
+    },
+    {
+      "tag": "identify5",
+      "patterns": [
+        "Papules"
+      ],
+      "bot_response": ["May be Impetigo or Folliculitis"]
     }
   ]
 }
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run administrative tasks."""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mysite.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
import torch
import torch.nn as nn
class NeuralNet(nn.Module):
    def __init__(self, input_size, hidden_size, num_classes):
        super(NeuralNet, self).__init__()
        self.l1 = nn.Linear(input_size, hidden_size)
        self.l2 = nn.Linear(hidden_size, hidden_size)
        self.l3 = nn.Linear(hidden_size, num_classes)
        self.relu = nn.ReLU()

    def forward(self, x):
        out = self.l1(x)
        out = self.relu(out)
        out = self.l2(out)
        out = self.relu(out)
        out = self.l3(out)
        # no activation and no softmax at the end:
        # CrossEntropyLoss expects raw logits during training
        return out
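The comment above matters: training uses CrossEntropyLoss, which applies log-softmax itself, so forward returns raw logits and chat.py adds softmax only at inference. A minimal shape check (the sizes here are placeholders, not the trained model's):

# Placeholder sizes; the real input_size/output_size come from the checkpoint.
import torch
net = NeuralNet(input_size=54, hidden_size=8, num_classes=10)
logits = net(torch.zeros(1, 54))       # raw scores, shape (1, 10)
probs = torch.softmax(logits, dim=1)   # same post-processing chat.py applies
print(logits.shape, round(probs.sum().item(), 4))  # torch.Size([1, 10]) 1.0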
"""
ASGI config for mysite project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mysite.settings')
application = get_asgi_application()
"""
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 3.2.15.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
import os
from pathlib import Path
import django_heroku
import dj_database_url
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-0gurly7=gyq2pso@wuq_t&*&=268k=0e&ro8n4t49(e#rtt7l$'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'api.apps.ApiConfig',
    'rest_framework',
    'corsheaders',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'corsheaders.middleware.CorsMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [BASE_DIR / 'templates'],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATICFILES_DIRS = (os.path.join(BASE_DIR, 'static'),)
django_heroku.settings(locals())
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
CORS_ALLOW_ALL_ORIGINS = True
\ No newline at end of file
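Given the two SECURITY WARNING comments in the settings above, a common hardening sketch is to read the secret key and debug flag from the environment; the env-var names here are assumptions, not part of this commit:

# Hypothetical hardening sketch: keep secrets out of source control.
import os
SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY', 'dev-only-fallback')
DEBUG = os.environ.get('DJANGO_DEBUG', 'False') == 'True'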
"""mysite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
    path('admin/', admin.site.urls),
    path('', include('api.urls')),
]
"""
WSGI config for mysite project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mysite.settings')
application = get_wsgi_application()
import numpy as np
import nltk
# nltk.download('punkt')
from nltk.stem.porter import PorterStemmer
stemmer = PorterStemmer()
def tokenize(sentence):
    return nltk.word_tokenize(sentence)


def stem(word):
    return stemmer.stem(word.lower())


def bag_of_words(tokenized_sentence, words):
    # stem each word
    sentence_words = [stem(word) for word in tokenized_sentence]
    # initialize bag with 0 for each word
    bag = np.zeros(len(words), dtype=np.float32)
    for idx, w in enumerate(words):
        if w in sentence_words:
            bag[idx] = 1
    return bag
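A worked example of the pipeline above, with a toy stemmed vocabulary standing in for the trained all_words list (requires the punkt tokenizer from the commented-out nltk.download('punkt')):

# Toy vocabulary; the real list is built from intents.json during training.
tokens = tokenize("How can I prevent Dandruff?")  # ['How', 'can', 'I', ...]
vocab = ['dandruff', 'dog', 'prevent', 'tick']    # already-stemmed entries
print(bag_of_words(tokens, vocab))                # [1. 0. 1. 0.]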
asgiref==3.5.2
click==8.1.3
colorama==0.4.5
dj-database-url==1.0.0
Django==4.1.2
django-heroku==0.3.1
gunicorn==20.1.0
joblib==1.2.0
nltk==3.7
numpy==1.23.3
psycopg2==2.9.4
regex==2022.9.13
sqlparse==0.4.3
textblob==0.17.1
torch==1.12.1
tqdm==4.64.1
typing_extensions==4.4.0
tzdata==2022.4
whitenoise==6.2.0
import numpy as np
import random
import json
import torch
import torch.nn as nn
from torch.utils.data import Dataset, DataLoader
from nltk_utils import bag_of_words, tokenize, stem
from model import NeuralNet
with open('intents.json', 'r') as f:
    intents = json.load(f)
all_words = []
tags = []
xy = []
# loop through each sentence in our intents patterns
for intent in intents['intents']:
    tag = intent['tag']
    # add to tag list
    tags.append(tag)
    for pattern in intent['patterns']:
        # tokenize each word in the sentence
        w = tokenize(pattern)
        # add to our words list
        all_words.extend(w)
        # add to xy pair
        xy.append((w, tag))
# stem and lower each word
ignore_words = ['?', '.', '!']
all_words = [stem(w) for w in all_words if w not in ignore_words]
# remove duplicates and sort
all_words = sorted(set(all_words))
tags = sorted(set(tags))
print(len(xy), "patterns")
print(len(tags), "tags:", tags)
print(len(all_words), "unique stemmed words:", all_words)
# create training data
X_train = []
y_train = []
for (pattern_sentence, tag) in xy:
    # X: bag of words for each pattern_sentence
    bag = bag_of_words(pattern_sentence, all_words)
    X_train.append(bag)
    # y: PyTorch CrossEntropyLoss needs only class labels, not one-hot
    label = tags.index(tag)
    y_train.append(label)
X_train = np.array(X_train)
y_train = np.array(y_train)
# Hyper-parameters
num_epochs = 2000
batch_size = 8
learning_rate = 0.001
input_size = len(X_train[0])
hidden_size = 8
output_size = len(tags)
print(input_size, output_size)
class ChatDataset(Dataset):

    def __init__(self):
        self.n_samples = len(X_train)
        self.x_data = X_train
        self.y_data = y_train

    # support indexing such that dataset[i] can be used to get i-th sample
    def __getitem__(self, index):
        return self.x_data[index], self.y_data[index]

    # we can call len(dataset) to return the size
    def __len__(self):
        return self.n_samples
dataset = ChatDataset()
train_loader = DataLoader(dataset=dataset,
                          batch_size=batch_size,
                          shuffle=True,
                          num_workers=0)
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
model = NeuralNet(input_size, hidden_size, output_size).to(device)
# Loss and optimizer
criterion = nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
# Train the model
for epoch in range(num_epochs):
    for (words, labels) in train_loader:
        words = words.to(device)
        labels = labels.to(dtype=torch.long).to(device)

        # Forward pass
        outputs = model(words)
        # if y would be one-hot, we must apply
        # labels = torch.max(labels, 1)[1]
        loss = criterion(outputs, labels)

        # Backward and optimize
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

    if (epoch + 1) % 100 == 0:
        print(f'Epoch [{epoch + 1}/{num_epochs}], Loss: {loss.item():.4f}')

print(f'final loss: {loss.item():.4f}')
data = {
    "model_state": model.state_dict(),
    "input_size": input_size,
    "hidden_size": hidden_size,
    "output_size": output_size,
    "all_words": all_words,
    "tags": tags
}
FILE = "model.h5"
torch.save(data, FILE)
print(f'training complete. file saved to {FILE}')
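Despite the .h5 name, the saved file is a plain torch.save pickle, not an HDF5/Keras file. A sketch of reloading it the way chat.py does, with map_location assumed so the checkpoint also loads on GPU-less hosts:

# Reload sketch; mirrors the loading code in chat.py.
ckpt = torch.load("model.h5", map_location="cpu")  # a pickled dict, not HDF5
net = NeuralNet(ckpt["input_size"], ckpt["hidden_size"], ckpt["output_size"])
net.load_state_dict(ckpt["model_state"])
net.eval()  # switch to inference mode before serving predictions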
{"cells":[{"cell_type":"code","execution_count":null,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":173470,"status":"ok","timestamp":1660462996396,"user":{"displayName":"Sachindu Gimhana","userId":"13341145685503871291"},"user_tz":-330},"id":"0V8EjOkXGTFb","outputId":"5524a27d-8f30-4905-dc82-b49d25e19bda"},"outputs":[],"source":["!pip install tflearn\n","!pip install textblob\n","\n","import nltk\n","nltk.download('punkt')\n","from nltk.stem.lancaster import LancasterStemmer\n","stemmer = LancasterStemmer()\n","from textblob import TextBlob\n","import numpy\n","import tflearn\n","import tensorflow\n","import random\n","import json\n","import pickle\n","import matplotlib.pyplot as plt\n","\n","with open(\"intents.json\") as file:\n"," data = json.load(file)\n","\n","try:\n"," with open(\"data.pickle\", \"rb\") as f:\n"," words, labels, training, output = pickle.load(f)\n","except:\n"," words = []\n"," labels = []\n"," docs_x = []\n"," docs_y = []\n","\n"," for intent in data[\"intents\"]:\n"," for pattern in intent[\"patterns\"]:\n"," wrds = nltk.word_tokenize(pattern)\n"," words.extend(wrds)\n"," docs_x.append(wrds)\n"," docs_y.append(intent[\"tag\"])\n","\n"," if intent[\"tag\"] not in labels:\n"," labels.append(intent[\"tag\"])\n","\n"," words = [stemmer.stem(w.lower()) for w in words if w != \"?\"]\n"," words = sorted(list(set(words)))\n","\n"," labels = sorted(labels)\n","\n"," training = []\n"," output = []\n","\n"," out_empty = [0 for _ in range(len(labels))]\n","\n"," for x, doc in enumerate(docs_x):\n"," bag = []\n","\n"," wrds = [stemmer.stem(w.lower()) for w in doc]\n","\n"," for w in words:\n"," if w in wrds:\n"," bag.append(1)\n"," else:\n"," bag.append(0)\n","\n"," output_row = out_empty[:]\n"," output_row[labels.index(docs_y[x])] = 1\n","\n"," training.append(bag)\n"," output.append(output_row)\n","\n","\n"," training = numpy.array(training)\n"," output = numpy.array(output)\n","\n"," with open(\"data.pickle\", \"wb\") as f:\n"," pickle.dump((words, labels, training, output), f)\n","\n","from tensorflow.python.framework import ops\n","ops.reset_default_graph()\n","\n","net = tflearn.input_data(shape=[None, len(training[0])])\n","net = tflearn.fully_connected(net, 8)\n","net = tflearn.fully_connected(net, 8)\n","net = tflearn.fully_connected(net, len(output[0]), activation=\"softmax\")\n","net = tflearn.regression(net)\n","\n","model = tflearn.DNN(net)\n","\n","try:\n"," model.load(\"model.tflearn\")\n","except:\n"," train = model.fit(training, output, n_epoch=2000, batch_size=8, show_metric=True)\n"," model.save(\"model.tflearn\")"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"o5kZkho2SH1n"},"outputs":[],"source":["def bag_of_words(s, words):\n"," bag = [0 for _ in range(len(words))]\n","\n"," s_words = nltk.word_tokenize(s)\n"," s_words = [stemmer.stem(word.lower()) for word in s_words]\n","\n"," for se in s_words:\n"," for i, w in enumerate(words):\n"," if w == se:\n"," bag[i] = 1\n"," \n"," return numpy.array(bag)\n"," \n","def chat():\n"," print(\"Welcome to Canis Care VetBot (type quit to stop)!\")\n"," while True:\n"," inp = input(\"You: \")\n"," if inp.lower() == \"quit\":\n"," break\n"," \n"," #getting sentiment analysis value\n"," edu=TextBlob(inp)\n"," sa=edu.sentiment.polarity\n"," print(\"Sentiment Value is : \",sa) \n","\n"," results = model.predict([bag_of_words(inp, words)])[0]\n"," results_index = numpy.argmax(results)\n"," tag = labels[results_index]\n","\n"," if results[results_index] > 0.7:\n"," for tg 
in data[\"intents\"]:\n"," if tg['tag'] == tag:\n"," responses = tg['bot_response']\n","\n"," print(random.choice(responses))\n","\n"," else:\n"," print(\"I didn't get that, try again.\")\n","\n","chat()"]}],"metadata":{"colab":{"authorship_tag":"ABX9TyMO0aMAMOJF7Y+oufnfsl+s","collapsed_sections":[],"name":"Vet_Bot.ipynb","provenance":[]},"kernelspec":{"display_name":"Python 3","name":"python3"},"language_info":{"name":"python"}},"nbformat":4,"nbformat_minor":0}