Commit 22b1a4ea authored by Sachindu's avatar Sachindu

Updated

parent b2250c67
"""Canis Care VetBot: a small intent-classification chatbot.

Pipeline: tokenize and stem the intent patterns from intents.json into a
bag-of-words training matrix, train a two-hidden-layer softmax network
with tflearn, then run a console chat loop that also reports TextBlob
sentiment polarity for each user message.
"""
# Colab-only setup — run once in a separate top cell, with pinned versions:
#   %pip install -q tflearn==0.5.0 textblob==0.15.3

import json
import pickle
import random

import nltk
import numpy
import tflearn
from nltk.stem.lancaster import LancasterStemmer
from textblob import TextBlob
from tensorflow.python.framework import ops

nltk.download("punkt")
stemmer = LancasterStemmer()

with open("intents.json") as file:
    data = json.load(file)

# Load the preprocessed corpus from cache if present; otherwise rebuild it.
# NOTE: the original used a bare `except:`, which also swallowed
# KeyboardInterrupt and genuine bugs — catch only cache-read failures.
try:
    with open("data.pickle", "rb") as f:
        words, labels, training, output = pickle.load(f)
except (FileNotFoundError, EOFError, pickle.UnpicklingError, ValueError):
    words = []
    labels = []
    docs_x = []  # tokenized pattern for each training example
    docs_y = []  # intent tag for each training example

    for intent in data["intents"]:
        for pattern in intent["patterns"]:
            wrds = nltk.word_tokenize(pattern)
            words.extend(wrds)
            docs_x.append(wrds)
            docs_y.append(intent["tag"])

        if intent["tag"] not in labels:
            labels.append(intent["tag"])

    # Stemmed, lowercased, de-duplicated vocabulary ("?" carries no signal).
    words = sorted({stemmer.stem(w.lower()) for w in words if w != "?"})
    labels = sorted(labels)

    training = []
    output = []
    out_empty = [0] * len(labels)

    for x, doc in enumerate(docs_x):
        # Set membership instead of repeated list scans for each vocab word.
        doc_stems = {stemmer.stem(w.lower()) for w in doc}
        bag = [1 if w in doc_stems else 0 for w in words]

        output_row = out_empty[:]  # one-hot row for this example's tag
        output_row[labels.index(docs_y[x])] = 1

        training.append(bag)
        output.append(output_row)

    training = numpy.array(training)
    output = numpy.array(output)

    with open("data.pickle", "wb") as f:
        pickle.dump((words, labels, training, output), f)

# Network: bag-of-words input -> two 8-unit hidden layers -> softmax over tags.
ops.reset_default_graph()

net = tflearn.input_data(shape=[None, len(training[0])])
net = tflearn.fully_connected(net, 8)
net = tflearn.fully_connected(net, 8)
net = tflearn.fully_connected(net, len(output[0]), activation="softmax")
net = tflearn.regression(net)

model = tflearn.DNN(net)

# Reuse a previously trained model when one is on disk; otherwise train + save.
try:
    model.load("model.tflearn")
except Exception:  # tflearn/TF raise assorted types when the files are absent
    model.fit(training, output, n_epoch=2000, batch_size=8, show_metric=True)
    model.save("model.tflearn")


def bag_of_words(s, words):
    """Return a 0/1 numpy vector marking which vocabulary entries occur in s.

    Parameters:
        s: raw user sentence.
        words: sorted, stemmed vocabulary built during preprocessing.
    """
    s_stems = {stemmer.stem(word.lower()) for word in nltk.word_tokenize(s)}
    return numpy.array([1 if w in s_stems else 0 for w in words])


def chat():
    """Console loop: classify each message and print a matching bot response.

    Type "quit" to exit. Each message also gets a TextBlob sentiment
    polarity printout (range -1.0 .. 1.0).
    """
    print("Welcome to Canis Care VetBot (type quit to stop)!")
    while True:
        inp = input("You: ")
        if inp.lower() == "quit":
            break

        # getting sentiment analysis value
        sa = TextBlob(inp).sentiment.polarity
        print("Sentiment Value is : ", sa)

        results = model.predict([bag_of_words(inp, words)])[0]
        results_index = numpy.argmax(results)
        tag = labels[results_index]

        # Only answer when the classifier is reasonably confident.
        if results[results_index] > 0.7:
            # FIX: `responses` was unbound (NameError) if no intent matched
            # the predicted tag — initialize and guard before choosing.
            responses = []
            for tg in data["intents"]:
                if tg["tag"] == tag:
                    responses = tg["bot_response"]
            if responses:
                print(random.choice(responses))
            else:
                print("I didn't get that, try again.")
        else:
            print("I didn't get that, try again.")


chat()
"""Canis Care VetBot (revised notebook): intent-classification chatbot.

Cell 1 of the notebook builds the bag-of-words corpus from intents.json
and trains (or reloads) a two-hidden-layer tflearn softmax classifier;
cell 2 defines the helpers and runs the interactive chat loop with a
TextBlob sentiment readout per message.
"""
# Colab-only setup — run once in a separate top cell, with pinned versions:
#   %pip install -q tflearn==0.5.0 textblob==0.15.3

import json
import pickle
import random

import nltk
import numpy
import tflearn
import matplotlib.pyplot as plt  # imported by the notebook; not used yet
from nltk.stem.lancaster import LancasterStemmer
from textblob import TextBlob
from tensorflow.python.framework import ops

nltk.download("punkt")
stemmer = LancasterStemmer()

with open("intents.json") as file:
    data = json.load(file)

# Load the preprocessed corpus from cache if present; otherwise rebuild it.
# NOTE: the original bare `except:` also swallowed KeyboardInterrupt and
# real bugs — catch only the failures a missing/corrupt cache can produce.
try:
    with open("data.pickle", "rb") as f:
        words, labels, training, output = pickle.load(f)
except (FileNotFoundError, EOFError, pickle.UnpicklingError, ValueError):
    words = []
    labels = []
    docs_x = []  # tokenized pattern per training example
    docs_y = []  # intent tag per training example

    for intent in data["intents"]:
        for pattern in intent["patterns"]:
            wrds = nltk.word_tokenize(pattern)
            words.extend(wrds)
            docs_x.append(wrds)
            docs_y.append(intent["tag"])

        if intent["tag"] not in labels:
            labels.append(intent["tag"])

    # Stemmed, lowercased, de-duplicated vocabulary ("?" carries no signal).
    words = sorted({stemmer.stem(w.lower()) for w in words if w != "?"})
    labels = sorted(labels)

    training = []
    output = []
    out_empty = [0] * len(labels)

    for x, doc in enumerate(docs_x):
        # Set membership instead of scanning a list once per vocab word.
        doc_stems = {stemmer.stem(w.lower()) for w in doc}
        bag = [1 if w in doc_stems else 0 for w in words]

        output_row = out_empty[:]  # one-hot row for this example's tag
        output_row[labels.index(docs_y[x])] = 1

        training.append(bag)
        output.append(output_row)

    training = numpy.array(training)
    output = numpy.array(output)

    with open("data.pickle", "wb") as f:
        pickle.dump((words, labels, training, output), f)

# Network: bag-of-words input -> two 8-unit hidden layers -> softmax over tags.
ops.reset_default_graph()

net = tflearn.input_data(shape=[None, len(training[0])])
net = tflearn.fully_connected(net, 8)
net = tflearn.fully_connected(net, 8)
net = tflearn.fully_connected(net, len(output[0]), activation="softmax")
net = tflearn.regression(net)

model = tflearn.DNN(net)

# Reuse a previously trained model when one is on disk; otherwise train + save.
try:
    model.load("model.tflearn")
except Exception:  # tflearn/TF raise assorted types when the files are absent
    model.fit(training, output, n_epoch=2000, batch_size=8, show_metric=True)
    model.save("model.tflearn")


def bag_of_words(s, words):
    """Return a 0/1 numpy vector marking which vocabulary entries occur in s.

    Parameters:
        s: raw user sentence.
        words: sorted, stemmed vocabulary built during preprocessing.
    """
    s_stems = {stemmer.stem(word.lower()) for word in nltk.word_tokenize(s)}
    return numpy.array([1 if w in s_stems else 0 for w in words])


def chat():
    """Console loop: classify each message and print a matching bot response.

    Type "quit" to exit. Each message also gets a TextBlob sentiment
    polarity printout (range -1.0 .. 1.0).
    """
    print("Welcome to Canis Care VetBot (type quit to stop)!")
    while True:
        inp = input("You: ")
        if inp.lower() == "quit":
            break

        # getting sentiment analysis value
        sa = TextBlob(inp).sentiment.polarity
        print("Sentiment Value is : ", sa)

        results = model.predict([bag_of_words(inp, words)])[0]
        results_index = numpy.argmax(results)
        tag = labels[results_index]

        # Only answer when the classifier is reasonably confident.
        if results[results_index] > 0.7:
            # FIX: `responses` was unbound (NameError) if no intent matched
            # the predicted tag — initialize and guard before choosing.
            responses = []
            for tg in data["intents"]:
                if tg["tag"] == tag:
                    responses = tg["bot_response"]
            if responses:
                print(random.choice(responses))
            else:
                print("I didn't get that, try again.")
        else:
            print("I didn't get that, try again.")


chat()
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment