Question and answer model (.ipynb file)

import torch
import pandas as pd
from torch.utils.data import Dataset
from transformers import TrainingArguments, Trainer
from transformers import OpenAIGPTTokenizer, OpenAIGPTLMHeadModel
data_path = 'data/qna-summarization.xlsx'
df = pd.read_excel(data_path)
answers = df['Answer'].tolist()
questions = df['Question'].tolist()
tokenizer = OpenAIGPTTokenizer.from_pretrained('openai-gpt')
tokenizer.add_special_tokens({'pad_token': '[PAD]'})
# Use the LM-head variant: the bare OpenAIGPTModel returns hidden states only,
# so Trainer would have no loss to optimize.
model = OpenAIGPTLMHeadModel.from_pretrained('openai-gpt')
model.resize_token_embeddings(len(tokenizer))  # account for the added [PAD] token
# A decoder-only model is trained on the question and answer concatenated into a
# single sequence; the causal-LM labels are then the input ids themselves.
texts = [q + ' ' + a for q, a in zip(questions, answers)]
encodings = tokenizer(texts, return_tensors='pt', padding=True, truncation=True)
class QnADataset(Dataset):
    def __init__(self, encodings):
        self.encodings = encodings

    def __getitem__(self, idx):
        # Return a dict of tensors per example, the format Trainer expects.
        item = {key: val[idx] for key, val in self.encodings.items()}
        item['labels'] = item['input_ids'].clone()
        return item

    def __len__(self):
        # BatchEncoding has no per-example length, so count the rows of input_ids.
        return len(self.encodings['input_ids'])

dataset = QnADataset(encodings)
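# Quick sanity check (a sketch, not part of the original notebook): inspect the
# first example to confirm each item is a dict of equal-length tensors.
sample = dataset[0]
print({key: val.shape for key, val in sample.items()})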
training_args = TrainingArguments(
    output_dir='Question n Answering',        # output directory
    num_train_epochs=1,                       # total number of training epochs
    per_device_train_batch_size=100,          # batch size per device during training
    per_device_eval_batch_size=100,           # batch size for evaluation
    warmup_steps=500,                         # number of warmup steps for the learning rate scheduler
    weight_decay=0.01,                        # strength of weight decay
    logging_dir='Question n Answering/logs',  # directory for storing logs
    logging_steps=10
)
trainer = Trainer(
    model=model,            # the instantiated 🤗 Transformers model to be trained
    args=training_args,     # training arguments, defined above
    train_dataset=dataset   # training dataset
)
trainer.train()
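# A minimal sketch of saving the fine-tuned model and generating an answer.
# The prompt below is a hypothetical example; adjust it to match the training data.
trainer.save_model('Question n Answering/final')
tokenizer.save_pretrained('Question n Answering/final')

prompt = 'What is the capital of France?'
inputs = tokenizer(prompt, return_tensors='pt')
output_ids = model.generate(
    inputs['input_ids'],
    attention_mask=inputs['attention_mask'],
    max_new_tokens=50,
    pad_token_id=tokenizer.pad_token_id,
)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))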