Spaces:
Configuration error
Update app.py
Browse files
app.py
CHANGED
|
@@ -1,8 +1,8 @@
|
|
| 1 |
import gradio as gr
|
| 2 |
from transformers import AutoModelForCausalLM, AutoTokenizer
|
| 3 |
|
| 4 |
-
#
|
| 5 |
-
model_name = "
|
| 6 |
model = AutoModelForCausalLM.from_pretrained(model_name)
|
| 7 |
tokenizer = AutoTokenizer.from_pretrained(model_name)
|
| 8 |
|
|
@@ -18,7 +18,7 @@ def chatbot_response(user_input):
|
|
| 18 |
new_user_input_ids = tokenizer.encode(user_input + tokenizer.eos_token, return_tensors='pt')
|
| 19 |
|
| 20 |
# Get the response from the model
|
| 21 |
-
bot_output = model.generate(new_user_input_ids, max_length=1000, pad_token_id=tokenizer.eos_token_id)
|
| 22 |
|
| 23 |
# Decode the response and return
|
| 24 |
bot_output_text = tokenizer.decode(bot_output[:, new_user_input_ids.shape[-1]:][0], skip_special_tokens=True)
|
|
|
|
| 1 |
import gradio as gr
|
| 2 |
from transformers import AutoModelForCausalLM, AutoTokenizer
|
| 3 |
|
| 4 |
+
# Load the satvikag/chatbot model and tokenizer from Hugging Face
|
| 5 |
+
model_name = "satvikag/chatbot"
|
| 6 |
model = AutoModelForCausalLM.from_pretrained(model_name)
|
| 7 |
tokenizer = AutoTokenizer.from_pretrained(model_name)
|
| 8 |
|
|
|
|
| 18 |
new_user_input_ids = tokenizer.encode(user_input + tokenizer.eos_token, return_tensors='pt')
|
| 19 |
|
| 20 |
# Get the response from the model
|
| 21 |
+
bot_output = model.generate(new_user_input_ids, max_length=1000, pad_token_id=tokenizer.eos_token_id, no_repeat_ngram_size=2)
|
| 22 |
|
| 23 |
# Decode the response and return
|
| 24 |
bot_output_text = tokenizer.decode(bot_output[:, new_user_input_ids.shape[-1]:][0], skip_special_tokens=True)
|