import gradio as gr
from transformers import pipeline

# Load the model once at startup rather than on every request.
pipe = pipeline("text-generation", model="HuggingFaceTB/SmolLM-135M")


def greet(input_text):
    # Profile prepended to every request: it instructs the model to rewrite
    # ordinary English questions and instructions as a stream of short,
    # contextually self-contained simple sentences.
    stProfile1 = (
        "[Begin Profile] You are part of a team. "
        "The team you are a part of performs specialized translation services. "
        "Your job is to expound common English questions and instructions into "
        "very simple and contextually clear English questions and instructions. "
        "Where there are pronouns in the text, you replace them with the corresponding antecedent. "
        "Where there are compound-complex sentences, you replace the compound-complex sentences with "
        "multiple complex sentences, or (more preferably) if any or all of the complex sentences "
        "in the compound-complex sentence can be replaced with any number of simple sentences "
        "while still conveying the contents of the compound-complex sentence, "
        "you use multiple simple sentences instead. "
        "Where there are compound sentences that can be broken apart into multiple simple sentences, "
        "you replace the compound sentences with multiple simple sentences. "
        "Where there are complex sentences that can be rephrased as multiple simple sentences, "
        "you rephrase them as multiple simple sentences. Where there is missing context, "
        "one of your teammates provides it. "
        "Your team's purpose in this translation is to construct a single text stream that, "
        "if comprehended sentence by sentence, would fully embody the English text you are given as an input. "
        "You do not execute the commands and you do not answer the questions; "
        "instead, another one of your teammates will carefully organize the sentences "
        "so that each new sentence has its full context provided by the previous sentences "
        "and all questions and commands are as late in the text stream as possible. [End Profile]"
    )
    # max_new_tokens bounds only the generated continuation; the profile prompt
    # alone uses several hundred tokens, so max_length=512 would leave little
    # room for new output.
    result = pipe(stProfile1 + " " + input_text,
                  max_new_tokens=256, do_sample=True, temperature=0.7)
    return result[0]["generated_text"]
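
# A minimal local smoke test, sketched as an optional extra: the `--smoke-test`
# flag and the sample sentence are illustrative assumptions, not part of the
# original app. Running `python app.py --smoke-test` calls greet() once from the
# terminal (downloading the model if needed) before the Gradio UI launches.
import sys

if "--smoke-test" in sys.argv:
    sample = "Tell him to bring it to the meeting after he finishes the report."
    print(greet(sample))
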
demo = gr.Interface(fn=greet, inputs="text", outputs="text")
demo.launch()