import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM, TextGenerationPipeline

# Load the fine-tuned group-chat model and its tokenizer from the Hugging Face Hub.
# NOTE(review): trust_remote_code=True executes arbitrary code from the model repo —
# confirm 'cbauer/groupchatGPT' is a trusted source.
# NOTE(review): ignore_mismatched_sizes=True silently re-initializes any weight whose
# shape differs from the checkpoint — verify this is intentional and not masking a
# config/checkpoint mismatch.
model = AutoModelForCausalLM.from_pretrained(
    'cbauer/groupchatGPT',
    trust_remote_code=True,
    ignore_mismatched_sizes=True,
)
tokenizer = AutoTokenizer.from_pretrained('cbauer/groupchatGPT')

# Text-generation pipeline used by the Gradio callback below.
generator = TextGenerationPipeline(model=model, tokenizer=tokenizer)
|
|
def generate(text):
    """Generate a short chat continuation for *text* with the group-chat model.

    Args:
        text: Prompt string, e.g. "###Chase Hello" (the "### <name>" prefix
            presumably mirrors the model's training format — TODO confirm).

    Returns:
        The generated text of the first (only) returned sequence; the pipeline
        output includes the prompt itself.
    """
    # max_length=30 caps prompt + generated tokens; a single sequence is enough
    # for the single-textbox UI.
    result = generator(text, max_length=30, num_return_sequences=1)
    return result[0]["generated_text"]
|
|
# Example prompts shown in the Gradio UI (one inner list per input component).
examples = [
    ["###Chase Hello"],
    ["### Jake Whats up"],
]
|
|
# Build the Gradio UI: one multi-line textbox in, one textbox out, with the
# example prompts pre-populated.
demo = gr.Interface(
    fn=generate,
    inputs=gr.components.Textbox(lines=5, label="Input Text"),
    outputs=gr.components.Textbox(label="Generated Text"),
    examples=examples,
)

# Start the web server (blocks until the app is stopped).
demo.launch()
|
|