import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
|
|
# Load the fine-tuned model and tokenizer from the Hugging Face Hub.
# This assumes the LoRA adapter was merged into the base model before upload.
model = AutoModelForCausalLM.from_pretrained("your-huggingface-username/lora_model")
tokenizer = AutoTokenizer.from_pretrained("your-huggingface-username/lora_model")
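# Optional: on a GPU machine, loading in half precision roughly halves memory
# use. A minimal sketch, assuming torch and accelerate are installed
# (device_map="auto" requires accelerate):
#
# import torch
# model = AutoModelForCausalLM.from_pretrained(
#     "your-huggingface-username/lora_model",
#     torch_dtype=torch.float16,
#     device_map="auto",
# )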
|
|
# Generate a model response for a given prompt.
def generate_response(user_input):
    # Tokenize the prompt; keep the attention mask for generate().
    inputs = tokenizer(user_input, return_tensors="pt")
    outputs = model.generate(**inputs, max_new_tokens=128, do_sample=True,
                             pad_token_id=tokenizer.eos_token_id)
    # Decode only the newly generated tokens, not the echoed prompt.
    new_tokens = outputs[0][inputs["input_ids"].shape[-1]:]
    return tokenizer.decode(new_tokens, skip_special_tokens=True)
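# If the checkpoint is instruction-tuned, applying the tokenizer's chat
# template (when one is defined) usually yields better responses. A hedged
# sketch of the tokenization step above under that assumption:
#
# messages = [{"role": "user", "content": user_input}]
# prompt = tokenizer.apply_chat_template(
#     messages, tokenize=False, add_generation_prompt=True)
# inputs = tokenizer(prompt, return_tensors="pt")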
|
|
# Build a simple Gradio text-in/text-out interface.
interface = gr.Interface(fn=generate_response, inputs="text", outputs="text",
                         title="Model for CIED Management",
                         description="Evaluate a clinical case and get the model's recommendations.")
|
|
# Launch the local web app.
interface.launch()
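# Note: if the Hub repository stores only LoRA adapter weights rather than a
# merged model, the plain AutoModelForCausalLM load above will fail. A minimal
# sketch using the peft library (an assumption; the repo name "lora_model"
# suggests this may apply):
#
# from peft import AutoPeftModelForCausalLM
# model = AutoPeftModelForCausalLM.from_pretrained("your-huggingface-username/lora_model")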