import csv
from datetime import datetime

import torch
from fastapi import FastAPI, Form, Request
from fastapi.templating import Jinja2Templates
from transformers import AutoModelForCausalLM, AutoModelForSeq2SeqLM, AutoTokenizer
|
|
| |
| |
|
|
# FastAPI application and server-side template renderer.
app = FastAPI()
templates = Jinja2Templates(directory="templates")

# GPT-J is a decoder-only (causal) language model.  Loading it through
# AutoModelForSeq2SeqLM fails at startup because the checkpoint has no
# encoder-decoder config; AutoModelForCausalLM is the matching auto class.
model_name = "EleutherAI/gpt-j-6B"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
|
|
async def generate_conversation(prompt):
    """Generate a model continuation of *prompt*.

    Returns the decoded text on success, or an ``"Error: ..."`` string on
    failure — callers render the result either way, so failures surface as
    page text rather than a 500.
    """
    try:
        inputs = tokenizer(prompt, return_tensors="pt")
        # Inference only: no_grad skips building the autograd graph, which
        # would otherwise waste substantial memory on a 6B-parameter model.
        with torch.no_grad():
            output = model.generate(**inputs)
        return tokenizer.decode(output[0], skip_special_tokens=True)
    except Exception as e:
        # Deliberate best-effort: keep the error-as-text contract the
        # template expects instead of propagating the exception.
        return f"Error: {str(e)}"
|
|
def save_to_csv(prompt, conversation):
    """Write one prompt/response pair to a timestamped CSV file.

    Returns the filename that was written.  The original computed a
    timestamp but never used it, writing to a fixed ``info.csv`` in ``'w'``
    mode — silently overwriting the previous conversation on every request.
    Embedding the timestamp in the filename preserves each exchange.
    """
    timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
    filename = f"conversation_{timestamp}.csv"

    with open(filename, mode='w', newline='', encoding='utf-8') as csv_file:
        writer = csv.writer(csv_file)
        writer.writerow(['Prompt', 'Generated Conversation'])
        writer.writerow([prompt, conversation])

    return filename
|
|
@app.get("/")
def read_form(request: Request):
    """Serve the prompt-entry form."""
    context = {"request": request}
    return templates.TemplateResponse("index.html", context)
|
|
@app.post("/")
async def generate_and_display(request: Request, prompt: str = Form(...)):
    """Handle a submitted prompt: generate text, persist it, re-render the page."""
    conversation = await generate_conversation(prompt)
    csv_filename = save_to_csv(prompt, conversation)
    context = {
        "request": request,
        "prompt": prompt,
        "conversation": conversation,
        "csv_filename": csv_filename,
    }
    return templates.TemplateResponse("index.html", context)
|
|
if __name__ == "__main__":
    # Local-development entry point; in production run via
    # `uvicorn <module>:app` instead.
    import uvicorn

    uvicorn.run(app, host="127.0.0.1", port=8000)
|
|