| import gradio as gr |
| from transformers import pipeline |
| from transformers import Conversation |
|
|
# Mutable module state shared by the chat handler.
# Scratch list of user turns awaiting processing.
memory = []

# Conversational generation pipeline backed by Pygmalion-6B.
pipe = pipeline("conversational", model="PygmalionAI/pygmalion-6b")

# Shared conversation, seeded with an opening assistant greeting.
conversation = Conversation()
conversation.add_message({"role": "assistant", "content": "How can I help you?"})
|
|
def run_conversation(message, history):
    """Generate the assistant's reply for one chat turn.

    Gradio ``ChatInterface`` callback: it receives the latest user
    ``message`` plus the full ``history`` as a list of
    ``(user_text, assistant_text)`` pairs, and must return the reply string.

    Args:
        message: The newest user message.
        history: Prior turns as ``[(user, assistant), ...]`` pairs.

    Returns:
        The model's generated response text.
    """
    # Build a FRESH Conversation every call. The previous version mutated a
    # module-global Conversation and re-appended the entire history on each
    # turn, duplicating messages and growing the context quadratically; it
    # also pushed every message into a global `memory` list that re-added
    # user turns already present in `history`.
    convo = Conversation()
    # Keep the same opening greeting the app seeds the chat with.
    convo.add_message({"role": "assistant", "content": "How can I help you?"})

    # Replay the visible chat history once, in order.
    for user_text, assistant_text in history:
        convo.add_message({"role": "user", "content": user_text})
        convo.add_message({"role": "assistant", "content": assistant_text})

    # Append the turn being answered now.
    convo.add_message({"role": "user", "content": message})

    # The conversational pipeline mutates `convo` in place, appending the
    # generated reply to `generated_responses`.
    pipe(convo)
    return convo.generated_responses[-1]
|
|
# Chat UI: wires the Gradio ChatInterface to the conversation handler.
demo = gr.ChatInterface(
    fn=run_conversation,
    chatbot=gr.Chatbot(height=500),
    textbox=gr.Textbox(placeholder="Chat with me!", scale=7),
    title="Test",
    description="Chat with me!",
    examples=["hello"],
)
|
|
if __name__ == "__main__":
    # Enable request queuing (generation is slow) before serving the app.
    app = demo.queue()
    app.launch()
|
|
|
|