| |
| |
| |
| |
| |
| |
| |
|
|
|
|
| import streamlit as st |
| from langchain.chat_models import ChatOpenAI |
| from langchain.schema import HumanMessage |
|
|
# --- Page chrome -------------------------------------------------------
# Browser-tab title/icon and the on-page header.
st.set_page_config(page_title="Chat", page_icon=":chat:")
st.header("💬 Hugging Chat 💬")

# Two equal-width columns; the left one hosts the model picker.
col1, col2 = st.columns([1, 1])

with col1:
    # `option_llm` is read later when the chat model is instantiated.
    option_llm = st.selectbox("Model", ("gpt-4", "gpt-3.5-turbo"))
|
|
def get_question():
    """Render the question input box and return whatever the user typed.

    Returns the current contents of the text area (empty string when the
    user has typed nothing yet). The widget state is keyed on
    ``"question_text"`` so it survives Streamlit reruns.
    """
    return st.text_area(
        label="Your question ...",
        placeholder="Ask me anything ...",
        key="question_text",
        label_visibility="collapsed",
    )
|
|
question_text = get_question()

# Require at least 2 characters so a stray single keystroke doesn't
# trigger an (expensive) model call.
if question_text and len(question_text) > 1:
    agent = ChatOpenAI(model_name=option_llm, temperature=0.5)
    response = agent([HumanMessage(content=question_text)])
    print(f"> {response}")

    # BUG FIX: this check is now nested inside the question guard.  The
    # original evaluated `response` unconditionally at top level, raising
    # NameError on every rerun where no question had been entered yet.
    if response and response.content:
        output = response.content
        # BUG FIX: clamp the height to a sane lower bound.  The original
        # `min(2*len(output), 280)` could yield e.g. height=4 for a very
        # short answer, which st.text_area rejects (Streamlit enforces a
        # minimum widget height — TODO confirm the exact minimum, 68px in
        # recent releases).
        height = max(68, min(2 * len(output), 280))
        st.text_area(label="In response ...", value=output, height=height)