Update app.py
Browse files
app.py
CHANGED
|
@@ -1,9 +1,11 @@
|
|
| 1 |
import gradio as gr, os, huggingface_hub as hf
|
| 2 |
|
| 3 |
def go(m, q):
|
| 4 |
-
c = hf.InferenceClient(
|
| 5 |
s = c.chat.completions.create(model=m, messages=[{"role":"user","content":q}], stream=True)
|
| 6 |
-
for i in s:
|
|
|
|
|
|
|
| 7 |
|
| 8 |
models = [
|
| 9 |
"deepseek-ai/DeepSeek-V3.1",
|
|
@@ -18,4 +20,4 @@ gr.Interface(
|
|
| 18 |
inputs=[gr.Dropdown(models), gr.Textbox(label="Ask")],
|
| 19 |
outputs=gr.Textbox(label="Reply"),
|
| 20 |
title="Multi-Model Chat"
|
| 21 |
-
).launch()
|
|
|
|
| 1 |
import gradio as gr, os, huggingface_hub as hf
|
| 2 |
|
| 3 |
def go(m, q):
    """Stream a chat reply for question *q* from Hugging Face model *m*.

    Generator: yields the *accumulated* reply text after each streamed
    chunk. Gradio replaces the output component with each yielded value,
    so yielding only the per-chunk delta (the original behavior) would
    make the textbox flash single tokens instead of showing the growing
    answer.

    Raises:
        RuntimeError: if the HF_TOKEN environment variable is unset/empty.
    """
    token = os.environ.get("HF_TOKEN")
    if not token:
        # Fail fast with an actionable message instead of a bare KeyError
        # deep inside the request path.
        raise RuntimeError("HF_TOKEN environment variable is not set")
    c = hf.InferenceClient(api_key=token)
    s = c.chat.completions.create(
        model=m,
        messages=[{"role": "user", "content": q}],
        stream=True,
    )
    acc = ""  # running transcript of the reply so far
    for i in s:
        # Some stream chunks (e.g. the final stop/usage chunk) carry no
        # choices or an empty delta — skip those.
        if i.choices and i.choices[0].delta.content:
            acc += i.choices[0].delta.content
            yield acc
| 9 |
|
| 10 |
models = [
|
| 11 |
"deepseek-ai/DeepSeek-V3.1",
|
|
|
|
| 20 |
inputs=[gr.Dropdown(models), gr.Textbox(label="Ask")],
|
| 21 |
outputs=gr.Textbox(label="Reply"),
|
| 22 |
title="Multi-Model Chat"
|
| 23 |
+
).launch(share=True)
|