| from transformers import AutoTokenizer, TFAutoModelForSequenceClassification |
| import streamlit as st |
| import os |
| import tensorflow as tf |
| from absl import logging |
|
|
| |
# Load the tokenizer and model once and reuse them across Streamlit reruns.
# Streamlit re-executes this entire script on every widget interaction;
# without caching, the expensive from_pretrained calls run every time.
@st.cache_resource
def _load_sentiment_model():
    """Return the (tokenizer, model) pair for the KR-FinBert-SC checkpoint.

    Cached with st.cache_resource so the model is loaded only once per
    server process, not once per rerun.
    """
    tokenizer = AutoTokenizer.from_pretrained("snunlp/KR-FinBert-SC")
    model = TFAutoModelForSequenceClassification.from_pretrained("snunlp/KR-FinBert-SC")
    return tokenizer, model


tokenizer, model = _load_sentiment_model()
|
|
| |
| os.environ['TF_ENABLE_ONEDNN_OPTS'] = '0' |
|
|
| |
# Route log records through absl's own handler, then raise the verbosity
# threshold so INFO-level messages are actually emitted.
logging.use_absl_handler()
logging.set_verbosity(logging.INFO)
|
|
| |
# Startup diagnostics: report the TensorFlow version and visible devices.
# The original Korean literals were mojibake (UTF-8 bytes decoded through a
# single-byte codepage); restored to the intended Korean text.
print("TensorFlow 버전:", tf.__version__)  # "TensorFlow version"
print("사용 가능한 장치:", tf.config.list_physical_devices())  # "available devices"
|
|
| |
# Page header and intro text; render order here is the on-page order.
st.title("Hello, Streamlit!")
st.write("This is a sample Streamlit app.")
|
|
| |
# Sentiment-analysis UI: tokenize the user's text, run the model, and show
# the raw classification logits.
input_text = st.text_input("Enter some text:")
if st.button("Analyze"):
    if not input_text.strip():
        # Guard: a blank input would just waste a forward pass and display
        # meaningless logits.
        st.warning("Please enter some text to analyze.")
    else:
        try:
            # Keep the try body minimal: only the calls that can fail.
            inputs = tokenizer(input_text, return_tensors="tf")
            outputs = model(**inputs)
        except Exception as e:  # UI boundary: surface the error, don't crash the app.
            st.error(f"Error during model inference: {e}")
        else:
            st.write("Model Output:", outputs.logits.numpy().tolist())
|
|