from transformers import AutoModelForSequenceClassification, AutoTokenizer
import torch

# Run on GPU when one is available, otherwise fall back to CPU.
torch_device = "cuda" if torch.cuda.is_available() else "cpu"

# DistilRoBERTa checkpoint fine-tuned for financial-news sentiment classification.
model_name = "mrm8488/distilroberta-finetuned-financial-news-sentiment-analysis"

tokenizer = AutoTokenizer.from_pretrained(model_name)

# This checkpoint is a sequence-classification model, so it is loaded with
# AutoModelForSequenceClassification rather than AutoModelForCausalLM; the
# pad_token_id=eos_token_id workaround only applies to generative models.
model = AutoModelForSequenceClassification.from_pretrained(model_name).to(torch_device)

# Tokenize the input text and move the tensors to the model's device.
model_inputs = tokenizer('bad boy you ', return_tensors='pt').to(torch_device)

# The logits have shape (batch_size, num_labels); taking the argmax over the
# label dimension gives the predicted class index.
with torch.no_grad():
    output = model(**model_inputs).logits.argmax(dim=1)

# The prediction is a class index, not a sequence of token ids, so it cannot
# be decoded with the tokenizer; map it to its label name instead.
print(model.config.id2label[output[0].item()])
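
# A minimal alternative sketch, assuming the same checkpoint: the high-level
# pipeline API bundles the tokenize / forward / id2label steps above into a
# single call and also returns the confidence score for the predicted label.
from transformers import pipeline

classifier = pipeline(
    "text-classification",
    model=model_name,
    device=0 if torch.cuda.is_available() else -1,
)
print(classifier('bad boy you '))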