Examples not working
#1
by IDKWhy17 - opened
Both of the suggested usage examples fail for me in Google Colab. The snippets and the full tracebacks are below.

First Example
# Use a pipeline as a high-level helper
from transformers import pipeline
from google.colab import userdata
userdata.get('HF_TOKEN')
pipe = pipeline("text-generation", model="PORTULAN/gervasio-8b-portuguese-ptpt-decoder-quantized-4bit")
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
/tmp/ipython-input-2885922959.py in <cell line: 0>()
5 userdata.get('HF_TOKEN')
6
----> 7 pipe = pipeline("text-generation", model="PORTULAN/gervasio-8b-portuguese-ptpt-decoder-quantized-4bit")
1 frames
/usr/local/lib/python3.12/dist-packages/transformers/pipelines/base.py in load_model(model, config, model_classes, task, **model_kwargs)
266 for class_name, trace in all_traceback.items():
267 error += f"while loading with {class_name}, an error is thrown:\n{trace}\n"
--> 268 raise ValueError(
269 f"Could not load model {model} with any of the following classes: {class_tuple}. See the original errors:\n\n{error}\n"
270 )
ValueError: Could not load model PORTULAN/gervasio-8b-portuguese-ptpt-decoder-quantized-4bit with any of the following classes: (<class 'transformers.models.auto.modeling_auto.AutoModelForCausalLM'>,). See the original errors:
while loading with AutoModelForCausalLM, an error is thrown:
Traceback (most recent call last):
File "/usr/local/lib/python3.12/dist-packages/transformers/pipelines/base.py", line 232, in load_model
model = model_class.from_pretrained(model, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/transformers/models/auto/auto_factory.py", line 375, in from_pretrained
raise ValueError(
ValueError: Unrecognized configuration class <class 'transformers.models.bit.configuration_bit.BitConfig'> for this kind of AutoModel: AutoModelForCausalLM.
Model type should be one of AfmoeConfig, ApertusConfig, ArceeConfig, AriaTextConfig, BambaConfig, BartConfig, BertConfig, BertGenerationConfig, BigBirdConfig, BigBirdPegasusConfig, BioGptConfig, BitNetConfig, BlenderbotConfig, BlenderbotSmallConfig, BloomConfig, BltConfig, CamembertConfig, LlamaConfig, CodeGenConfig, CohereConfig, Cohere2Config, CpmAntConfig, CTRLConfig, CwmConfig, Data2VecTextConfig, DbrxConfig, DeepseekV2Config, DeepseekV3Config, DiffLlamaConfig, DogeConfig, Dots1Config, ElectraConfig, Emu3Config, ErnieConfig, Ernie4_5Config, Ernie4_5_MoeConfig, Exaone4Config, FalconConfig, FalconH1Config, FalconMambaConfig, FlexOlmoConfig, FuyuConfig, GemmaConfig, Gemma2Config, Gemma3Config, Gemma3TextConfig, Gemma3nConfig, Gemma3nTextConfig, GitConfig, GlmConfig, Glm4Config, Glm4MoeConfig, Glm4MoeLiteConfig, GotOcr2Config, GPT2Config, GPT2Config, GPTBigCodeConfig, GPTNeoConfig, GPTNeoXConfig, GPTNeoXJapaneseConfig, GptOssConfig, GPTJConfig, GraniteConfig, GraniteMoeConfig, GraniteMoeHybridConfig, GraniteMoeSharedConfig, HeliumConfig, HunYuanDenseV1Config, HunYuanMoEV1Config, Jais2Config, JambaConfig, JetMoeConfig, Lfm2Config, Lfm2MoeConfig, LlamaConfig, Llama4Config, Llama4TextConfig, LongcatFlashConfig, MambaConfig, Mamba2Config, MarianConfig, MBartConfig, MegatronBertConfig, MiniMaxConfig, MiniMaxM2Config, MinistralConfig, Ministral3Config, MistralConfig, MixtralConfig, MllamaConfig, ModernBertDecoderConfig, MoshiConfig, MptConfig, MusicgenConfig, MusicgenMelodyConfi...
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/lib/python3.12/dist-packages/transformers/pipelines/base.py", line 248, in load_model
model = model_class.from_pretrained(model, **fp32_kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/transformers/models/auto/auto_factory.py", line 375, in from_pretrained
raise ValueError(
ValueError: Unrecognized configuration class <class 'transformers.models.bit.configuration_bit.BitConfig'> for this kind of AutoModel: AutoModelForCausalLM.
Model type should be one of AfmoeConfig, ApertusConfig, ArceeConfig, AriaTextConfig, BambaConfig, BartConfig, BertConfig, BertGenerationConfig, BigBirdConfig, BigBirdPegasusConfig, BioGptConfig, BitNetConfig, BlenderbotConfig, BlenderbotSmallConfig, BloomConfig, BltConfig, CamembertConfig, LlamaConfig, CodeGenConfig, CohereConfig, Cohere2Config, CpmAntConfig, CTRLConfig, CwmConfig, Data2VecTextConfig, DbrxConfig, DeepseekV2Config, DeepseekV3Config, DiffLlamaConfig, DogeConfig, Dots1Config, ElectraConfig, Emu3Config, ErnieConfig, Ernie4_5Config, Ernie4_5_MoeConfig, Exaone4Config, FalconConfig, FalconH1Config, FalconMambaConfig, FlexOlmoConfig, FuyuConfig, GemmaConfig, Gemma2Config, Gemma3Config, Gemma3TextConfig, Gemma3nConfig, Gemma3nTextConfig, GitConfig, GlmConfig, Glm4Config, Glm4MoeConfig, Glm4MoeLiteConfig, GotOcr2Config, GPT2Config, GPT2Config, GPTBigCodeConfig, GPTNeoConfig, GPTNeoXConfig, GPTNeoXJapaneseConfig, GptOssConfig, GPTJConfig, GraniteConfig, GraniteMoeConfig, GraniteMoeHybridConfig, GraniteMoeSharedConfig, HeliumConfig, HunYuanDenseV1Config, HunYuanMoEV1Config, Jais2Config, JambaConfig, JetMoeConfig, Lfm2Config, Lfm2MoeConfig, LlamaConfig, Llama4Config, Llama4TextConfig, LongcatFlashConfig, MambaConfig, Mamba2Config, MarianConfig, MBartConfig, MegatronBertConfig, MiniMaxConfig, MiniMaxM2Config, MinistralConfig, Ministral3Config, MistralConfig, MixtralConfig, MllamaConfig, ModernBertDecoderConfig, MoshiConfig, MptConfig, MusicgenConfig, MusicgenMelodyConfi...
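The failure does not look token-related: transformers resolves this repo's configuration to BitConfig (the BiT image backbone), which AutoModelForCausalLM does not accept, so the config is apparently not being read as a decoder architecture at all. (Side note: userdata.get('HF_TOKEN') only returns the secret; the snippet never actually passes it anywhere.) Here is a small diagnostic sketch to check what actually gets resolved; it assumes the repo ID is correct and that an HF_TOKEN secret is set in Colab:

import os
from google.colab import userdata
from transformers import AutoConfig

# Export the token so huggingface_hub can pick it up
# (the original snippet fetched it but discarded the return value).
os.environ["HF_TOKEN"] = userdata.get("HF_TOKEN")

# Inspect the configuration class that transformers resolves for this repo.
config = AutoConfig.from_pretrained(
    "PORTULAN/gervasio-8b-portuguese-ptpt-decoder-quantized-4bit"
)
print(type(config).__name__)  # the traceback above suggests this is BitConfig
print(config.model_type)      # a causal decoder would normally report e.g. "llama"

If model_type in the repo's config.json really maps to BiT, the pipeline example cannot work as written no matter how it is called, and the fix would have to happen on the repo side.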
Second Example
The second snippet fails differently, with an OSError about missing weight files:
# Load model directly
from transformers import AutoModel
from google.colab import userdata
userdata.get('HF_TOKEN')
model = AutoModel.from_pretrained("PORTULAN/gervasio-8b-portuguese-ptpt-decoder-quantized-4bit", dtype="auto")
---------------------------------------------------------------------------
OSError Traceback (most recent call last)
/tmp/ipython-input-3853475789.py in <cell line: 0>()
5 userdata.get('HF_TOKEN')
6
----> 7 model = AutoModel.from_pretrained("PORTULAN/gervasio-8b-portuguese-ptpt-decoder-quantized-4bit", dtype="auto")
2 frames
/usr/local/lib/python3.12/dist-packages/transformers/modeling_utils.py in _get_resolved_checkpoint_files(pretrained_model_name_or_path, variant, gguf_file, use_safetensors, user_agent, is_remote_code, transformers_explicit_filename, download_kwargs)
708 )
709 else:
--> 710 raise OSError(
711 f"{pretrained_model_name_or_path} does not appear to have a file named"
712 f" {_add_variant(WEIGHTS_NAME, variant)} or {_add_variant(SAFE_WEIGHTS_NAME, variant)}."
OSError: PORTULAN/gervasio-8b-portuguese-ptpt-decoder-quantized-4bit does not appear to have a file named pytorch_model.bin or model.safetensors.
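This second error is about the files in the repo rather than the architecture: from_pretrained looks for pytorch_model.bin or model.safetensors and finds neither, so the weights are presumably stored under some other name or format. A quick way to see what the repo actually ships, sketched with huggingface_hub (installed alongside transformers in Colab); the token argument is only needed if the repo is gated:

from google.colab import userdata
from huggingface_hub import list_repo_files

# List every file in the repo to see which weight format it actually contains.
files = list_repo_files(
    "PORTULAN/gervasio-8b-portuguese-ptpt-decoder-quantized-4bit",
    token=userdata.get("HF_TOKEN"),
)
for name in files:
    print(name)  # look for *.safetensors, *.bin, *.gguf, shard index files, etc.

If the repo only contains, say, GGUF files or some custom serialization, then neither of the published examples can load it with plain pipeline / AutoModel calls, and the examples would need to be updated to match whatever format is actually uploaded.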