model_name = "NVIDIA/nv-embed-v2"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModel.from_pretrained(model_name, trust_remote_code=True)
model.eval()
< /code>
Output:
---------------------------------------------------------------------------
ImportError Traceback (most recent call last)
Cell In[12], line 3
1 model_name = "NVIDIA/nv-embed-v2"
2 tokenizer = AutoTokenizer.from_pretrained(model_name)
----> 3 model = AutoModel.from_pretrained(model_name, trust_remote_code=True)
4 model.eval()
File ...\AppData\Local\Programs\Python\Python312\Lib\site-packages\transformers\models\auto\auto_factory.py:582, in _BaseAutoModelClass.from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs)
579 kwargs["adapter_kwargs"] = adapter_kwargs
581 if has_remote_code and trust_remote_code:
--> 582 model_class = get_class_from_dynamic_module(
583 class_ref, pretrained_model_name_or_path, code_revision=code_revision, **hub_kwargs, **kwargs
584 )
585 _ = hub_kwargs.pop("code_revision", None)
586 # This block handles the case where the user is loading a model with `trust_remote_code=True`
587 # but a library model exists with the same name. We don't want to override the autoclass
588 # mappings in this case, or all future loads of that model will be the remote code model.
File ...\AppData\Local\Programs\Python\Python312\Lib\site-packages\transformers\dynamic_module_utils.py:582, in get_class_from_dynamic_module(class_reference, pretrained_model_name_or_path, cache_dir, force_download, resume_download, proxies, token, revision, local_files_only, repo_type, code_revision, **kwargs)
569 # And lastly we get the class inside our newly created module
570 final_module = get_cached_module_file(
571 repo_id,
572 module_file + ".py",
(...)
...
---> 11 from transformers.models.mistral.modeling_mistral import MISTRAL_INPUTS_DOCSTRING
12 from transformers.modeling_outputs import BaseModelOutputWithPast
13 from transformers.modeling_attn_mask_utils import _prepare_4d_attention_mask, _prepare_4d_attention_mask_for_sdpa
ImportError: cannot import name 'MISTRAL_INPUTS_DOCSTRING' from 'transformers.models.mistral.modeling_mistral' (...\AppData\Local\Programs\Python\Python312\Lib\site-packages\transformers\models\mistral\modeling_mistral.py)
The error occurred when I tried to use the NVIDIA/NV-Embed-v2 embedding model. I installed the library versions specified on the Hugging Face model card. Does anyone know a solution?
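One detail worth checking is the installed transformers version: the traceback fails on MISTRAL_INPUTS_DOCSTRING, a constant that recent transformers releases no longer export, so the remote code shipped with NV-Embed-v2 only imports cleanly on the older release pinned on the model card. Below is a minimal sketch of that check; the exact pin (4.42.4) is an assumption taken from the card and should be verified there.

# Minimal sketch: pin transformers to the release listed on the NV-Embed-v2 model card
# (4.42.4 is an assumption here; confirm against the card), then check what is installed.
#   pip install "transformers==4.42.4"
import transformers
print(transformers.__version__)  # should match the pinned version before retrying from_pretrained

from transformers import AutoTokenizer, AutoModel

model_name = "NVIDIA/nv-embed-v2"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModel.from_pretrained(model_name, trust_remote_code=True)  # re-runs the remote code
model.eval()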