Code:
import torch  # needed for torch.bfloat16 below
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

# Configure 4-bit quantization with bfloat16 compute dtype
quantization_config = BitsAndBytesConfig(load_in_4bit=True, bnb_4bit_compute_dtype=torch.bfloat16)

model_4bit = AutoModelForCausalLM.from_pretrained(
    "meta-llama/Llama-2-7b-hf",
    device_map="auto",
    quantization_config=quantization_config,
    token=secret_value_0,  # Hugging Face access token (gated model)
)
---------------------------------------------------------------------------
ImportError Traceback (most recent call last)
in ()
2
3 quantization_config = BitsAndBytesConfig(load_in_4bit=True, bnb_4bit_compute_dtype=torch.bfloat16)
----> 4 model_4bit = AutoModelForCausalLM.from_pretrained(
5 "meta-llama/Llama-2-7b-hf",
6 device_map='auto',
/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py in from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs)
562 elif type(config) in cls._model_mapping.keys():
563 model_class = _get_model_class(config, cls._model_mapping)
--> 564 return model_class.from_pretrained(
565 pretrained_model_name_or_path, *model_args, config=config, **hub_kwargs, **kwargs
566 )
/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py in from_pretrained(cls, pretrained_model_name_or_path, config, cache_dir, ignore_mismatched_sizes, force_download, local_files_only, token, revision, use_safetensors, weights_only, *model_args, **kwargs)
3667
3668 if hf_quantizer is not None:
-> 3669 hf_quantizer.validate_environment(
3670 torch_dtype=torch_dtype,
3671 from_tf=from_tf,
/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_bnb_4bit.py in validate_environment(self, *args, **kwargs)
72 )
73 if not is_bitsandbytes_available():
---> 74 raise ImportError(
75 "Using `bitsandbytes` 4-bit quantization requires the latest version of bitsandbytes: `pip install -U bitsandbytes`"
76 )
ImportError: Using `bitsandbytes` 4-bit quantization requires the latest version of bitsandbytes: `pip install -U bitsandbytes`
Specific versions
I already have the latest version of bitsandbytes installed! I understand that the bitsandbytes module was deprecated in https://github.com/huggingface/transfor ... ndbytes.py and moved over to the integrations package, but I still can't figure out what is wrong. Please help!!!
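For what it's worth, here is a minimal diagnostic sketch (my own assumption, not from the original post): it prints which bitsandbytes version is actually visible to the running interpreter and calls the same `is_bitsandbytes_available()` helper that `quantizer_bnb_4bit.py` uses in the traceback above. In many transformers versions that helper also requires `torch.cuda.is_available()` to be True, so it can return False even when the package itself is installed.

Code:
# Minimal environment check (a sketch, not from the original post).
import importlib.metadata

import torch
from transformers.utils import is_bitsandbytes_available

try:
    import bitsandbytes  # fails here if the package truly is not installed
    print("bitsandbytes version:", importlib.metadata.version("bitsandbytes"))
except ImportError as err:
    print("bitsandbytes is not importable in this interpreter:", err)

print("torch.cuda.is_available():", torch.cuda.is_available())
# This is the same check the 4-bit quantizer runs before loading the model.
print("is_bitsandbytes_available():", is_bitsandbytes_available())

If the version prints correctly but the flag is still False, the usual next steps are restarting the runtime after `pip install -U bitsandbytes` (so the freshly installed package is picked up) and making sure a GPU runtime is actually selected.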
More details here: https://stackoverflow.com/questions/795 ... bytes-from