Commit 1671309

Make mistral-common dependency optional
1 parent 8cf6b42 commit 1671309

2 files changed (+35 additions, -11 deletions)

convert_hf_to_gguf.py

Lines changed: 35 additions & 9 deletions
@@ -7,6 +7,7 @@
 import logging
 import argparse
 import contextlib
+import importlib.util
 import json
 import os
 import re
@@ -29,12 +30,29 @@
 sys.path.insert(1, str(Path(__file__).parent / 'gguf-py'))
 import gguf
 from gguf.vocab import MistralTokenizerType, MistralVocab
-from mistral_common.tokens.tokenizers.base import TokenizerVersion
-from mistral_common.tokens.tokenizers.multimodal import DATASET_MEAN, DATASET_STD
-from mistral_common.tokens.tokenizers.tekken import Tekkenizer
-from mistral_common.tokens.tokenizers.sentencepiece import (
-    SentencePieceTokenizer,
-)
+
+if importlib.util.find_spec("mistral_common") is not None:
+    from mistral_common.tokens.tokenizers.base import TokenizerVersion  # pyright: ignore[reportMissingImports]
+    from mistral_common.tokens.tokenizers.multimodal import DATASET_MEAN as _MISTRAL_COMMON_DATASET_MEAN, DATASET_STD as _MISTRAL_COMMON_DATASET_STD  # pyright: ignore[reportMissingImports]
+    from mistral_common.tokens.tokenizers.tekken import Tekkenizer  # pyright: ignore[reportMissingImports]
+    from mistral_common.tokens.tokenizers.sentencepiece import (  # pyright: ignore[reportMissingImports]
+        SentencePieceTokenizer,
+    )
+
+    _mistral_common_installed = True
+    _mistral_import_error_msg = ""
+else:
+    _MISTRAL_COMMON_DATASET_MEAN = (0.48145466, 0.4578275, 0.40821073)
+    _MISTRAL_COMMON_DATASET_STD = (0.26862954, 0.26130258, 0.27577711)
+
+    _mistral_common_installed = False
+    TokenizerVersion = None
+    Tekkenizer = None
+    SentencePieceTokenizer = None
+    _mistral_import_error_msg = (
+        "Mistral format requires `mistral-common` to be installed. Please run "
+        "`pip install mistral-common[image,audio]` to install it."
+    )
 
 
 logger = logging.getLogger("hf-to-gguf")
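For reference, a minimal standalone sketch of the pattern this hunk introduces: probe for the package with `importlib.util.find_spec`, bind `None` placeholders and an error message when it is missing, and raise only when the optional code path is actually used (as the later hunks do). The package and symbol names below are invented for illustration; only the structure mirrors the diff.

```python
# Sketch of the optional-dependency pattern (names are illustrative only).
import importlib.util

if importlib.util.find_spec("some_optional_pkg") is not None:
    from some_optional_pkg import SomeClass  # real import when installed
    _installed = True
    _import_error_msg = ""
else:
    SomeClass = None  # placeholder so the module still imports cleanly
    _installed = False
    _import_error_msg = (
        "This feature requires `some_optional_pkg`. "
        "Install it with `pip install some_optional_pkg`."
    )


def feature_entry_point():
    # Fail with an actionable message only when the optional path is hit.
    if not _installed:
        raise ImportError(_import_error_msg)
    return SomeClass()
```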
@@ -107,6 +125,9 @@ def __init__(self, dir_model: Path, ftype: gguf.LlamaFileType, fname_out: Path,
                 type(self) is MmprojModel:
             raise TypeError(f"{type(self).__name__!r} should not be directly instantiated")
 
+        if self.is_mistral_format and not _mistral_common_installed:
+            raise ImportError(_mistral_import_error_msg)
+
         self.dir_model = dir_model
         self.ftype = ftype
         self.fname_out = fname_out
@@ -1363,8 +1384,8 @@ def set_gguf_parameters(self):
         self.gguf_writer.add_vision_head_count(self.find_vparam(["num_attention_heads"]))
 
         # preprocessor config
-        image_mean = DATASET_MEAN if self.is_mistral_format else self.preprocessor_config["image_mean"]
-        image_std = DATASET_STD if self.is_mistral_format else self.preprocessor_config["image_std"]
+        image_mean = _MISTRAL_COMMON_DATASET_MEAN if self.is_mistral_format else self.preprocessor_config["image_mean"]
+        image_std = _MISTRAL_COMMON_DATASET_STD if self.is_mistral_format else self.preprocessor_config["image_std"]
 
         self.gguf_writer.add_vision_image_mean(image_mean)
         self.gguf_writer.add_vision_image_std(image_std)
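The hard-coded fallback tuples in the import hunk are intended to match the `DATASET_MEAN`/`DATASET_STD` constants shipped by mistral-common, so the metadata written here is the same with or without the package. As a hedged illustration (not code from this converter), this is how such per-channel mean/std values are typically applied during image preprocessing:

```python
# Illustration only: standard per-channel normalization with the fallback
# constants from the diff. Not taken from convert_hf_to_gguf.py.
import numpy as np

_MEAN = np.array((0.48145466, 0.4578275, 0.40821073), dtype=np.float32)
_STD = np.array((0.26862954, 0.26130258, 0.27577711), dtype=np.float32)

def normalize(pixels_rgb_0_255: np.ndarray) -> np.ndarray:
    """Scale uint8 RGB pixels of shape (H, W, 3) to [0, 1], then standardize per channel."""
    x = pixels_rgb_0_255.astype(np.float32) / 255.0
    return (x - _MEAN) / _STD
```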
@@ -2033,6 +2054,9 @@ def __init__(self, *args, **kwargs):
         self.hparams["num_attention_heads"] = self.hparams.get("num_attention_heads", 32)
 
     def _set_vocab_mistral(self):
+        if not _mistral_common_installed:
+            raise ImportError(_mistral_import_error_msg)
+
         vocab = MistralVocab(self.dir_model)
         logger.info(
             f"Converting tokenizer {vocab.tokenizer_type} of size {vocab.vocab_size}."
@@ -9212,7 +9236,7 @@ class MistralModel(LlamaModel):
 
     @staticmethod
     def get_community_chat_template(vocab: MistralVocab, templates_dir: Path, is_mistral_format: bool):
-        assert TokenizerVersion is not None, "mistral_common is not installed"
+        assert TokenizerVersion is not None and Tekkenizer is not None and SentencePieceTokenizer is not None, _mistral_import_error_msg
         assert isinstance(vocab.tokenizer, (Tekkenizer, SentencePieceTokenizer)), (
             f"Expected Tekkenizer or SentencePieceTokenizer, got {type(vocab.tokenizer)}"
         )
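The widened assert reflects the new import scheme: when mistral-common is absent, `TokenizerVersion`, `Tekkenizer`, and `SentencePieceTokenizer` are all bound to `None`, so checking each of them (and reusing `_mistral_import_error_msg`) fails with the actionable install hint and keeps the following `isinstance` check well-typed.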
@@ -9594,6 +9618,8 @@ def main() -> None:
         fname_out = ModelBase.add_prefix_to_filename(fname_out, "mmproj-")
 
     is_mistral_format = args.mistral_format
+    if is_mistral_format and not _mistral_common_installed:
+        raise ImportError(_mistral_import_error_msg)
     disable_mistral_community_chat_template = args.disable_mistral_community_chat_template
 
     with torch.inference_mode():

requirements/requirements-convert_hf_to_gguf.txt

Lines changed: 0 additions & 2 deletions
@@ -1,5 +1,3 @@
-mistral-common>=1.8.3
-
 -r ./requirements-convert_legacy_llama.txt
 --extra-index-url https://download.pytorch.org/whl/cpu
 
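With `mistral-common>=1.8.3` dropped from the pinned requirements, a plain HF-to-GGUF conversion no longer pulls the package in. Converting Mistral-format checkpoints still needs it: install it explicitly (the error message added in the script suggests `pip install mistral-common[image,audio]`), otherwise the converter now raises the ImportError above instead of failing at module import time.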