"text_config" missiong for 8B model
#1 opened by ce-amtic
I encountered this error when loading the 8B model with lmdeploy v0.9.2.post1.
File "utils/internvl_utils.py", line 60, in batch_generate_with_lmd
_cached_lmdeploy_pipeline = pipeline(
File ".env/miniconda3/envs/lmdeploy/lib/python3.10/site-packages/lmdeploy/api.py", line 83, in pipeline
return pipeline_class(model_path,
File ".env/miniconda3/envs/lmdeploy/lib/python3.10/site-packages/lmdeploy/serve/vl_async_engine.py", line 32, in __init__
super().__init__(model_path, backend=backend, backend_config=backend_config, **kwargs)
File ".env/miniconda3/envs/lmdeploy/lib/python3.10/site-packages/lmdeploy/serve/async_engine.py", line 284, in __init__
self._build_turbomind(model_path=model_path, backend_config=backend_config, **kwargs)
File ".env/miniconda3/envs/lmdeploy/lib/python3.10/site-packages/lmdeploy/serve/async_engine.py", line 338, in _build_turbomind
self.engine = tm.TurboMind.from_pretrained(model_path,
File ".env/miniconda3/envs/lmdeploy/lib/python3.10/site-packages/lmdeploy/turbomind/turbomind.py", line 386, in from_pretrained
return cls(model_path=pretrained_model_name_or_path,
File ".env/miniconda3/envs/lmdeploy/lib/python3.10/site-packages/lmdeploy/turbomind/turbomind.py", line 159, in __init__
self._load_weights(model_source)
File ".env/miniconda3/envs/lmdeploy/lib/python3.10/site-packages/lmdeploy/turbomind/turbomind.py", line 178, in _load_weights
self._tm_model.export()
File ".env/miniconda3/envs/lmdeploy/lib/python3.10/site-packages/lmdeploy/turbomind/deploy/target_model/base.py", line 210, in export
for i, reader in self.input_model.readers():
File ".env/miniconda3/envs/lmdeploy/lib/python3.10/site-packages/lmdeploy/turbomind/deploy/source_model/llama.py", line 113, in readers
reader = self.Reader(param, {}, False, self.model_config, policy=self.policy)
File ".env/miniconda3/envs/lmdeploy/lib/python3.10/site-packages/lmdeploy/turbomind/deploy/source_model/internvl.py", line 64, in __init__
raise ValueError(f'Miss "text_config" in model config: {model_cfg}')
ValueError: Miss "text_config" in model config: None
from lmdeploy import pipeline, TurbomindEngineConfig, ChatTemplateConfig, GenerationConfig

# tp, dp, and model_path are defined elsewhere in the calling script.
backend_config = TurbomindEngineConfig(
    session_len=16384, tp=tp, dp=dp
)
chat_template_config = ChatTemplateConfig(
    model_name='internvl2_5'
)
_cached_lmdeploy_pipeline = pipeline(
    model_path,
    backend_config=backend_config,
    chat_template_config=chat_template_config,
)
The same code works fine for the InternVL3.5 38B model, but it cannot load the 8B model. Are there any missing config files, or is something else wrong?
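For anyone debugging the same thing: the traceback shows the failure comes from lmdeploy's InternVL source-model reader, which expects a "text_config" section in the checkpoint's config. A quick diagnostic sketch to see what the downloaded 8B checkpoint actually contains (the local path below is a placeholder):

import json
from pathlib import Path

# Placeholder: point this at the locally downloaded 8B checkpoint directory.
cfg_path = Path('/path/to/the-8B-checkpoint') / 'config.json'
cfg = json.loads(cfg_path.read_text())

# The TurboMind InternVL reader raises the error above when it cannot find "text_config".
print('top-level keys:', sorted(cfg.keys()))
print('has text_config:', 'text_config' in cfg)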
Please use PytorchEngineConfig instead of TurbomindEngineConfig
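A minimal sketch of the suggested change, keeping the rest of the call the same (assuming session_len, tp, and dp carry over to the PyTorch engine config in this lmdeploy version):

from lmdeploy import pipeline, PytorchEngineConfig, ChatTemplateConfig

# Same settings as before, but on the PyTorch engine instead of TurboMind.
backend_config = PytorchEngineConfig(
    session_len=16384, tp=tp, dp=dp
)
chat_template_config = ChatTemplateConfig(
    model_name='internvl2_5'
)
_cached_lmdeploy_pipeline = pipeline(
    model_path,
    backend_config=backend_config,
    chat_template_config=chat_template_config,
)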
Solved the problem. Thank you!
ce-amtic changed discussion status to closed