[Bugfix] support tie_word_embeddings for all models (#5724)

This commit is contained in:
Zijian Hu
2024-08-19 20:00:04 -07:00
committed by GitHub
parent 0df7ec0b2d
commit f4fc7337bf
30 changed files with 90 additions and 16 deletions

View File

@@ -496,6 +496,10 @@ class MiniCPMVBaseModel(nn.Module, SupportsMultiModal):
quant_config: Optional[QuantizationConfig] = None,
):
super().__init__()
# All MiniCPM-V models disable `tie_word_embeddings`, but
# `PretrainedConfig.tie_word_embeddings` defaults to True; we cannot
# check `tie_word_embeddings` until vLLM integrates the MiniCPM-V
# model and config class
self.config = config
self.multimodal_config = multimodal_config