[Model] Support math-shepherd-mistral-7b-prm model (#9697)

Signed-off-by: Went-Liang <wenteng_liang@163.com>
This commit is contained in:
Went-Liang
2024-10-31 00:33:42 +08:00
committed by GitHub
parent cc98f1e079
commit 81f09cfd80
14 changed files with 312 additions and 62 deletions

View File

@@ -100,11 +100,27 @@ _EMBEDDING_MODELS = {
"Qwen2ForRewardModel": ("qwen2_rm", "Qwen2ForRewardModel"),
"Qwen2ForSequenceClassification": (
"qwen2_cls", "Qwen2ForSequenceClassification"),
"LlamaForCausalLM": ("llama", "LlamaForCausalLM"),
"Phi3ForCausalLM": ("phi3", "Phi3ForCausalLM"),
"DeciLMForCausalLM": ("decilm", "DeciLMForCausalLM"),
# [Multimodal]
"LlavaNextForConditionalGeneration": ("llava_next", "LlavaNextForConditionalGeneration"), # noqa: E501
"Phi3VForCausalLM": ("phi3v", "Phi3VForCausalLM"),
}
def add_embedding_models(base_models, embedding_models):
    """Select base-model registry entries that also act as embedding models.

    Args:
        base_models: Mapping of model name -> (module path, architecture
            class name), e.g. the text-generation registry.
        embedding_models: Mapping whose keys are the architecture class
            names that expose a pooler method.

    Returns:
        A new dict containing only those ``base_models`` entries whose
        architecture name appears among ``embedding_models``'s keys.
    """
    # dict.keys() is a view, so membership tests below are O(1).
    embedding_archs = embedding_models.keys()
    return {
        name: (path, arch)
        for name, (path, arch) in base_models.items()
        if arch in embedding_archs
    }
# Extend the embedding registry with every text-generation model whose
# architecture also supports pooling. Explicit `_EMBEDDING_MODELS` entries
# are applied last so they override any derived entry with the same key.
_EMBEDDING_MODELS = dict(
    add_embedding_models(_TEXT_GENERATION_MODELS, _EMBEDDING_MODELS),
    **_EMBEDDING_MODELS,
)
_MULTIMODAL_MODELS = {
# [Decoder-only]
"Blip2ForConditionalGeneration": ("blip2", "Blip2ForConditionalGeneration"),