[Frontend] Customizable RoPE theta (#5197)

This commit is contained in:
sasha0552
2024-06-11 17:42:26 +00:00
committed by GitHub
parent 00e6a2dc53
commit dcbf4286af
5 changed files with 27 additions and 8 deletions

View File

@@ -162,7 +162,7 @@ class LLMEngine:
"Initializing an LLM engine (v%s) with config: "
"model=%r, speculative_config=%r, tokenizer=%r, "
"skip_tokenizer_init=%s, tokenizer_mode=%s, revision=%s, "
-"rope_scaling=%r, tokenizer_revision=%s, "
+"rope_scaling=%r, rope_theta=%r, tokenizer_revision=%s, "
"trust_remote_code=%s, dtype=%s, max_seq_len=%d, "
"download_dir=%r, load_format=%s, tensor_parallel_size=%d, "
"disable_custom_all_reduce=%s, quantization=%s, "
@@ -177,6 +177,7 @@ class LLMEngine:
model_config.tokenizer_mode,
model_config.revision,
model_config.rope_scaling,
+model_config.rope_theta,
model_config.tokenizer_revision,
model_config.trust_remote_code,
model_config.dtype,