[Misc] Remove deprecated names (#10817)

Signed-off-by: DarkLight1337 <tlleungac@connect.ust.hk>
This commit is contained in:
Cyrus Leung
2024-12-03 14:52:57 +08:00
committed by GitHub
parent ef51831ee8
commit 3257d449fa
10 changed files with 31 additions and 102 deletions

View File

@@ -6,6 +6,7 @@ from typing import (Any, ClassVar, Dict, List, Optional, Sequence, Tuple, Type,
Union, cast, overload)
from tqdm import tqdm
from typing_extensions import deprecated
from vllm import envs
from vllm.beam_search import (BeamSearchInstance, BeamSearchOutput,
@@ -256,6 +257,7 @@ class LLM:
tokenizer_group.tokenizer = get_cached_tokenizer(tokenizer)
@overload # LEGACY: single (prompt + optional token ids)
@deprecated("'prompt_token_ids' will become part of 'prompts'")
def generate(
self,
prompts: str,
@@ -268,6 +270,7 @@ class LLM:
...
@overload # LEGACY: multi (prompt + optional token ids)
@deprecated("'prompt_token_ids' will become part of 'prompts'")
def generate(
self,
prompts: List[str],
@@ -280,6 +283,7 @@ class LLM:
...
@overload # LEGACY: single (token ids + optional prompt)
@deprecated("'prompt_token_ids' will become part of 'prompts'")
def generate(
self,
prompts: Optional[str] = None,
@@ -293,6 +297,7 @@ class LLM:
...
@overload # LEGACY: multi (token ids + optional prompt)
@deprecated("'prompt_token_ids' will become part of 'prompts'")
def generate(
self,
prompts: Optional[List[str]] = None,
@@ -306,6 +311,7 @@ class LLM:
...
@overload # LEGACY: single or multi token ids [pos-only]
@deprecated("'prompt_token_ids' will become part of 'prompts'")
def generate(
self,
prompts: None,
@@ -671,6 +677,7 @@ class LLM:
)
@overload # LEGACY: single (prompt + optional token ids)
@deprecated("'prompt_token_ids' will become part of 'prompts'")
def encode(
self,
prompts: str,
@@ -683,6 +690,7 @@ class LLM:
...
@overload # LEGACY: multi (prompt + optional token ids)
@deprecated("'prompt_token_ids' will become part of 'prompts'")
def encode(
self,
prompts: List[str],
@@ -695,6 +703,7 @@ class LLM:
...
@overload # LEGACY: single (token ids + optional prompt)
@deprecated("'prompt_token_ids' will become part of 'prompts'")
def encode(
self,
prompts: Optional[str] = None,
@@ -708,6 +717,7 @@ class LLM:
...
@overload # LEGACY: multi (token ids + optional prompt)
@deprecated("'prompt_token_ids' will become part of 'prompts'")
def encode(
self,
prompts: Optional[List[str]] = None,
@@ -721,6 +731,7 @@ class LLM:
...
@overload # LEGACY: single or multi token ids [pos-only]
@deprecated("'prompt_token_ids' will become part of 'prompts'")
def encode(
self,
prompts: None,