Add collective_rpc to llm engine (#16999)

Signed-off-by: Yinghai Lu <yinghai@thinkingmachines.ai>
This commit is contained in:
Yinghai Lu
2025-04-24 13:16:52 -07:00
committed by GitHub
parent 6d0df0ebeb
commit fe92176321
2 changed files with 29 additions and 0 deletions

View File

@@ -492,6 +492,17 @@ class AsyncLLM(EngineClient):
"""Prevent an adapter from being evicted."""
return await self.engine_core.pin_lora_async(lora_id)
async def collective_rpc(self,
                         method: str,
                         timeout: Optional[float] = None,
                         args: tuple = (),
                         kwargs: Optional[dict] = None):
    """Perform a collective RPC call, invoking `method` with the given
    arguments.

    This is a thin async wrapper that forwards the request to
    `self.engine_core.collective_rpc_async`.

    Args:
        method: Name of the method to invoke via the RPC.
        timeout: Optional timeout in seconds for the call; `None` means
            no timeout. (Semantics are defined by the engine core —
            presumably it waits at most this long; confirm there.)
        args: Positional arguments forwarded to `method`.
        kwargs: Keyword arguments forwarded to `method`, or `None` for
            none. (`None` rather than `{}` avoids a mutable default.)

    Returns:
        Whatever `engine_core.collective_rpc_async` returns for this call.
    """
    return await self.engine_core.collective_rpc_async(
        method, timeout, args, kwargs)
@property
def is_running(self) -> bool:
# Is None before the loop is started.