Support FIPS enabled machines with MD5 hashing (#15299)

Signed-off-by: Matthew Vine <32849887+MattTheCuber@users.noreply.github.com>
This commit is contained in:
Matthew Vine
2025-03-26 20:19:46 -04:00
committed by GitHub
parent e74ff409e0
commit 7a6d45bc8a
4 changed files with 36 additions and 19 deletions

View File

@@ -1111,7 +1111,8 @@ class CacheConfig:
factors: list[Any] = []
factors.append(self.cache_dtype)
# `cpu_offload_gb` does not use `torch.compile` yet.
-        hash_str = hashlib.md5(str(factors).encode()).hexdigest()
+        hash_str = hashlib.md5(str(factors).encode(),
+                               usedforsecurity=False).hexdigest()
return hash_str
def __init__(
@@ -1243,7 +1244,8 @@ class TokenizerPoolConfig:
# no factors to consider.
# this config will not affect the computation graph.
factors: list[Any] = []
-        hash_str = hashlib.md5(str(factors).encode()).hexdigest()
+        hash_str = hashlib.md5(str(factors).encode(),
+                               usedforsecurity=False).hexdigest()
return hash_str
def __post_init__(self):
@@ -1354,7 +1356,8 @@ class LoadConfig:
# no factors to consider.
# this config will not affect the computation graph.
factors: list[Any] = []
-        hash_str = hashlib.md5(str(factors).encode()).hexdigest()
+        hash_str = hashlib.md5(str(factors).encode(),
+                               usedforsecurity=False).hexdigest()
return hash_str
def __post_init__(self):
@@ -1674,7 +1677,8 @@ class SchedulerConfig:
# no factors to consider.
# this config will not affect the computation graph.
factors: list[Any] = []
-        hash_str = hashlib.md5(str(factors).encode()).hexdigest()
+        hash_str = hashlib.md5(str(factors).encode(),
+                               usedforsecurity=False).hexdigest()
return hash_str
def __post_init__(self) -> None:
@@ -1810,7 +1814,8 @@ class DeviceConfig:
# the device/platform information will be summarized
# by torch/vllm automatically.
factors: list[Any] = []
-        hash_str = hashlib.md5(str(factors).encode()).hexdigest()
+        hash_str = hashlib.md5(str(factors).encode(),
+                               usedforsecurity=False).hexdigest()
return hash_str
def __init__(self, device: str = "auto") -> None:
@@ -1983,7 +1988,8 @@ class SpeculativeConfig:
# no factors to consider.
# spec decode does not use `torch.compile` yet.
factors: list[Any] = []
-        hash_str = hashlib.md5(str(factors).encode()).hexdigest()
+        hash_str = hashlib.md5(str(factors).encode(),
+                               usedforsecurity=False).hexdigest()
return hash_str
@classmethod
@@ -2358,7 +2364,8 @@ class LoRAConfig:
factors.append(self.lora_extra_vocab_size)
factors.append(self.long_lora_scaling_factors)
factors.append(self.bias_enabled)
-        hash_str = hashlib.md5(str(factors).encode()).hexdigest()
+        hash_str = hashlib.md5(str(factors).encode(),
+                               usedforsecurity=False).hexdigest()
return hash_str
def __post_init__(self):
@@ -2424,7 +2431,8 @@ class PromptAdapterConfig:
# no factors to consider.
# this config will not affect the computation graph.
factors: list[Any] = []
-        hash_str = hashlib.md5(str(factors).encode()).hexdigest()
+        hash_str = hashlib.md5(str(factors).encode(),
+                               usedforsecurity=False).hexdigest()
return hash_str
def __post_init__(self):
@@ -2469,7 +2477,8 @@ class MultiModalConfig:
# no factors to consider.
# this config will not affect the computation graph.
factors: list[Any] = []
-        hash_str = hashlib.md5(str(factors).encode()).hexdigest()
+        hash_str = hashlib.md5(str(factors).encode(),
+                               usedforsecurity=False).hexdigest()
return hash_str
def get_limit_per_prompt(self, modality: str) -> int:
@@ -2535,7 +2544,8 @@ class PoolerConfig:
# no factors to consider.
# this config will not affect the computation graph.
factors: list[Any] = []
-        hash_str = hashlib.md5(str(factors).encode()).hexdigest()
+        hash_str = hashlib.md5(str(factors).encode(),
+                               usedforsecurity=False).hexdigest()
return hash_str
@staticmethod
@@ -2816,7 +2826,8 @@ class DecodingConfig:
# no factors to consider.
# this config will not affect the computation graph.
factors: list[Any] = []
-        hash_str = hashlib.md5(str(factors).encode()).hexdigest()
+        hash_str = hashlib.md5(str(factors).encode(),
+                               usedforsecurity=False).hexdigest()
return hash_str
def __post_init__(self):
@@ -2866,7 +2877,8 @@ class ObservabilityConfig:
# no factors to consider.
# this config will not affect the computation graph.
factors: list[Any] = []
-        hash_str = hashlib.md5(str(factors).encode()).hexdigest()
+        hash_str = hashlib.md5(str(factors).encode(),
+                               usedforsecurity=False).hexdigest()
return hash_str
def __post_init__(self):
@@ -2928,7 +2940,8 @@ class KVTransferConfig(BaseModel):
# no factors to consider.
# this config will not affect the computation graph.
factors: list[Any] = []
-        hash_str = hashlib.md5(str(factors).encode()).hexdigest()
+        hash_str = hashlib.md5(str(factors).encode(),
+                               usedforsecurity=False).hexdigest()
return hash_str
@classmethod
@@ -3425,7 +3438,8 @@ class VllmConfig:
vllm_factors.append("None")
factors.append(vllm_factors)
-        hash_str = hashlib.md5(str(factors).encode()).hexdigest()[:10]
+        hash_str = hashlib.md5(str(factors).encode(),
+                               usedforsecurity=False).hexdigest()[:10]
return hash_str
def pad_for_cudagraph(self, batch_size: int) -> int: