[Attention] Get rid of mla cache alignment (#14842)
Signed-off-by: Lucas Wilkinson <lwilkins@redhat.com>
This commit is contained in:
def get_dtype_size(dtype: torch.dtype) -> int:
    """Return the size in bytes of one element of ``dtype``.

    Allocates a zero-element tensor purely to query its per-element
    storage size; no real memory for data is consumed.
    """
    probe = torch.empty(0, dtype=dtype)
    return probe.element_size()
|
||||
|
||||
|
||||
def align_to_256bytes(extent: int, dtype: torch.dtype) -> int:
    """Round ``extent`` (an element count) up so the span is 256-byte aligned.

    Computes how many elements of ``dtype`` fit in 256 bytes and rounds
    ``extent`` up to the next multiple of that count via ``round_up``.
    Assumes the dtype's element size divides 256 evenly (true for all
    1/2/4/8-byte torch dtypes) — TODO confirm no wider dtypes are passed.
    """
    elems_per_256b = 256 // get_dtype_size(dtype)
    return round_up(extent, elems_per_256b)
|
||||
|
||||
|
||||
# `collections` helpers
|
||||
def is_list_of(
|
||||
value: object,
|
||||
|
||||
Reference in New Issue
Block a user