Commit 9a6c50d: manual pre-commit
jjiangTT committed Mar 6, 2025
1 parent 9f217dc
Showing 9 changed files with 453 additions and 492 deletions.
4 changes: 3 additions & 1 deletion models/demos/falcon7b_common/tt/model_utils.py
@@ -50,7 +50,9 @@ def preprocess_weights(weights_to_cache):
     layout=tt_layout,
     device=mesh_device,
     memory_config=model_config[f"{weight_config_str}_MEMCFG"],
-    mesh_mapper=ttnn.replicate_tensor_to_mesh_mapper(mesh_device) if type(mesh_device) == ttnn.MeshDevice else None,
+    mesh_mapper=ttnn.replicate_tensor_to_mesh_mapper(mesh_device)
+    if type(mesh_device) == ttnn.MeshDevice
+    else None,
     cache_file_name=str(path),
     preprocess=preprocess_weights,
 )
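The change above is purely cosmetic: pre-commit reflows the conditional expression onto multiple lines. For context, a minimal sketch of how this conditional mesh_mapper is typically used when caching a weight tensor. The enclosing ttnn.as_tensor call is inferred from the cache_file_name/preprocess arguments, and the wrapper name, dtype, and layout choices are assumptions, not taken from the diff:

    import ttnn

    def cache_weight(torch_weight, mesh_device, model_config, weight_config_str, path, preprocess_weights):
        # Hypothetical wrapper, for illustration only.
        # Replicate the host tensor to every device only when targeting a MeshDevice;
        # on a single device no mesh mapper is needed.
        mesh_mapper = (
            ttnn.replicate_tensor_to_mesh_mapper(mesh_device)
            if type(mesh_device) == ttnn.MeshDevice
            else None
        )
        return ttnn.as_tensor(
            torch_weight,
            dtype=ttnn.bfloat16,      # assumed; the original passes a model-config dtype
            layout=ttnn.TILE_LAYOUT,  # assumed; the original passes tt_layout
            device=mesh_device,
            memory_config=model_config[f"{weight_config_str}_MEMCFG"],
            mesh_mapper=mesh_mapper,
            cache_file_name=str(path),  # cached to disk, reused on subsequent runs
            preprocess=preprocess_weights,
        )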
4 changes: 3 additions & 1 deletion models/demos/llama3/tt/llama_common.py
@@ -402,7 +402,9 @@ def sample_host(tt_input, mesh_device, temperature=0.6, top_p=0.08, on_host=True
     layout=ttnn.ROW_MAJOR_LAYOUT,
     dtype=ttnn.uint32,
     device=None,
-    mesh_mapper=ttnn.replicate_tensor_to_mesh_mapper(mesh_device) if mesh_device.get_num_devices() > 1 else None,
+    mesh_mapper=ttnn.replicate_tensor_to_mesh_mapper(mesh_device)
+    if mesh_device.get_num_devices() > 1
+    else None,
     ),
     pt_out,
 )
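Same formatting change, this time inside sample_host, where the condition is a runtime device count rather than a type check: the sampled tokens stay on host and are replicated across the mesh only when more than one device is present. A rough sketch of the pattern, assuming the enclosing call is ttnn.from_torch and that pt_out holds the sampled token ids; the helper name is hypothetical:

    import torch
    import ttnn

    def host_tokens_for_mesh(pt_out: torch.Tensor, mesh_device):
        # Replicate only when the mesh spans multiple devices; with a single
        # device the host tensor is converted without a mesh mapper.
        tt_out = ttnn.from_torch(
            pt_out,
            layout=ttnn.ROW_MAJOR_LAYOUT,
            dtype=ttnn.uint32,
            device=None,  # stays on host
            mesh_mapper=ttnn.replicate_tensor_to_mesh_mapper(mesh_device)
            if mesh_device.get_num_devices() > 1
            else None,
        )
        return tt_out, pt_out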
1 change: 1 addition & 0 deletions models/demos/t3000/llama2_70b/tt/llama_common.py
@@ -31,6 +31,7 @@
     MeshToTensor,
 )
 
+
 class ConcatMesh2DToTensor(MeshToTensor):
     def __init__(self, mesh_device, dims, cluster_shape):
         self.dims = dims