#17213: update fused and matmul trace sweep tests
bbradelTT committed Jan 28, 2025
1 parent 663757a commit e3547b1
Showing 3 changed files with 31 additions and 13 deletions.
21 changes: 15 additions & 6 deletions tests/sweep_framework/sweeps/fused/layer_norm_traces.py
@@ -4,8 +4,8 @@

from typing import Optional, Tuple

import pytest
import torch

import ttnn

from tests.ttnn.utils_for_testing import check_with_pcc, start_measuring_time, stop_measuring_time
@@ -108,11 +108,7 @@
}


def run(
    params,
    *,
    device,
) -> list:
def run_layer_norm(device, params):
    [input_shape, normalized_shape, eps] = params
    torch_input_tensor = torch.rand(input_shape, dtype=torch.float32)
    torch_weight_tensor = torch.rand(normalized_shape, dtype=torch.float32)
@@ -131,3 +127,16 @@ def run(
    e2e_perf = stop_measuring_time(start_time)
    expected_pcc = 0.999
    return [check_with_pcc(torch_output_tensor, output_tensor, expected_pcc), e2e_perf]


@pytest.mark.parametrize("params", parameters["default"]["params"])
def test_layer_norm(device, params):
    run_layer_norm(device, params)


def run(
    params,
    *,
    device,
) -> list:
    return run_layer_norm(device, params)
21 changes: 15 additions & 6 deletions tests/sweep_framework/sweeps/fused/softmax_traces.py
@@ -4,8 +4,8 @@

from typing import Optional, Tuple

import pytest
import torch

import ttnn

from tests.ttnn.utils_for_testing import check_with_pcc, start_measuring_time, stop_measuring_time
@@ -98,11 +98,7 @@
}


def run(
    params,
    *,
    device,
) -> list:
def run_softmax(device, params):
    [input_shape, dim, half_to_float] = params
    # TODO find out what half_to_float is supposed to mean in the provided traces
    torch_input_tensor = torch.rand(input_shape, dtype=torch.float32)
@@ -116,3 +112,16 @@ def run(
    e2e_perf = stop_measuring_time(start_time)
    expected_pcc = 0.989
    return [check_with_pcc(torch_output_tensor, output_tensor, expected_pcc), e2e_perf]


@pytest.mark.parametrize("params", parameters["default"]["params"])
def test_softmax(device, params):
    run_softmax(device, params)


def run(
    params,
    *,
    device,
) -> list:
    return run_softmax(device, params)
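
Both fused sweeps apply the same refactor: the body of the old run entry point moves into a named helper (run_layer_norm / run_softmax), a parametrized pytest test drives that helper directly, and the sweep-framework entry point run(params, *, device) is kept as a thin delegate. A minimal sketch of the pattern, with a hypothetical my_op helper and a made-up parameters table standing in for the real trace data, and assuming the device argument is supplied by the repo's pytest device fixture:

import pytest

# Hypothetical stand-in for the trace-derived parameter table used by the real sweeps.
parameters = {"default": {"params": [([1, 32, 64], [64], 1e-5)]}}


def run_my_op(device, params):
    # Shared body: unpack params, build tensors, run the op on device,
    # and return [check_with_pcc(...), e2e_perf].
    ...


@pytest.mark.parametrize("params", parameters["default"]["params"])
def test_my_op(device, params):
    # pytest path: exercises the same trace parameters without the sweep framework.
    run_my_op(device, params)


def run(
    params,
    *,
    device,
) -> list:
    # Sweep-framework entry point: signature unchanged, now just delegates.
    return run_my_op(device, params)
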
2 changes: 1 addition & 1 deletion tests/sweep_framework/sweeps/matmul/short/matmul_traces.py
@@ -2648,7 +2648,7 @@ def run_matmul(device, params, core_grid, dtype, test_bias):
    half = int(count / 2)
    shape0 = params[0:half]
    shape1 = params[half:count]
    shape2 = [shape1[-1]]
    shape2 = [1 if i < (half - 1) else shape1[-1] for i in range(half)]
    torch_input_tensor0 = torch.rand(shape0, dtype=torch.float32)
    torch_input_tensor1 = torch.rand(shape1, dtype=torch.float32)
    torch_input_tensor2 = torch.rand(shape2, dtype=torch.float32)
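
The one-line change above gives the bias tensor the same rank as the matmul inputs instead of always being 1-D, so it broadcasts over every leading dimension of the matmul output. A small worked sketch with illustrative values (the shapes below are hypothetical, not taken from the traces, and count is assumed to be len(params)):

# Hypothetical flattened trace entry: shape0 and shape1 packed back to back.
params = [1, 32, 64, 1, 64, 128]
count = len(params)          # 6
half = int(count / 2)        # 3
shape0 = params[0:half]      # [1, 32, 64]
shape1 = params[half:count]  # [1, 64, 128]

# Old: shape2 = [shape1[-1]]  ->  [128]  (1-D bias)
# New: leading dims are 1, last dim matches shape1[-1]
shape2 = [1 if i < (half - 1) else shape1[-1] for i in range(half)]
print(shape2)                # [1, 1, 128], which broadcasts over the (1, 32, 128) matmul output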
