Skip to content

Commit

Permalink
#3965: restructure directories of bert and bloom functional models
Browse files Browse the repository at this point in the history
  • Loading branch information
arakhmati committed Nov 28, 2023
1 parent 823c740 commit 69a7314
Show file tree
Hide file tree
Showing 13 changed files with 16 additions and 553 deletions.
543 changes: 0 additions & 543 deletions models/experimental/functional_bert/test_functional_bert.py

This file was deleted.

6 changes: 3 additions & 3 deletions tests/ttnn/integration_tests/bert/test_bert.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,14 +14,14 @@

import ttnn

-from models.experimental.functional_bert.ttnn_functional_bert import (
+from models.experimental.functional_bert.tt.ttnn_functional_bert import (
     ttnn_bert_for_question_answering,
 )
 
-from models.experimental.functional_bert.ttnn_optimized_functional_bert import (
+from models.experimental.functional_bert.tt.ttnn_optimized_functional_bert import (
     ttnn_optimized_bert_for_question_answering,
 )
-from models.experimental.functional_bert.torch_functional_bert import (
+from models.experimental.functional_bert.reference.torch_functional_bert import (
     torch_bert_for_question_answering,
 )
 from ttnn.model_preprocessing import (
from ttnn.model_preprocessing import (
Expand Down
7 changes: 5 additions & 2 deletions tests/ttnn/integration_tests/bert/test_encoder.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,11 @@

import ttnn

-from models.experimental.functional_bert.ttnn_functional_bert import ttnn_multi_head_attention, ttnn_feedforward
-from models.experimental.functional_bert.torch_functional_bert import torch_multi_head_attention, torch_feedforward
+from models.experimental.functional_bert.tt.ttnn_functional_bert import ttnn_multi_head_attention, ttnn_feedforward
+from models.experimental.functional_bert.reference.torch_functional_bert import (
+    torch_multi_head_attention,
+    torch_feedforward,
+)
from models.utility_functions import torch_random

from tests.ttnn.utils_for_testing import assert_with_pcc, torch_random
Expand Down
4 changes: 2 additions & 2 deletions tests/ttnn/integration_tests/bert/test_feedforward.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,8 @@

import ttnn

-from models.experimental.functional_bert.torch_functional_bert import torch_feedforward as torch_model
-from models.experimental.functional_bert.ttnn_functional_bert import ttnn_feedforward as ttnn_model
+from models.experimental.functional_bert.reference.torch_functional_bert import torch_feedforward as torch_model
+from models.experimental.functional_bert.tt.ttnn_functional_bert import ttnn_feedforward as ttnn_model
from models.utility_functions import torch_random

from tests.ttnn.utils_for_testing import assert_with_pcc
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,10 @@

import ttnn

-from models.experimental.functional_bert.torch_functional_bert import torch_multi_head_attention as torch_model
-from models.experimental.functional_bert.ttnn_functional_bert import ttnn_multi_head_attention as ttnn_model
+from models.experimental.functional_bert.reference.torch_functional_bert import (
+    torch_multi_head_attention as torch_model,
+)
+from models.experimental.functional_bert.tt.ttnn_functional_bert import ttnn_multi_head_attention as ttnn_model
from models.utility_functions import torch_random

from tests.ttnn.utils_for_testing import assert_with_pcc
Expand Down
3 changes: 2 additions & 1 deletion tests/ttnn/integration_tests/bloom/test_functional_bloom.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,8 @@

# SPDX-License-Identifier: Apache-2.0

-from models.experimental.functional_bloom import torch_functional_bloom, ttnn_functional_bloom
+from models.experimental.functional_bloom.reference import torch_functional_bloom
+from models.experimental.functional_bloom.tt import ttnn_functional_bloom
from transformers import BloomForCausalLM, BloomTokenizerFast
import pytest
import torch
Expand Down

0 comments on commit 69a7314

Please sign in to comment.