[ROCM] Exclude flash attention from hipify (#21091)
Exclude the flash attention subdirectory from hipify (the step that converts CUDA contrib-op sources to HIP for the ROCm build).
tianleiwu authored Jun 19, 2024
1 parent 6e742c4 commit 01279d8
Showing 1 changed file with 4 additions and 3 deletions.
cmake/onnxruntime_rocm_hipify.cmake (4 additions, 3 deletions)
@@ -5,6 +5,10 @@ find_package(Python3 COMPONENTS Interpreter REQUIRED)
 
 # GLOB pattern of file to be excluded
 set(contrib_ops_excluded_files
+  "bert/cutlass_fmha/*"
+  "bert/fastertransformer_decoder_attention/*"
+  "bert/flash_attention/*"
+  "bert/tensorrt_fused_multihead_attention/*"
   "bert/attention.cc"
   "bert/attention.h"
   "bert/attention_impl.cu"
@@ -17,7 +21,6 @@ set(contrib_ops_excluded_files
   "bert/decoder_masked_multihead_attention.cc"
   "bert/decoder_masked_self_attention.h"
   "bert/decoder_masked_self_attention.cc"
-  "bert/fastertransformer_decoder_attention/*"
   "bert/multihead_attention.cc"
   "bert/multihead_attention.h"
   "bert/relative_attn_bias.cc"
@@ -28,8 +31,6 @@ set(contrib_ops_excluded_files
   "bert/skip_layer_norm.h"
   "bert/skip_layer_norm_impl.cu"
   "bert/skip_layer_norm_impl.h"
-  "bert/cutlass_fmha/*"
-  "bert/tensorrt_fused_multihead_attention/*"
   "bert/transformer_common.h"
   "bert/transformer_common.cc"
   "bert/packed_attention.h"
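
Note: the glob patterns in contrib_ops_excluded_files keep the listed files out of the hipify conversion, presumably because directories such as bert/flash_attention/ contain CUDA-specific kernels that do not translate cleanly to HIP. The sketch below illustrates how such glob-style exclusions could be applied before invoking a hipify tool. It is an illustration only, not the repository's actual hipify driver: CONTRIB_OPS_DIR, contrib_ops_hipified_srcs, and the filtering loop are hypothetical names introduced here.

# Hypothetical sketch: filter CUDA sources against the exclusion patterns
# before handing the remainder to a hipify conversion step. CONTRIB_OPS_DIR
# and contrib_ops_hipified_srcs are illustrative names, not variables from
# the actual onnxruntime build.
file(GLOB_RECURSE contrib_ops_cuda_srcs RELATIVE "${CONTRIB_OPS_DIR}"
     "${CONTRIB_OPS_DIR}/*.cc" "${CONTRIB_OPS_DIR}/*.h"
     "${CONTRIB_OPS_DIR}/*.cu" "${CONTRIB_OPS_DIR}/*.cuh")

set(contrib_ops_hipified_srcs "")
foreach(src IN LISTS contrib_ops_cuda_srcs)
  set(excluded FALSE)
  foreach(pattern IN LISTS contrib_ops_excluded_files)
    # Turn the glob into an anchored regex:
    # "bert/flash_attention/*" -> "^bert/flash_attention/.*$"
    string(REPLACE "." "\\." regex "${pattern}")
    string(REPLACE "*" ".*" regex "${regex}")
    if(src MATCHES "^${regex}$")
      set(excluded TRUE)
      break()
    endif()
  endforeach()
  if(NOT excluded)
    list(APPEND contrib_ops_hipified_srcs "${src}")
  endif()
endforeach()
# contrib_ops_hipified_srcs would then be passed to the hipify tool.

For a single pattern, CMake's built-in list(FILTER contrib_ops_cuda_srcs EXCLUDE REGEX ...) could replace the inner loop; the explicit loop is used here only to show the glob-to-regex translation step.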
