
mmca + multi modal cross attn
Kye committed Oct 11, 2023
1 parent c82a810 commit 1f56e29
Showing 1 changed file with 10 additions and 7 deletions.
17 changes: 10 additions & 7 deletions zeta/nn/attention/__init__.py
@@ -1,20 +1,23 @@
"""Zeta Halo"""

# attentions
from zeta.nn.attention.attend import Attend
from zeta.nn.attention.cross_attention import CrossAttention
from zeta.nn.attention.flash_attention import FlashAttention
from zeta.nn.attention.flash_attention2 import FlashAttentionTwo
from zeta.nn.attention.local_attention import LocalAttention
from zeta.nn.attention.local_attention_mha import LocalMHA
from zeta.nn.attention.multihead_attention import MultiheadAttention
from zeta.nn.attention.multiquery_attention import MultiQueryAttention
from zeta.nn.attention.mgqa import MGQA

# from zeta.nn.attention.spatial_linear_attention import SpatialLinearAttention

from zeta.nn.attention.mixture_attention import (
    MixtureOfAttention,
    MixtureOfAutoregressiveAttention,
)
from zeta.nn.attention.multi_modal_causal_attention import (
    MultiModalCausalAttention,
    SimpleMMCA,
)
from zeta.nn.attention.multi_modal_cross_attn import MultiModalCrossAttention
from zeta.nn.attention.multihead_attention import MultiheadAttention
from zeta.nn.attention.multiquery_attention import MultiQueryAttention
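With these re-exports in place, the new multi-modal attention classes resolve at the package level rather than only at their submodule paths. A minimal import check follows (a sketch, assuming the zeta package from this repository is installed; the class names are exactly those exported above):

# The submodules multi_modal_causal_attention and multi_modal_cross_attn
# are re-exported through zeta/nn/attention/__init__.py, so the classes
# can now be imported from the package namespace directly.
from zeta.nn.attention import (
    MultiModalCausalAttention,
    MultiModalCrossAttention,
    SimpleMMCA,
)

print(MultiModalCrossAttention)  # confirms the symbol is exposed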
