
Commit 880417e: Clear output

cmikeh2 committed Dec 16, 2023
1 parent: c1e90a3

Showing 2 changed files with 2 additions and 2 deletions.
File 1 of 2:

@@ -75,4 +75,4 @@ def finalize(self) -> torch.Tensor:
             transposed_experts.append(fused_expert)

         stacked_experts = torch.stack(transposed_experts, dim=0)
-        return self.inference_model.transform_moe_mlp_2_param(stacked_experts)
+        return self.inference_model.transform_moe_mlp_1_param(stacked_experts)
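
For readers without the full file, a minimal sketch of how a finalize() hook like the one above could fit together. Only the four lines shown in the hunk come from the diff; the surrounding class, constructor, and per-expert preparation step are assumptions made for illustration.

import torch

# Hypothetical container sketch; names outside the hunk above are assumptions.
class MoEMLP1ParameterSketch:
    def __init__(self, inference_model, expert_weights):
        self.inference_model = inference_model
        self.expert_weights = expert_weights  # one weight tensor per expert

    def finalize(self) -> torch.Tensor:
        transposed_experts = []
        for expert in self.expert_weights:
            # Assumed per-expert preparation step preceding the lines in the hunk.
            fused_expert = expert.t().contiguous()
            transposed_experts.append(fused_expert)

        stacked_experts = torch.stack(transposed_experts, dim=0)
        # The fix: route the first MoE MLP weight through the *_mlp_1_* transform
        # rather than the *_mlp_2_* transform used for the second projection.
        return self.inference_model.transform_moe_mlp_1_param(stacked_experts)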
File 2 of 2:

@@ -36,7 +36,7 @@ class MixtralTransformerContainer(LayerContainer):
 class MixtralNonTransformerContainer(LayerContainer):

     word_emb: EmbeddingParameter
-    word_unembed: EmbeddingParameter
+    word_unembed: UnembedParameter
     final_norm: NormParameter

     PARAM_MAPPING = {
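
A hedged sketch of how the non-transformer container reads after this commit. The stub base and parameter classes and the PARAM_MAPPING entries are placeholders added for illustration; only the three annotated fields come from the hunk above, and the real mapping sits behind the collapsed portion of the diff.

# Stand-in classes so the sketch is self-contained; in the real project these
# come from the inference container modules (an assumption here).
class LayerContainer: ...
class EmbeddingParameter: ...
class UnembedParameter: ...
class NormParameter: ...


class MixtralNonTransformerContainer(LayerContainer):
    # Field annotations as they read after the commit: the unembedding weight
    # now uses the dedicated UnembedParameter type instead of EmbeddingParameter.
    word_emb: EmbeddingParameter
    word_unembed: UnembedParameter
    final_norm: NormParameter

    # Hypothetical checkpoint-name -> container-field entries, for illustration only.
    PARAM_MAPPING = {
        "model.embed_tokens.weight": "word_emb.params",
        "model.norm.weight": "final_norm.params",
        "lm_head.weight": "word_unembed.params",
    }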
