
Commit

debug
Edenzzzz committed Aug 7, 2024
1 parent 5c4b445 commit 6bf9936
Showing 1 changed file with 12 additions and 1 deletion.
13 changes: 12 additions & 1 deletion colossalai/booster/plugin/moe_hybrid_parallel_plugin.py
@@ -22,7 +22,18 @@
     get_param_info,
     reinitialize_optimizer,
 )
-from colossalai.checkpoint_io import MoECheckpointIO
+
+try:
+    from colossalai.checkpoint_io import MoECheckpointIO
+except ImportError as e:
+    import os
+
+    # see what's in this folder
+    path = "/opt/conda/envs/pytorch/lib/python3.9/site-packages/colossalai/checkpoint_io/"
+    print(f"Files in {path}: {os.listdir(path)}")
+    print(f"content of __init__.py: {open(path + '__init__.py').read()}")
+    raise e
+
 from colossalai.cluster.process_group_mesh import ProcessGroupMesh
 from colossalai.interface import ModelWrapper, OptimizerWrapper
 from colossalai.interface.optimizer import DistributedOptim
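For context, the change wraps the MoECheckpointIO import in a try/except as a temporary debugging aid: on ImportError it lists the files in the installed checkpoint_io package directory and dumps its __init__.py before re-raising. A minimal sketch of the same probe, assuming one wants to avoid the hardcoded conda path by resolving the package location with importlib.util.find_spec (this generalization is not part of the commit), could look like:

import importlib.util
from pathlib import Path

try:
    from colossalai.checkpoint_io import MoECheckpointIO  # noqa: F401
except ImportError:
    # Resolve where colossalai.checkpoint_io is installed instead of
    # hardcoding a site-packages path (hypothetical debug helper, not the commit's code).
    spec = importlib.util.find_spec("colossalai.checkpoint_io")
    if spec is not None and spec.origin is not None:
        pkg_dir = Path(spec.origin).parent
        print(f"Files in {pkg_dir}: {sorted(p.name for p in pkg_dir.iterdir())}")
        print(f"Content of __init__.py:\n{(pkg_dir / '__init__.py').read_text()}")
    raise

Resolving the path from the module spec keeps the probe usable outside the specific /opt/conda environment while still surfacing which files actually shipped with the installed package.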
