Revert "add broadcast op to micro_perf. (bytedance#60)" (bytedance#61)
This reverts commit 21faa3f.
suisiyuan authored Apr 2, 2024
1 parent 21faa3f commit 210eecc
Showing 5 changed files with 1 addition and 48 deletions.
byte_micro_perf/backends/GPU/backend_gpu.py: 4 changes (0 additions, 4 deletions)
@@ -102,10 +102,6 @@ def alltoall(self):
         self.setup_2d_group()
         self.op = AllToAllOp(self.group)

-    def broadcast(self):
-        self.setup_2d_group()
-        self.op = BroadcastOp(self.group)
-
     def host2device(self):
         self.op = Host2DeviceOp(torch.device("cuda"))
byte_micro_perf/backends/backend.py: 3 changes (0 additions, 3 deletions)
@@ -113,9 +113,6 @@ def reducescatter(self):
     def alltoall(self):
         pass

-    def broadcast(self):
-        pass
-
     def host2device(self):
         pass
byte_micro_perf/backends/module_store.py: 10 changes (0 additions, 10 deletions)
@@ -201,16 +201,6 @@ def forward(self, in_tensors_list, out_tensors_list):
         return True


-class BroadcastOp(torch.nn.Module):
-    def __init__(self, group):
-        super().__init__()
-        self.group = group
-
-    def forward(self, input_tensors):
-        dist.broadcast(input_tensors, 0, self.group)
-        return True
-
-
 class Device2HostOp(torch.nn.Module):
     def __init__(self):
         super().__init__()
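For reference, the reverted BroadcastOp only wraps torch.distributed.broadcast from rank 0 over a process group. A minimal usage sketch follows (assumptions, not code from this repo: torch.distributed is already initialized, `group` is an existing process group, and the buffer shape is illustrative):

import torch
import torch.distributed as dist

# Hypothetical driver for the reverted op: every rank allocates a buffer,
# then rank 0's contents are broadcast into all of them.
op = BroadcastOp(group)                  # the module removed by this revert
buf = torch.empty(1024, device="cuda")
op(buf)                                  # calls dist.broadcast(buf, 0, group)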
byte_micro_perf/core/perf_engine.py: 2 changes (1 addition, 1 deletion)
@@ -133,7 +133,7 @@ def start_engine(self) -> None:
         output_dir = os.path.abspath("reports/" + self.backend_type)
         os.makedirs(output_dir, exist_ok=True)

-        if self.args.task in ["allreduce", "allgather", "reducescatter", "alltoall", "broadcast"]:
+        if self.args.task in ["allreduce", "allgather", "reducescatter", "alltoall"]:
             for group in self.workload["group"]:
                 mp.spawn(fn=self.init_process, args=(group,), nprocs=group)
         else:
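The surviving branch still launches one process per rank for the remaining collective tasks. A self-contained sketch of that torch.multiprocessing pattern follows (the worker, backend, port, and tensor are illustrative assumptions, not the engine's actual init_process):

import torch
import torch.distributed as dist
import torch.multiprocessing as mp

# Hypothetical per-rank worker: initialize the process group, run one
# collective (here an all-reduce), then tear the group down.
def worker(rank, world_size):
    dist.init_process_group("gloo", init_method="tcp://127.0.0.1:29501",
                            rank=rank, world_size=world_size)
    tensor = torch.ones(4)
    dist.all_reduce(tensor)              # e.g. the "allreduce" task
    dist.destroy_process_group()

if __name__ == "__main__":
    group = 4                            # one process per rank in the group
    mp.spawn(fn=worker, args=(group,), nprocs=group)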
byte_micro_perf/workloads/broadcast.json: 30 changes (0 additions, 30 deletions)

This file was deleted.
