Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

The assumption that the torch.initial_seed function accepts a seed argument in the DeepSpeedAccelerator abstract class is incorrect #5569

Merged
merged 13 commits on
Jun 12, 2024
2 changes: 1 addition & 1 deletion — accelerator/abstract_accelerator.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ def manual_seed_all(self, seed):
...

@abc.abstractmethod
def initial_seed(self, seed):
def initial_seed(self):
...

@abc.abstractmethod
Expand Down
4 changes: 2 additions & 2 deletions — accelerator/cpu_accelerator.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,8 +100,8 @@ def manual_seed(self, seed):
def manual_seed_all(self, seed):
return torch.manual_seed(seed)

def initial_seed(self, seed):
return torch.initial_seed(seed)
def initial_seed(self):
return torch.initial_seed()

def default_generator(self, device_index):
return torch.default_generator
Expand Down
4 changes: 2 additions & 2 deletions — accelerator/cuda_accelerator.py
Original file line number Diff line number Diff line change
Expand Up @@ -99,8 +99,8 @@ def manual_seed(self, seed):
def manual_seed_all(self, seed):
return torch.cuda.manual_seed_all(seed)

def initial_seed(self, seed):
return torch.cuda.initial_seed(seed)
def initial_seed(self):
return torch.cuda.initial_seed()

def default_generator(self, device_index):
return torch.cuda.default_generators[device_index]
Expand Down
6 changes: 3 additions & 3 deletions — accelerator/hpu_accelerator.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,13 +74,13 @@ def get_rng_state(self, device_index=None):
return self.hpu.random.get_rng_state()

def manual_seed(self, seed):
self.hpu.random.manual_seed(seed)
return self.hpu.random.manual_seed(seed)

def manual_seed_all(self, seed):
self.hpu.random.manual_seed_all(seed)

def initial_seed(self, seed):
self.hpu.random.initial_seed(seed)
def initial_seed(self):
return self.hpu.random.initial_seed()

def default_generator(self, device_index):
return self.hpu.random.default_generators[device_index]
Expand Down
2 changes: 1 addition & 1 deletion — accelerator/mps_accelerator.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ def manual_seed_all(self, seed):
def seed(self):
return torch.mps.seed()

def initial_seed(self, seed):
def initial_seed(self):
return

def default_generator(self, device_index):
Expand Down
4 changes: 2 additions & 2 deletions — accelerator/npu_accelerator.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,8 +84,8 @@ def manual_seed(self, seed):
def manual_seed_all(self, seed):
return torch.npu.manual_seed_all(seed)

def initial_seed(self, seed):
return torch.npu.initial_seed(seed)
def initial_seed(self):
return torch.npu.initial_seed()

def default_generator(self, device_index):
return torch.npu.default_generators[device_index]
Expand Down
4 changes: 2 additions & 2 deletions — accelerator/xpu_accelerator.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,8 +74,8 @@ def manual_seed(self, seed):
def manual_seed_all(self, seed):
return torch.xpu.manual_seed_all(seed)

def initial_seed(self, seed):
return torch.xpu.initial_seed(seed)
def initial_seed(self):
return torch.xpu.initial_seed()

def default_generator(self, device_index):
return torch.xpu.default_generators[device_index]
Expand Down
Loading