Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[onert/train] Add LayerScopeManager into TensorManager #14047

Merged
merged 1 commit into from
Oct 7, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 22 additions & 1 deletion runtime/onert/backend/train/TensorManager.cc
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,8 @@ TensorManager::TensorManager(const std::shared_ptr<TensorRegistry> &reg, uint32_
_trainable_mgr{new TrainableMemoryManager(optim_vars_count)},
_back_prop_mgr{new MemoryManager()}, _gradient_mgr{new MemoryManager()},
// TODO Find a suitable planner of disposable tensors to reduce peak memory usage
_disposable_back_prop_mgr{new DisposableMemoryManager()}, _tensors{reg}
_disposable_back_prop_mgr{new DisposableMemoryManager()},
_layer_scope_mgr{new LayerScopeMemoryManager()}, _tensors{reg}
{
// DO NOTHING
}
Expand Down Expand Up @@ -106,6 +107,12 @@ void TensorManager::allocateDisposableBackPropTensors()
std::string{"DISPOSABLE BACK_PROP TENSOR "});
}

// Allocate backing memory for every registered layer-scope tensor.
// Delegates to allocateMemory() with the layer-scope memory manager and the
// set of layer-scope tensors held by the registry; the tag string is used
// for allocation logging.
void TensorManager::allocateLayerScopeTensors()
{
  // NOTE The tag previously carried a stray leading space (" LAYERSCOPE ...")
  //      which was inconsistent with the other allocate* calls
  //      (e.g. "DISPOSABLE BACK_PROP TENSOR ").
  allocateMemory(_layer_scope_mgr.get(), _tensors->layerscope_tensors(),
                 std::string{"LAYERSCOPE TENSOR "});
}

void TensorManager::claimNonConstPlan(const ir::OperandIndex &index)
{
auto tensor = _tensors->getNonConstTensor(index);
Expand Down Expand Up @@ -187,6 +194,20 @@ void TensorManager::releaseDisposableBackPropPlan(const DisposableTensorIndex &i
_disposable_back_prop_mgr->releasePlan(index);
}

// Register a memory-usage plan for the layer-scope tensor identified by
// `index`. The requested size is the tensor's total byte size rounded up to
// the backend's alignment boundary.
void TensorManager::claimLayerScopePlan(const LayerScopeTensorIndex &index)
{
  const auto tensor = _tensors->getLayerScopeTensor(index);
  _layer_scope_mgr->claimPlan(index, alignedSize(tensor->total_size(), _align));
}

// Release the memory-usage plan previously claimed for the layer-scope
// tensor identified by `index`.
void TensorManager::releaseLayerScopePlan(const LayerScopeTensorIndex &index)
{
  // The tensor must have been registered before its plan can be released.
  assert(_tensors->getLayerScopeTensor(index) != nullptr);
  _layer_scope_mgr->releasePlan(index);
}

} // namespace train
} // namespace backend
} // namespace onert
7 changes: 4 additions & 3 deletions runtime/onert/backend/train/TensorManager.h
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,7 @@ class TensorManager
void allocateBackPropTensors();
void allocateGradientTensors();
void allocateDisposableBackPropTensors();
void allocateLayerScopeTensors();
// TODO Add member functions to deallocate tensors

void claimNonConstPlan(const ir::OperandIndex &ind);
Expand All @@ -61,16 +62,16 @@ class TensorManager
void releaseGradientPlan(const ir::OperandIndex &ind);
void claimDisposableBackPropPlan(const DisposableTensorIndex &ind);
void releaseDisposableBackPropPlan(const DisposableTensorIndex &ind);
// TODO Add member functions related to LayerScopeMemoryManager
void claimLayerScopePlan(const LayerScopeTensorIndex &ind);
void releaseLayerScopePlan(const LayerScopeTensorIndex &ind);

private:
std::unique_ptr<MemoryManager> _nonconst_mgr;
std::unique_ptr<TrainableMemoryManager> _trainable_mgr;
std::unique_ptr<MemoryManager> _back_prop_mgr;
std::unique_ptr<MemoryManager> _gradient_mgr;
std::unique_ptr<DisposableMemoryManager> _disposable_back_prop_mgr;
// TODO: enable _layer_scope_mgr
// std::unique_ptr<LayerScopeMemoryManager> _layer_scope_mgr;
std::unique_ptr<LayerScopeMemoryManager> _layer_scope_mgr;
const std::shared_ptr<TensorRegistry> _tensors;
};

Expand Down