[onert/backend] Introduce LayerScopeMemoryManager
This PR introduces LayerScopeMemoryManager.
The manager will be added to TensorManager and used to allocate LayerScopeTensors.
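
For context, here is a minimal usage sketch (not part of this commit) of the claim/allocate/lookup lifecycle that LayerScopeMemoryManager exposes. The caller, the `plans` input, and how the LayerScopeTensorIndex values are produced are assumptions for illustration only.

#include <cstdint>
#include <utility>
#include <vector>

#include "LayerScopeTensorIndex.h"
#include "MemoryManager.h" // declares LayerScopeMemoryManager (this commit)

using onert::backend::train::LayerScopeMemoryManager;
using onert::backend::train::LayerScopeTensorIndex;

// Drive one plan/allocate cycle for a set of layer-scope tensors.
void planAndAllocate(LayerScopeMemoryManager &mgr,
                     const std::vector<std::pair<LayerScopeTensorIndex, uint32_t>> &plans)
{
  // Planning phase: claim a block for each tensor, then release it so the
  // planner can reuse space (real callers interleave claim/release to match
  // tensor lifetimes).
  for (const auto &p : plans)
    mgr.claimPlan(p.first, p.second);
  for (const auto &p : plans)
    mgr.releasePlan(p.first);

  // Allocation phase: a single backing buffer sized to the planner's capacity.
  mgr.allocate();

  // Lookup phase: each index resolves to base + planned offset in that buffer.
  for (const auto &p : plans)
  {
    uint8_t *buf = mgr.getBuffer(p.first);
    (void)buf; // would be handed to the matching LayerScopeTensor
  }

  // Teardown: release the backing allocation.
  mgr.deallocate();
}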

ONE-DCO-1.0-Signed-off-by: seunghui youn <[email protected]>

draft : Samsung#13486
for : Samsung#13282
zetwhite committed Sep 13, 2024
1 parent 2a53b2b commit 7e60143
Showing 3 changed files with 59 additions and 1 deletion.
36 changes: 36 additions & 0 deletions runtime/onert/backend/train/MemoryManager.cc
@@ -93,6 +93,42 @@ uint8_t *DisposableMemoryManager::getBuffer(const DisposableTensorIndex &ind) const
return _mem_alloc->base() + mem_blk.offset;
}

LayerScopeMemoryManager::LayerScopeMemoryManager() : _mem_planner{createMemoryPlanner()}
{
// DO NOTHING
}

basic::IMemoryPlanner<LayerScopeTensorIndex> *LayerScopeMemoryManager::createMemoryPlanner()
{
auto planner_id = util::getConfigString(util::config::CPU_MEMORY_PLANNER);
return MemoryPlannerFactory<LayerScopeTensorIndex>::get().create(planner_id);
}

void LayerScopeMemoryManager::allocate(void)
{
_mem_alloc = std::make_shared<basic::Allocator>(_mem_planner->capacity());
assert(_mem_alloc->base());
}

uint8_t *LayerScopeMemoryManager::getBuffer(const LayerScopeTensorIndex &ind) const
{
assert(_mem_planner->memory_plans().find(ind) != _mem_planner->memory_plans().end());
const auto &mem_blk = _mem_planner->memory_plans().at(ind);
return _mem_alloc->base() + mem_blk.offset;
}

void LayerScopeMemoryManager::deallocate(void) { _mem_alloc->release(); }

void LayerScopeMemoryManager::claimPlan(const LayerScopeTensorIndex &ind, uint32_t size)
{
_mem_planner->claim(ind, size);
}

void LayerScopeMemoryManager::releasePlan(const LayerScopeTensorIndex &ind)
{
_mem_planner->release(ind);
}

} // namespace train
} // namespace backend
} // namespace onert
21 changes: 20 additions & 1 deletion runtime/onert/backend/train/MemoryManager.h
@@ -20,6 +20,7 @@
#include <backend/basic/MemoryManager.h>

#include "DisposableTensorIndex.h"
#include "LayerScopeTensorIndex.h"

namespace onert
{
@@ -67,7 +68,25 @@ class DisposableMemoryManager
std::shared_ptr<basic::Allocator> _mem_alloc;
};

// TODO: Add LayerScopeMemoryManager using MemoryPlannerFactory<LayerScopeTensorIndex>
class LayerScopeMemoryManager
{
public:
LayerScopeMemoryManager();

void allocate(void);
uint8_t *getBuffer(const LayerScopeTensorIndex &ind) const;
void deallocate(void);

void claimPlan(const LayerScopeTensorIndex &ind, uint32_t size);
void releasePlan(const LayerScopeTensorIndex &ind);

private:
basic::IMemoryPlanner<LayerScopeTensorIndex> *createMemoryPlanner();

private:
std::shared_ptr<basic::IMemoryPlanner<LayerScopeTensorIndex>> _mem_planner;
std::shared_ptr<basic::Allocator> _mem_alloc;
};

} // namespace train
} // namespace backend
3 changes: 3 additions & 0 deletions runtime/onert/backend/train/TensorManager.h
@@ -61,13 +61,16 @@ class TensorManager
void releaseGradientPlan(const ir::OperandIndex &ind);
void claimDisposableBackPropPlan(const DisposableTensorIndex &ind);
void releaseDisposableBackPropPlan(const DisposableTensorIndex &ind);
// TODO Add member functions related to LayerScopeMemoryManager

private:
std::unique_ptr<MemoryManager> _nonconst_mgr;
std::unique_ptr<TrainableMemoryManager> _trainable_mgr;
std::unique_ptr<MemoryManager> _back_prop_mgr;
std::unique_ptr<MemoryManager> _gradient_mgr;
std::unique_ptr<DisposableMemoryManager> _disposable_back_prop_mgr;
// TODO: enable _layer_scope_mgr
// std::unique_ptr<LayerScopeMemoryManager> _layer_scope_mgr;
const std::shared_ptr<TensorRegistry> _tensors;
};

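
As a rough illustration of the TODOs in TensorManager.h above, the follow-up (Samsung#13282, drafted in Samsung#13486) would presumably hold a LayerScopeMemoryManager and forward planning calls to it, mirroring the existing Disposable entry points. The stand-in class and member names below are hypothetical, not taken from this commit.

// Hypothetical sketch (not from this commit) of how TensorManager could own
// and drive a LayerScopeMemoryManager once the TODOs are resolved.
#include <cstdint>
#include <memory>

#include "LayerScopeTensorIndex.h"
#include "MemoryManager.h"

namespace onert
{
namespace backend
{
namespace train
{

// Stand-in for the planned TensorManager additions.
class LayerScopePlanDriver
{
public:
  LayerScopePlanDriver() : _layer_scope_mgr{std::make_unique<LayerScopeMemoryManager>()} {}

  void claimLayerScopePlan(const LayerScopeTensorIndex &ind, uint32_t size)
  {
    _layer_scope_mgr->claimPlan(ind, size);
  }
  void releaseLayerScopePlan(const LayerScopeTensorIndex &ind)
  {
    _layer_scope_mgr->releasePlan(ind);
  }
  // Allocate the shared buffer once all claims/releases have been planned.
  void allocateLayerScopeTensors() { _layer_scope_mgr->allocate(); }

private:
  std::unique_ptr<LayerScopeMemoryManager> _layer_scope_mgr;
};

} // namespace train
} // namespace backend
} // namespace onert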
