Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[onert/backend] Add LayerScopeTensor interface to TensorBuilder #14198

Merged
merged 2 commits into from
Oct 14, 2024
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
56 changes: 56 additions & 0 deletions runtime/onert/backend/train/TensorBuilder.cc
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,27 @@ void TensorBuilder::registerDisposableBackwardTensorInfo(const DisposableTensorI
_disposable_backprops.add(index);
}

void TensorBuilder::registerLayerScopeTensor(const LayerScopeTensorIndex &index,
std::shared_ptr<LayerScopeTensor> &tensor)
zetwhite marked this conversation as resolved.
Show resolved Hide resolved
{
const auto op_idx = index.op_index();

const auto pair = _operation_to_layerscope.find(op_idx);
if (pair == _operation_to_layerscope.end())
{
util::Set<LayerScopeTensorIndex> tensor_indices;
tensor_indices.add(index);
_operation_to_layerscope[op_idx] = tensor_indices;
}
else
{
assert(!pair->second.contains(index));
pair->second.add(index);
}

_tensor_reg->setLayerScopeTensor(index, tensor);
}

void TensorBuilder::notifyFirstUse(const ir::OperandIndex &index)
{
// TODO Support momory plan
Expand Down Expand Up @@ -155,6 +176,16 @@ void TensorBuilder::notifyDisposableBackPropLastUse(const DisposableTensorIndex
_tensor_mgr->releaseDisposableBackPropPlan(index);
}

// Called when a layer-scope tensor becomes live: claim its slot in the memory plan.
void TensorBuilder::notifyLayerScopeFirstUse(const LayerScopeTensorIndex &ls_index)
{
  _tensor_mgr->claimLayerScopePlan(ls_index);
}

// Called when a layer-scope tensor is no longer needed: release its memory-plan slot.
void TensorBuilder::notifyLayerScopeLastUse(const LayerScopeTensorIndex &ls_index)
{
  _tensor_mgr->releaseLayerScopePlan(ls_index);
}

bool TensorBuilder::isRegistered(const ir::OperandIndex &index) const
{
return _tensor_info_map.find(index) != _tensor_info_map.end();
Expand All @@ -170,6 +201,29 @@ bool TensorBuilder::isRegisteredDisposableBackwardTensor(const DisposableTensorI
return _disposable_backprops.contains(index);
}

// Returns true if at least one layer-scope tensor has been registered for this operation.
bool TensorBuilder::isRegisteredLayerScopeTensor(const ir::OperationIndex &index) const
{
  return _operation_to_layerscope.find(index) != _operation_to_layerscope.end();
}

// Returns the set of layer-scope tensor indices registered for the given operation.
// Precondition: at least one layer-scope tensor was registered for this operation
// (check with isRegisteredLayerScopeTensor first).
const util::Set<LayerScopeTensorIndex> &
TensorBuilder::getRegisteredLayerScopeTensorIndex(const ir::OperationIndex &index) const
{
  const auto it = _operation_to_layerscope.find(index);
  assert(it != _operation_to_layerscope.end());
  return it->second;
}

// Reports the lifetime category of the layer-scope tensor held by the registry
// under the given index.
LayerScopeTensorLifeTime
TensorBuilder::getLayerScopeTensorLifeTime(const LayerScopeTensorIndex &index) const
{
  return _tensor_reg->layerscope_tensors().at(index)->lifetime();
}

void TensorBuilder::allocate(void)
{
_tensor_mgr->allocateNonConstTensors();
Expand All @@ -183,6 +237,8 @@ void TensorBuilder::allocateBackward(void)
_tensor_mgr->allocateDisposableBackPropTensors();
}

// Allocate backing memory for every registered layer-scope tensor.
void TensorBuilder::allocateLayerScope(void)
{
  _tensor_mgr->allocateLayerScopeTensors();
}

} // namespace train
} // namespace backend
} // namespace onert
14 changes: 14 additions & 0 deletions runtime/onert/backend/train/TensorBuilder.h
Original file line number Diff line number Diff line change
Expand Up @@ -18,10 +18,12 @@
#define __ONERT_BACKEND_TRAIN_TENSOR_BUILDER_H__

#include "DisposableTensorIndex.h"
#include "LayerScopeTensorIndex.h"
#include "TensorManager.h"
#include "TensorRegistry.h"
#include "util/Set.h"

#include <ir/OperationIndexMap.h>
#include <exec/train/optimizer/Optimizer.h>

namespace onert
Expand Down Expand Up @@ -55,20 +57,31 @@ class TensorBuilder
void registerDisposableBackwardTensorInfo(const DisposableTensorIndex &index,
const ir::OperandInfo &info);

void registerLayerScopeTensor(const LayerScopeTensorIndex &index,
std::shared_ptr<LayerScopeTensor> &info);

// TODO Support memory plan of all tensors
void notifyFirstUse(const ir::OperandIndex &);
void notifyLastUse(const ir::OperandIndex &);
void notifyBackwardFirstUse(const ir::OperandIndex &);
void notifyBackwardLastUse(const ir::OperandIndex &);
void notifyDisposableBackPropFirstUse(const DisposableTensorIndex &);
void notifyDisposableBackPropLastUse(const DisposableTensorIndex &);
void notifyLayerScopeFirstUse(const LayerScopeTensorIndex &);
void notifyLayerScopeLastUse(const LayerScopeTensorIndex &);

bool isRegistered(const ir::OperandIndex &) const;
bool isRegisteredBackward(const ir::OperandIndex &) const;
bool isRegisteredDisposableBackwardTensor(const DisposableTensorIndex &index) const;
bool isRegisteredLayerScopeTensor(const ir::OperationIndex &) const;

const util::Set<LayerScopeTensorIndex> &
getRegisteredLayerScopeTensorIndex(const ir::OperationIndex &) const;
ragmani marked this conversation as resolved.
Show resolved Hide resolved
LayerScopeTensorLifeTime getLayerScopeTensorLifeTime(const LayerScopeTensorIndex &) const;
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

These will be used in TensorPlanner : https://github.com/Samsung/ONE/pull/13486/files#r1795202407


void allocate(void);
void allocateBackward(void);
void allocateLayerScope(void);

private:
const std::shared_ptr<TensorRegistry> _tensor_reg;
Expand All @@ -77,6 +90,7 @@ class TensorBuilder
ir::OperandIndexMap<ir::OperandInfo> _backward_tensor_info_map;
ir::OperandIndexMap<bool> _as_constants;
util::Set<DisposableTensorIndex> _disposable_backprops;
ir::OperationIndexMap<util::Set<LayerScopeTensorIndex>> _operation_to_layerscope;
Copy link
Contributor Author

@zetwhite zetwhite Oct 10, 2024

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Each operation can have several LayerScopeTensors.
So, this _operation_to_layerscope map stores the operation_index-to-layerscope_tensor_index relation, which will be used for planning.

const exec::train::optimizer::Optimizer *_optimizer;
};

Expand Down