Skip to content

Commit

Permalink
[NNAPI EP] Track skipped initializer usage.
Browse files Browse the repository at this point in the history
  • Loading branch information
edgchen1 committed Jul 6, 2024
1 parent 9ef28f0 commit 2b6ce7b
Show file tree
Hide file tree
Showing 2 changed files with 26 additions and 5 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,13 @@ DEFINE_ADD_OPERAND_FROM_SCALAR(float, FLOAT32);
#undef DEFINE_ADD_OPERAND_FROM_SCALAR

// Marks an initializer as not needing to be registered as an NNAPI operand by
// decrementing its usage count. An initializer whose count reaches zero is
// skipped during RegisterInitializers().
void ModelBuilder::AddInitializerToSkip(const std::string& tensor_name) {
  // Decrement the usage count if this is a known initializer.
  // For simplicity, the OpBuilder::AddInitializersToSkip implementations may call this for arbitrary
  // input names without first checking whether the value is actually an initializer, so an
  // unrecognized name is silently ignored here.
  auto entry = initializer_usage_.find(tensor_name);
  if (entry != initializer_usage_.end()) {
    entry->second -= 1;
  }
}

Status ModelBuilder::Prepare() {
Expand Down Expand Up @@ -87,7 +93,16 @@ static size_t GetPaddedByteSize(size_t size) {
}

void ModelBuilder::PreprocessInitializers() {
const auto& initializers = GetInitializerTensors();

for (const auto& node_unit : node_unit_holder_) {
// Find all initializers consumed by this NodeUnit. Note that AddInitializersToSkip may later decrement the usage count.
for (const auto& input : node_unit->Inputs()) {
if (input.node_arg.Exists() && Contains(initializers, input.node_arg.Name())) {
initializer_usage_[input.node_arg.Name()]++;
}
}

if (const auto* op_builder = GetOpBuilder(*node_unit)) {
op_builder->AddInitializersToSkip(*this, *node_unit);
}
Expand Down Expand Up @@ -208,11 +223,16 @@ Status ModelBuilder::RegisterInitializers() {
std::vector<std::tuple<uint32_t, size_t, size_t>> initializers(initializer_size);
size_t sizeAll = 0;

const auto should_skip_initializer = [this](const std::string& name) -> bool {
const auto it = initializer_usage_.find(name);
return it == initializer_usage_.end() || it->second == 0;
};

int i = 0;
for (const auto& pair : initializer_tensors) {
const auto& tensor = *pair.second;
const auto& name = tensor.name();
if (Contains(skipped_initializers_, name))
if (should_skip_initializer(name))
continue;

Shape shape;
Expand Down Expand Up @@ -249,7 +269,7 @@ Status ModelBuilder::RegisterInitializers() {
size_t offset = 0;
for (const auto& pair : initializer_tensors) {
const auto& tensor = *pair.second;
if (Contains(skipped_initializers_, tensor.name()))
if (should_skip_initializer(tensor.name()))
continue;

auto [index, size, padded_size] = initializers[i++];
Expand Down Expand Up @@ -439,10 +459,11 @@ Status ModelBuilder::AddOperandFromPersistMemoryBuffer(
Status ModelBuilder::AddOperations() {
const auto& node_indices = graph_viewer_.GetNodesInTopologicalOrder();
for (const auto node_idx : node_indices) {
LOGS_DEFAULT(VERBOSE) << "Adding node [" << node_idx << "]";
const auto* node(graph_viewer_.GetNode(node_idx));
const NodeUnit& node_unit = GetNodeUnit(node);

LOGS_DEFAULT(VERBOSE) << "Adding node [" << node_unit.Name() << "] at index [" << node_unit.Index() << "]";

// Since a NodeUnit may contain multiple nodes, inserting a NodeUnit at the first occurrence of
// one of its nodes in topological order may produce an incorrect topological order of the
// NodeUnits, for example,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -134,7 +134,7 @@ class ModelBuilder {
std::unordered_set<std::string> operands_;
std::unordered_set<std::string> fused_activations_;

std::unordered_set<std::string> skipped_initializers_;
std::unordered_map<std::string, int> initializer_usage_;

// All activation nodes (Relu, Relu1, Relu6) as a map <const NodeUnit*, activation_code>
std::unordered_map<const NodeUnit*, int32_t> activation_node_units_;
Expand Down

0 comments on commit 2b6ce7b

Please sign in to comment.