[Graph] add inplace setting through layer property
- Now the in-place flag can be set through the "inplace" layer property of
tensor operation layers (see the usage sketch below).
- Renamed the "initializeInPlaceType" function to "initializeInPlace"; the
"is_inplace" member is now also set in that function.
- In some layers, the support_backwarding flag may change depending on the
in-place setting.

**Self evaluation:**
1. Build test:   [X]Passed [ ]Failed [ ]Skipped
2. Run test:     [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Seungbaek Hong <[email protected]>
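
A minimal usage sketch of the new property, for illustration only: the property
key "inplace" comes from props::InPlaceProp in this diff, while the
ml::train::createLayer factory call is assumed from nntrainer's ccapi.

```cpp
#include <layer.h> // nntrainer ccapi layer factory (assumed)

int main() {
  // Keep the "add" layer out-of-place; AddLayer::initializeInPlace() sets
  // is_inplace = false when the property is explicitly false.
  auto add = ml::train::createLayer("add", {"inplace=false"});

  // For "divide", enabling in-place also turns support_backwarding off in
  // initializeInPlace(), so prefer inplace=false when gradients are needed.
  auto div = ml::train::createLayer("divide", {"inplace=true"});
  return 0;
}
```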
baek2sm committed Nov 14, 2024
1 parent 9d43feb commit 4c3af20
Showing 23 changed files with 168 additions and 96 deletions.
2 changes: 1 addition & 1 deletion nntrainer/graph/network_graph.cpp
@@ -655,7 +655,7 @@ NetworkGraph::canExecuteInPlace(const std::shared_ptr<LayerNode> &lnode) {
     return InPlaceType::RESTRICTING;
   }
 
-  InPlaceType inplace_type = lnode->initializeInPlaceType();
+  InPlaceType inplace_type = lnode->initializeInPlace();
   /** Set inplace_type based on the input connections */
   switch (inplace_type) {
   /** A case where it cannot operate in-place */
1 change: 1 addition & 0 deletions nntrainer/layers/add_layer.cpp
@@ -11,6 +11,7 @@
  *
  */
 
+#include "common_properties.h"
 #include <add_layer.h>
 #include <nntrainer_error.h>
 #include <nntrainer_log.h>
22 changes: 20 additions & 2 deletions nntrainer/layers/add_layer.h
@@ -35,7 +35,8 @@ class AddLayer : public BinaryOperationLayer {
   /**
    * @brief Constructor of Add Layer
    */
-  AddLayer() : BinaryOperationLayer(), add_props(props::Print()) {}
+  AddLayer() :
+    BinaryOperationLayer(), add_props(props::Print(), props::InPlaceProp()) {}
 
   /**
    * @brief Move constructor of Add Layer.
@@ -74,6 +75,23 @@ class AddLayer : public BinaryOperationLayer {
    */
   bool supportBackwarding() const final { return true; };
 
+  /**
+   * @brief Initialize the in-place settings of the layer
+   * @return InPlaceType
+   */
+  InPlaceType initializeInPlace() final {
+    if (std::get<props::InPlaceProp>(add_props).empty() ||
+        std::get<props::InPlaceProp>(add_props).get()) {
+      is_inplace = true;
+    } else {
+      is_inplace = false;
+    }
+    if (!supportInPlace())
+      return InPlaceType::NONE;
+    else
+      return InPlaceType::NON_RESTRICTING;
+  }
+
   /**
    * @copydoc Layer::exportTo(Exporter &exporter, ml::train::ExportMethods
    * method)
@@ -91,7 +109,7 @@ class AddLayer : public BinaryOperationLayer {
    */
   const std::string getType() const final { return AddLayer::type; }
 
-  std::tuple<props::Print> add_props;
+  std::tuple<props::Print, props::InPlaceProp> add_props;
 
   inline static const std::string type = "add";
 };
14 changes: 9 additions & 5 deletions nntrainer/layers/bn_layer.h
@@ -100,6 +100,15 @@ class BatchNormalizationLayer : public Layer {
    */
   bool supportBackwarding() const override { return true; }
 
+  /**
+   * @brief Initialize the in-place settings of the layer
+   * @return InPlaceType
+   */
+  InPlaceType initializeInPlace() final {
+    is_inplace = true;
+    return InPlaceType::NON_RESTRICTING;
+  }
+
   using Layer::setProperty;
 
   /**
@@ -108,11 +117,6 @@ class BatchNormalizationLayer : public Layer {
    */
   void setProperty(const std::vector<std::string> &values) override;
 
-  /**
-   * @copydoc Layer::supportInPlace()
-   */
-  bool supportInPlace() const override { return true; }
-
   /**
    * @copydoc Layer::setBatch(RunLayerContext &context, unsigned int batch)
    */
8 changes: 6 additions & 2 deletions nntrainer/layers/cl_layers/reshape_cl.h
@@ -87,9 +87,13 @@ class ReshapeLayerCl : public Layer {
   bool supportBackwarding() const override { return false; };
 
   /**
-   * @copydoc Layer::supportInPlace()
+   * @brief Initialize the in-place settings of the layer
+   * @return InPlaceType
    */
-  bool supportInPlace() const override { return true; }
+  InPlaceType initializeInPlace() final {
+    is_inplace = true;
+    return InPlaceType::NON_RESTRICTING;
+  }
 
   /**
    * @copydoc Layer::exportTo(Exporter &exporter, ml::train::ExportMethods
10 changes: 10 additions & 0 deletions nntrainer/layers/common_properties.h
@@ -124,6 +124,16 @@ class TensorDimension : public TensorDimProperty {
   using prop_tag = dimension_prop_tag; /**< property type */
 };
 
+/**
+ * @brief Inplace operation property
+ *
+ */
+class InPlaceProp : public nntrainer::Property<bool> {
+public:
+  static constexpr const char *key = "inplace"; /**< unique key to access */
+  using prop_tag = bool_prop_tag;               /**< property type */
+};
+
 /**
  * @brief trainable property, use this to set and check how if certain layer is
  * trainable
30 changes: 27 additions & 3 deletions nntrainer/layers/divide_layer.h
@@ -30,7 +30,10 @@ class DivideLayer : public BinaryOperationLayer {
   /**
    * @brief Constructor of Divide Layer
    */
-  DivideLayer() : BinaryOperationLayer(), divide_props(props::Print()) {}
+  DivideLayer() :
+    BinaryOperationLayer(),
+    divide_props(props::Print(), props::InPlaceProp()),
+    support_backwarding(true) {}
 
   /**
    * @brief Destructor of Divide Layer
@@ -72,7 +75,27 @@ class DivideLayer : public BinaryOperationLayer {
   /**
    * @copydoc bool supportBackwarding() const
    */
-  bool supportBackwarding() const final { return true; };
+  bool supportBackwarding() const final { return support_backwarding; };
 
+  /**
+   * @brief Initialize the in-place settings of the layer
+   * @return InPlaceType
+   */
+  InPlaceType initializeInPlace() final {
+    if (std::get<props::InPlaceProp>(divide_props).empty() ||
+        !std::get<props::InPlaceProp>(divide_props).get()) {
+      is_inplace = false;
+      support_backwarding = true;
+    } else {
+      is_inplace = true;
+      support_backwarding = false;
+    }
+
+    if (!supportInPlace())
+      return InPlaceType::NONE;
+    else
+      return InPlaceType::NON_RESTRICTING;
+  }
+
   /**
    * @copydoc Layer::exportTo(Exporter &exporter, ml::train::ExportMethods
@@ -91,7 +114,8 @@ class DivideLayer : public BinaryOperationLayer {
    */
   const std::string getType() const final { return DivideLayer::type; };
 
-  std::tuple<props::Print> divide_props;
+  std::tuple<props::Print, props::InPlaceProp> divide_props;
+  bool support_backwarding; /**< support backwarding */
 
   inline static const std::string type = "divide";
 };
11 changes: 1 addition & 10 deletions nntrainer/layers/dropout.h
@@ -30,9 +30,7 @@ class DropOutLayer : public Layer {
    * @brief Constructor of DropOut Layer
    */
   DropOutLayer(float dropout = 0.0) :
-    Layer(),
-    dropout_rate(props::DropOutRate(dropout)),
-    epsilon(1e-3) {}
+    Layer(), dropout_rate(props::DropOutRate(dropout)), epsilon(1e-3) {}
 
   /**
    * @brief Destructor of DropOut Layer
@@ -89,13 +87,6 @@ class DropOutLayer : public Layer {
    */
   void setProperty(const std::vector<std::string> &values) override;
 
-  /**
-   * @copydoc Layer::supportInPlace()
-   *
-   * @todo Enable in-place support once supported by manager
-   */
-  bool supportInPlace() const override { return false; }
-
   inline static const std::string type = "dropout";
 
 private:
10 changes: 4 additions & 6 deletions nntrainer/layers/flatten_layer.h
@@ -60,14 +60,12 @@ class FlattenLayer : public ReshapeLayer {
   void setProperty(const std::vector<std::string> &values) override;
 
   /**
-   * @brief Initialize the in-place type of the layer
+   * @brief Initialize the in-place settings of the layer
    * @return InPlaceType
    */
-  InPlaceType initializeInPlaceType() final {
-    if (!supportInPlace())
-      return InPlaceType::NONE;
-    else
-      return InPlaceType::RESTRICTING;
+  InPlaceType initializeInPlace() final {
+    is_inplace = true;
+    return InPlaceType::RESTRICTING;
   }
 
   /**
15 changes: 4 additions & 11 deletions nntrainer/layers/identity_layer.h
@@ -70,19 +70,12 @@ class IdentityLayer final : public Layer {
   bool supportBackwarding() const override { return true; };
 
   /**
-   * @copydoc Layer::supportInPlace()
-   */
-  bool supportInPlace() const override { return true; }
-
-  /**
-   * @brief Initialize the in-place type of the layer
+   * @brief Initialize the in-place settings of the layer
    * @return InPlaceType
    */
-  InPlaceType initializeInPlaceType() final {
-    if (!supportInPlace())
-      return InPlaceType::NONE;
-    else
-      return InPlaceType::RESTRICTING;
+  InPlaceType initializeInPlace() final {
+    is_inplace = true;
+    return InPlaceType::RESTRICTING;
   }
 
   /**
4 changes: 1 addition & 3 deletions nntrainer/layers/input_layer.cpp
@@ -33,9 +33,7 @@ namespace nntrainer {
 static constexpr size_t SINGLE_INOUT_IDX = 0;
 
 InputLayer::InputLayer() :
-  Layer(),
-  input_props(props::Normalization(), props::Standardization()),
-  is_inplace(true) {}
+  Layer(), input_props(props::Normalization(), props::Standardization()) {}
 
 void InputLayer::setProperty(const std::vector<std::string> &values) {
   auto remain_props = loadProperties(values, input_props);
9 changes: 6 additions & 3 deletions nntrainer/layers/input_layer.h
@@ -80,9 +80,13 @@ class InputLayer : public Layer {
   bool supportBackwarding() const override { return false; };
 
   /**
-   * @copydoc Layer::supportInPlace()
+   * @brief Initialize the in-place settings of the layer
+   * @return InPlaceType
    */
-  bool supportInPlace() const override { return is_inplace; }
+  InPlaceType initializeInPlace() final {
+    is_inplace = true;
+    return InPlaceType::NON_RESTRICTING;
+  }
 
   /**
    * @copydoc Layer::exportTo(Exporter &exporter, ml::train::ExportMethods
@@ -105,7 +109,6 @@ class InputLayer : public Layer {
 
 private:
   std::tuple<props::Normalization, props::Standardization> input_props;
-  bool is_inplace = true;
 };
 } // namespace nntrainer
 
9 changes: 6 additions & 3 deletions nntrainer/layers/layer_devel.h
@@ -258,18 +258,18 @@ class Layer {
    * @details all layers default to out of place execution
    * @note all layers default to out of place execution
    */
-  virtual bool supportInPlace() const { return false; }
+  virtual bool supportInPlace() const { return is_inplace; }
 
   /**
-   * @brief Initialize the in-place type of the layer
+   * @brief Initialize the in-place settings of the layer
    * @details If it is a layer that supports in-place, the default in-place type
    * is NONE_RESTRICTING, but if there is a RESTRICTING type among the input
    * layers, it is set to NONE in the network_graph.cpp.
    * Layers with exceptional behavior such as No-Operation layers should
    * override this function.
    * @return InPlaceType
    */
-  virtual InPlaceType initializeInPlaceType() {
+  virtual InPlaceType initializeInPlace() {
     if (!supportInPlace())
       return InPlaceType::NONE;
     else
@@ -292,6 +292,9 @@ class Layer {
    * @return true if supports backwarding, else false
    */
   virtual bool supportBackwarding() const = 0;
+
+protected:
+  bool is_inplace = false; /**< whether this layer is in-place or not */
 };
 
 /// @todo Decide where to put and how to implement(#986)
6 changes: 3 additions & 3 deletions nntrainer/layers/layer_node.cpp
@@ -931,11 +931,11 @@ bool LayerNode::supportInPlace() const {
 }
 
 /**
- * @brief Initialize the in-place type of the layer
+ * @brief Initialize the in-place settings of the layer
  * @return InPlaceType
  */
-InPlaceType LayerNode::initializeInPlaceType() {
-  inplace_type = layer->initializeInPlaceType();
+InPlaceType LayerNode::initializeInPlace() {
+  inplace_type = layer->initializeInPlace();
   return inplace_type;
 }
 
4 changes: 2 additions & 2 deletions nntrainer/layers/layer_node.h
@@ -354,15 +354,15 @@ class LayerNode final : public ml::train::Layer, public GraphNode {
   bool supportInPlace() const;
 
   /**
-   * @brief Initialize the in-place type of the layer
+   * @brief Initialize the in-place settings of the layer
    * @details If it is a layer that supports in-place, the default in-place type
    * is NONE_RESTRICTING, but if there is a RESTRICTING type among the input
    * layers, it is set to NONE in the network_graph.cpp.
    * Layers with exceptional behavior such as No-Operation layers should
    * override this function.
    * @return InPlaceType
    */
-  InPlaceType initializeInPlaceType();
+  InPlaceType initializeInPlace();
   /**
    * @brief Notify that this layer will execute in-place
    *
14 changes: 9 additions & 5 deletions nntrainer/layers/layer_normalization_layer.h
@@ -99,18 +99,22 @@ class LayerNormalizationLayer : public Layer {
    */
   bool supportBackwarding() const override { return true; }
 
+  /**
+   * @brief Initialize the in-place settings of the layer
+   * @return InPlaceType
+   */
+  InPlaceType initializeInPlace() final {
+    is_inplace = true;
+    return InPlaceType::NON_RESTRICTING;
+  }
+
   using Layer::setProperty;
 
   /**
    * @copydoc Layer::setProperty(const std::vector<std::string> &values)
    */
   void setProperty(const std::vector<std::string> &values) override;
 
-  /**
-   * @copydoc Layer::supportInPlace()
-   */
-  bool supportInPlace() const override { return true; }
-
   /**
    * @copydoc Layer::setBatch(RunLayerContext &context, unsigned int batch)
    */
4 changes: 0 additions & 4 deletions nntrainer/layers/loss/loss_layer.h
@@ -47,8 +47,6 @@ class LossLayer : public Layer {
    */
   virtual bool supportBackwarding() const override { return true; }
 
-  bool supportInPlace() const override { return is_inplace; }
-
   /**
    * @copydoc Layer::requireLabel()
    */
@@ -71,8 +69,6 @@ class LossLayer : public Layer {
 
   Tensor
     l; /**< loss tensor to store intermediate value to calculate loss value */
-
-  bool is_inplace = true;
 };
 
 } // namespace nntrainer