pass layer names to ops
goliaro committed Jan 15, 2024
1 parent bc6b930 commit a348152
Showing 87 changed files with 455 additions and 52 deletions.
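Most of the changed files follow a single pattern: each operator's Params struct gains a fixed-size `char name[MAX_OPNAME]` field, `get_params()` copies the operator's name into it, and the params-based constructor passes `params.name` back down, so layer names survive when FlexFlow recreates an operator from its params. The sketch below shows that pattern in isolation; `FooParams`/`Foo` are hypothetical stand-ins for the real operator types, and the `MAX_OPNAME` value is assumed rather than taken from FlexFlow's headers.

// Minimal sketch of the pattern this commit applies to ~40 operators.
// FooParams/Foo are hypothetical; FlexFlow defines MAX_OPNAME itself.
#include <cstring>

constexpr int MAX_OPNAME = 128; // assumed value

struct FooParams {
  int k;                 // pre-existing op-specific field
  char name[MAX_OPNAME]; // new: carries the layer name with the params
};

struct Foo {
  int k;
  char name[MAX_OPNAME];

  explicit Foo(FooParams const &params) : k(params.k) {
    // The constructor now receives the name via params instead of a
    // separate caller-supplied string, so a recreated op keeps its name.
    std::strncpy(this->name, params.name, MAX_OPNAME - 1);
    this->name[MAX_OPNAME - 1] = '\0';
  }

  FooParams get_params() const {
    FooParams params;
    params.k = this->k;
    // Mirror of the get_params() changes: copy the op's name into params.
    std::strncpy(params.name, this->name, MAX_OPNAME - 1);
    params.name[MAX_OPNAME - 1] = '\0';
    return params;
  }
};

Keeping the name in a fixed-size char array rather than a std::string also keeps the params structs trivially copyable, which presumably fits their use as value-type keys in `get_or_create_node`.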
1 change: 1 addition & 0 deletions include/flexflow/ops/add_bias_residual_layer_norm_params.h
@@ -12,6 +12,7 @@ struct AddBiasResidualLayerNormParams {
   bool elementwise_affine;
   float eps;
   bool use_bias;
+  char name[MAX_OPNAME];
   bool is_valid(
       std::pair<ParallelTensorShape, ParallelTensorShape> const &) const;
 };
1 change: 1 addition & 0 deletions include/flexflow/ops/aggregate_params.h
@@ -9,6 +9,7 @@ namespace FlexFlow {
 struct AggregateParams {
   int n;
   float lambda_bal;
+  char name[MAX_OPNAME];
   bool is_valid(std::vector<ParallelTensorShape> const &) const;
 };
 bool operator==(AggregateParams const &, AggregateParams const &);
1 change: 1 addition & 0 deletions include/flexflow/ops/aggregate_spec_params.h
@@ -9,6 +9,7 @@ namespace FlexFlow {
 struct AggregateSpecParams {
   int n;
   float lambda_bal;
+  char name[MAX_OPNAME];
   bool is_valid(ParallelTensorShape const &) const;
 };
 bool operator==(AggregateSpecParams const &, AggregateSpecParams const &);
1 change: 1 addition & 0 deletions include/flexflow/ops/arg_topk_params.h
@@ -12,6 +12,7 @@ struct ArgTopKParams {
   int k;
   bool sorted;
   bool speculative_decoding;
+  char name[MAX_OPNAME];
   bool is_valid(ParallelTensorShape const &) const;
 };
 bool operator==(ArgTopKParams const &, ArgTopKParams const &);
1 change: 1 addition & 0 deletions include/flexflow/ops/argmax_params.h
@@ -9,6 +9,7 @@ namespace FlexFlow {
 struct ArgMaxParams {
   bool beam_search;
   bool is_valid(ParallelTensorShape const &) const;
+  char name[MAX_OPNAME];
 };
 bool operator==(ArgMaxParams const &, ArgMaxParams const &);
 
1 change: 1 addition & 0 deletions include/flexflow/ops/attention_params.h
@@ -11,6 +11,7 @@ struct MultiHeadAttentionParams {
   int embed_dim, num_heads, kdim, vdim;
   float dropout;
   bool bias, add_bias_kv, add_zero_attn;
+  char name[MAX_OPNAME];
 
   bool is_valid(std::tuple<ParallelTensorShape,
                            ParallelTensorShape,
1 change: 1 addition & 0 deletions include/flexflow/ops/batch_matmul_params.h
@@ -6,6 +6,7 @@ namespace FlexFlow {
 
 struct BatchMatmulParams {
   int a_seq_length_dim, b_seq_length_dim;
+  char name[MAX_OPNAME];
   bool is_valid(
       std::pair<ParallelTensorShape, ParallelTensorShape> const &) const;
 };
1 change: 1 addition & 0 deletions include/flexflow/ops/beam_topk_params.h
@@ -11,6 +11,7 @@ struct BeamTopKParams {
   LayerID layer_guid;
   bool sorted;
   int max_beam_width;
+  char name[MAX_OPNAME];
   bool is_valid(ParallelTensorShape const &) const;
 };
 bool operator==(BeamTopKParams const &, BeamTopKParams const &);
1 change: 1 addition & 0 deletions include/flexflow/ops/cast_params.h
@@ -8,6 +8,7 @@ namespace FlexFlow {
 
 struct CastParams {
   DataType dtype;
+  char name[MAX_OPNAME];
   bool is_valid(ParallelTensorShape const &) const;
 };
 bool operator==(CastParams const &, CastParams const &);
2 changes: 1 addition & 1 deletion include/flexflow/ops/concat_params.h
@@ -7,7 +7,7 @@ namespace FlexFlow {
 
 struct ConcatParams {
   int axis;
-
+  char name[MAX_OPNAME];
   bool is_valid(std::vector<ParallelTensorShape> const &) const;
 };
 
1 change: 1 addition & 0 deletions include/flexflow/ops/conv_2d_params.h
@@ -13,6 +13,7 @@ struct Conv2DParams {
       padding_w, groups;
   ActiMode activation;
   bool use_bias;
+  char name[MAX_OPNAME];
 
   bool is_valid(ParallelTensorShape const &input) const;
   void solve_dims(ParallelTensorShape const &input,
1 change: 1 addition & 0 deletions include/flexflow/ops/dropout_params.h
@@ -9,6 +9,7 @@ namespace FlexFlow {
 struct DropoutParams {
   float rate;
   unsigned long long seed;
+  char name[MAX_OPNAME];
   bool is_valid(ParallelTensorShape const &) const;
 };
 bool operator==(DropoutParams const &, DropoutParams const &);
1 change: 1 addition & 0 deletions include/flexflow/ops/element_binary_params.h
@@ -11,6 +11,7 @@ struct ElementBinaryParams {
   LayerID layer_guid;
   OperatorType type;
   bool inplace_a;
+  char name[MAX_OPNAME];
 
   bool is_valid(
       std::pair<ParallelTensorShape, ParallelTensorShape> const &) const;
1 change: 1 addition & 0 deletions include/flexflow/ops/element_unary_params.h
@@ -12,6 +12,7 @@ struct ElementUnaryParams {
   bool inplace;
   float scalar = 0.0;
   LayerID layer_guid;
+  char name[MAX_OPNAME];
 
   bool is_valid(ParallelTensorShape const &) const;
 };
1 change: 1 addition & 0 deletions include/flexflow/ops/embedding_params.h
@@ -12,6 +12,7 @@ struct EmbeddingParams {
   LayerID layer_guid;
   AggrMode aggr;
   DataType data_type;
+  char name[MAX_OPNAME];
 
   bool is_valid(ParallelTensorShape const &) const;
 };
1 change: 1 addition & 0 deletions include/flexflow/ops/experts_params.h
@@ -17,6 +17,7 @@ struct ExpertsParams {
   int experts_internal_dim_size;
   bool use_bias;
   ActiMode activation;
+  char name[MAX_OPNAME];
 
   bool is_valid(std::vector<ParallelTensorShape> const &) const;
 };
1 change: 1 addition & 0 deletions include/flexflow/ops/flat_params.h
@@ -7,6 +7,7 @@
 namespace FlexFlow {
 
 struct FlatParams {
+  char name[MAX_OPNAME];
   bool is_valid(ParallelTensorShape const &) const;
   void solve_dims(ParallelTensorShape const &input,
                   ParallelDim output_dims[MAX_TENSOR_DIM],
1 change: 1 addition & 0 deletions include/flexflow/ops/gather_params.h
@@ -10,6 +10,7 @@ namespace FlexFlow {
 struct GatherParams {
   int legion_dim;
   LayerID layer_guid;
+  char name[MAX_OPNAME];
   bool is_valid(
       std::pair<ParallelTensorShape, ParallelTensorShape> const &input) const;
 };
1 change: 1 addition & 0 deletions include/flexflow/ops/groupby_params.h
@@ -9,6 +9,7 @@ namespace FlexFlow {
 struct Group_byParams {
   int n;
   float alpha;
+  char name[MAX_OPNAME];
   bool is_valid(
       std::pair<ParallelTensorShape, ParallelTensorShape> const &) const;
 };
1 change: 1 addition & 0 deletions include/flexflow/ops/inc_multihead_self_attention_params.h
@@ -16,6 +16,7 @@ struct IncMultiHeadSelfAttentionParams {
       scaling_query, qk_prod_scaling, position_bias;
   DataType quantization_type;
   bool offload;
+  char name[MAX_OPNAME];
   bool is_valid(ParallelTensorShape const &) const;
 };
 
1 change: 1 addition & 0 deletions include/flexflow/ops/layer_norm_params.h
@@ -12,6 +12,7 @@ struct LayerNormParams {
   bool elementwise_affine;
   float eps;
   bool use_bias;
+  char name[MAX_OPNAME];
   bool is_valid(ParallelTensorShape const &) const;
 };
 
1 change: 1 addition & 0 deletions include/flexflow/ops/linear_params.h
@@ -20,6 +20,7 @@ class LinearParams {
   float kernel_reg_lambda;
   DataType quantization_type;
   bool offload;
+  char name[MAX_OPNAME];
 
   bool is_valid(ParallelTensorShape const &input_shape) const;
   void solve_dims(const ParallelTensor input,
1 change: 1 addition & 0 deletions include/flexflow/ops/pool_2d_params.h
@@ -10,6 +10,7 @@ struct Pool2DParams {
   int kernel_h, kernel_w, stride_h, stride_w, padding_h, padding_w;
   PoolType pool_type;
   ActiMode activation;
+  char name[MAX_OPNAME];
 
   bool is_valid(ParallelTensorShape const &input) const;
   void solve_dims(ParallelTensorShape const &input,
1 change: 1 addition & 0 deletions include/flexflow/ops/reduce_params.h
@@ -10,6 +10,7 @@ struct ReduceParams {
   std::vector<int> axes;
   bool keepdims;
   LayerID layer_guid;
+  char name[MAX_OPNAME];
 
   bool is_valid(ParallelTensorShape const &) const;
 };
1 change: 1 addition & 0 deletions include/flexflow/ops/reshape_params.h
@@ -10,6 +10,7 @@ namespace FlexFlow {
 struct ReshapeParams {
   std::vector<int> shape;
   LayerID layer_guid;
+  char name[MAX_OPNAME];
 
   bool is_valid(ParallelTensorShape const &) const;
 };
1 change: 1 addition & 0 deletions include/flexflow/ops/residual_layer_norm_params.h
@@ -13,6 +13,7 @@ struct ResidualLayerNormParams {
   float eps;
   bool use_bias;
   bool use_two_residuals;
+  char name[MAX_OPNAME];
   bool is_valid(std::tuple<ParallelTensorShape,
                            ParallelTensorShape,
                            ParallelTensorShape> const &) const;
1 change: 1 addition & 0 deletions include/flexflow/ops/residual_rms_norm_params.h
@@ -11,6 +11,7 @@ struct ResidualRMSNormParams {
   LayerID layer_guid;
   float eps;
   int dim;
+  char name[MAX_OPNAME];
   bool is_valid(
       std::pair<ParallelTensorShape, ParallelTensorShape> const &input) const;
 };
1 change: 1 addition & 0 deletions include/flexflow/ops/rms_norm_params.h
@@ -11,6 +11,7 @@ struct RMSNormParams {
   LayerID layer_guid;
   float eps;
   int dim;
+  char name[MAX_OPNAME];
   bool is_valid(ParallelTensorShape const &) const;
 };
 
1 change: 1 addition & 0 deletions include/flexflow/ops/sampling_params.h
@@ -8,6 +8,7 @@ namespace FlexFlow {
 
 struct SamplingParams {
   float top_p;
+  char name[MAX_OPNAME];
   bool is_valid(ParallelTensorShape const &) const;
 };
 bool operator==(SamplingParams const &, SamplingParams const &);
1 change: 1 addition & 0 deletions include/flexflow/ops/sigmoid_silu_multi_params.h
@@ -8,6 +8,7 @@ namespace FlexFlow {
 
 struct SigmoidSiluMultiParams {
   LayerID layer_guid;
+  char name[MAX_OPNAME];
   bool is_valid(
       std::pair<ParallelTensorShape, ParallelTensorShape> const &) const;
 };
1 change: 1 addition & 0 deletions include/flexflow/ops/softmax_params.h
@@ -7,6 +7,7 @@ namespace FlexFlow {
 
 struct SoftmaxParams {
   int dim;
+  char name[MAX_OPNAME];
   bool is_valid(ParallelTensorShape const &) const;
 };
 bool operator==(SoftmaxParams const &, SoftmaxParams const &);
2 changes: 1 addition & 1 deletion include/flexflow/ops/spec_inc_multihead_self_attention_params.h
@@ -13,7 +13,7 @@ struct SpecIncMultiHeadSelfAttentionParams {
   float dropout, scaling_factor;
   bool qkv_bias, final_bias, add_zero_attn, apply_rotary_embedding,
       scaling_query, qk_prod_scaling, position_bias;
-
+  char name[MAX_OPNAME];
   bool is_valid(ParallelTensorShape const &) const;
 };
 
1 change: 1 addition & 0 deletions include/flexflow/ops/split_params.h
@@ -8,6 +8,7 @@ namespace FlexFlow {
 struct SplitParams {
   std::vector<int> splits;
   int legion_axis;
+  char name[MAX_OPNAME];
   bool is_valid(ParallelTensorShape const &) const;
 };
 
1 change: 1 addition & 0 deletions include/flexflow/ops/topk_params.h
@@ -9,6 +9,7 @@ namespace FlexFlow {
 struct TopKParams {
   int k;
   bool sorted;
+  char name[MAX_OPNAME];
   bool is_valid(ParallelTensorShape const &) const;
 };
 bool operator==(TopKParams const &, TopKParams const &);
1 change: 1 addition & 0 deletions include/flexflow/ops/transpose_params.h
@@ -6,6 +6,7 @@ namespace FlexFlow {
 
 struct TransposeParams {
   std::vector<int> perm;
+  char name[MAX_OPNAME];
   bool is_valid(ParallelTensorShape const &) const;
 };
 
1 change: 1 addition & 0 deletions include/flexflow/ops/tree_inc_multihead_self_attention_params.h
@@ -16,6 +16,7 @@ struct TreeIncMultiHeadSelfAttentionParams {
       scaling_query, qk_prod_scaling, position_bias;
   DataType quantization_type;
   bool offload;
+  char name[MAX_OPNAME];
   bool is_valid(ParallelTensorShape const &) const;
 };
 
1 change: 1 addition & 0 deletions include/flexflow/parallel_ops/allreduce_params.h
@@ -5,6 +5,7 @@ namespace FlexFlow {
 
 struct AllReduceParams {
   int allreduce_legion_dim;
+  char name[MAX_OPNAME];
   bool is_valid(ParallelTensorShape const &) const;
 };
 bool operator==(AllReduceParams const &, AllReduceParams const &);
1 change: 1 addition & 0 deletions include/flexflow/parallel_ops/combine_params.h
@@ -6,6 +6,7 @@ namespace FlexFlow {
 struct CombineParams {
   int combine_legion_dim;
   int combine_degree;
+  char name[MAX_OPNAME];
   bool is_valid(ParallelTensorShape const &) const;
 };
 bool operator==(CombineParams const &, CombineParams const &);
1 change: 1 addition & 0 deletions include/flexflow/parallel_ops/fused_parallel_op_params.h
@@ -7,6 +7,7 @@ namespace FlexFlow {
 
 struct FusedParallelOpParams {
   std::vector<ParallelOpInfo> parallel_ops;
+  char name[MAX_OPNAME];
   bool is_valid(ParallelTensorShape const &) const;
 };
 bool operator==(FusedParallelOpParams const &, FusedParallelOpParams const &);
1 change: 1 addition & 0 deletions include/flexflow/parallel_ops/partition_params.h
@@ -6,6 +6,7 @@ namespace FlexFlow {
 struct RepartitionParams {
   int repartition_legion_dim;
   int repartition_degree;
+  char name[MAX_OPNAME];
   bool is_valid(ParallelTensorShape const &) const;
 };
 bool operator==(RepartitionParams const &, RepartitionParams const &);
1 change: 1 addition & 0 deletions include/flexflow/parallel_ops/reduction_params.h
@@ -6,6 +6,7 @@ namespace FlexFlow {
 struct ReductionParams {
   int reduction_legion_dim;
   int reduction_degree;
+  char name[MAX_OPNAME];
   bool is_valid(ParallelTensorShape const &) const;
 };
 bool operator==(ReductionParams const &, ReductionParams const &);
1 change: 1 addition & 0 deletions include/flexflow/parallel_ops/replicate_params.h
@@ -6,6 +6,7 @@ namespace FlexFlow {
 struct ReplicateParams {
   int replicate_legion_dim;
   int replicate_degree;
+  char name[MAX_OPNAME];
   bool is_valid(ParallelTensorShape const &) const;
 };
 bool operator==(ReplicateParams const &, ReplicateParams const &);
12 changes: 11 additions & 1 deletion src/ops/add_bias_residual_layer_norm.cc
@@ -58,6 +58,9 @@ AddBiasResidualLayerNormParams AddBiasResidualLayerNorm::get_params() const {
   params.elementwise_affine = this->elementwise_affine;
   params.eps = this->eps;
   params.use_bias = this->use_bias;
+  if (this->name != nullptr) {
+    strcpy(params.name, this->name);
+  }
   return params;
 }
 
@@ -213,7 +216,7 @@ AddBiasResidualLayerNorm::AddBiasResidualLayerNorm(
       params.use_bias,
       params.eps,
       allocate_weights,
-      name) {}
+      params.name) {}
 
 AddBiasResidualLayerNorm::AddBiasResidualLayerNorm(
     FFModel &model,
@@ -755,6 +758,8 @@ void AddBiasResidualLayerNorm::serialize(Legion::Serializer &sez) const {
   sez.serialize(this->elementwise_affine);
   sez.serialize(this->eps);
   sez.serialize(this->use_bias);
+  sez.serialize(strlen(this->name));
+  sez.serialize(this->name, strlen(this->name));
 }
 
 using PCG::Node;
@@ -783,13 +788,18 @@ Node AddBiasResidualLayerNorm::deserialize(FFModel &ff,
   dez.deserialize(elementwise_affine);
   dez.deserialize(eps);
   dez.deserialize(use_bias);
+  size_t name_len;
+  char name[MAX_OPNAME] = {0};
+  dez.deserialize(name_len);
+  dez.deserialize(name, name_len);
 
   AddBiasResidualLayerNormParams params;
   params.layer_guid = layer_guid;
   params.axes = axes;
   params.elementwise_affine = elementwise_affine;
   params.eps = eps;
   params.use_bias = use_bias;
+  strcpy(params.name, name);
   return ff.get_or_create_node<AddBiasResidualLayerNorm>({inputs[0], inputs[1]},
                                                          params);
 }
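The serializer changes above write the name length-prefixed and without a trailing NUL, which is why the deserializer zero-initializes `char name[MAX_OPNAME] = {0}` before reading back exactly `name_len` bytes. Below is a self-contained sketch of that round trip; the `Serializer`/`Deserializer` here are trivial byte-buffer stand-ins for `Legion::Serializer`/`Legion::Deserializer`, not Legion's real API, and the layer name is hypothetical.

// Stand-ins for Legion's serializer types, just enough to show the
// length-prefixed name round trip used in the commit.
#include <cassert>
#include <cstddef>
#include <cstring>
#include <vector>

constexpr int MAX_OPNAME = 128; // assumed; FlexFlow defines its own value

struct Serializer {
  std::vector<char> buf;
  void serialize(size_t v) {
    char const *p = reinterpret_cast<char const *>(&v);
    buf.insert(buf.end(), p, p + sizeof(v));
  }
  void serialize(char const *s, size_t n) { buf.insert(buf.end(), s, s + n); }
};

struct Deserializer {
  std::vector<char> buf;
  size_t off = 0;
  void deserialize(size_t &v) {
    std::memcpy(&v, buf.data() + off, sizeof(v));
    off += sizeof(v);
  }
  void deserialize(char *s, size_t n) {
    std::memcpy(s, buf.data() + off, n);
    off += n;
  }
};

int main() {
  char const *op_name = "layers_0_attention"; // hypothetical layer name

  // Serialize: length first, then exactly that many bytes, no trailing NUL.
  Serializer sez;
  sez.serialize(std::strlen(op_name));
  sez.serialize(op_name, std::strlen(op_name));

  // Deserialize: the = {0} initializer supplies the NUL terminator that the
  // byte stream itself never carried.
  Deserializer dez;
  dez.buf = sez.buf;
  size_t name_len;
  char name[MAX_OPNAME] = {0};
  dez.deserialize(name_len);
  dez.deserialize(name, name_len);

  assert(std::strcmp(name, op_name) == 0);
  return 0;
}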
(Diff truncated: the remaining changed files were not loaded on this page.)
