From 0a433fbb26c9768a0e5db8c59ebe69b88646d631 Mon Sep 17 00:00:00 2001
From: abap34
Date: Sat, 2 Sep 2023 19:20:43 +0900
Subject: [PATCH 1/7] add error message for dimension mismatch

---
 src/core/autodiff/broadcast/sum_to.jl | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/core/autodiff/broadcast/sum_to.jl b/src/core/autodiff/broadcast/sum_to.jl
index 17b3082..d0fe0e9 100644
--- a/src/core/autodiff/broadcast/sum_to.jl
+++ b/src/core/autodiff/broadcast/sum_to.jl
@@ -20,7 +20,7 @@ function sum_to(x::T, out_shape) where {T <: AbstractArray}
         dims = (findall(in_shape[1:(end - lead)] .!= out_shape)..., lead_axis...)
         return dropdims(sum(x, dims = dims), dims = lead_axis)
     else
-        # TODO:implement error
+        throw(DimensionMismatch("Input shape $in_shape cannot be reduced to $out_shape"))
     end
 end

From bb67af9ba444cbbb6805ca2b0479f9d7571888aa Mon Sep 17 00:00:00 2001
From: abap34
Date: Sat, 2 Sep 2023 19:21:12 +0900
Subject: [PATCH 2/7] update error message for undefined forward/backward

---
 src/core/autodiff/propagation.jl | 8 ++------
 1 file changed, 2 insertions(+), 6 deletions(-)

diff --git a/src/core/autodiff/propagation.jl b/src/core/autodiff/propagation.jl
index 543455e..0bf799f 100644
--- a/src/core/autodiff/propagation.jl
+++ b/src/core/autodiff/propagation.jl
@@ -3,17 +3,13 @@ using GPUArraysCore
 using ..JITrench

-struct NotImplementedError <: Exception end
-
-# TODO: Better error massage
-Base.showerror(io::IO, e::NotImplementedError) = print(io, "Not Implemented")

 function forward(args...)
-    throw(NotImplementedError())
+    throw(ArgumentError("Not Implemented forward function. args: $args"))
 end

 function backward(args...)
-    throw(NotImplementedError())
+    throw(ArgumentError("Not Implemented backward function. args: $args"))
 end

 function out_to_tensor(

From a4ccbd06873caa97901dcc42afeddb75342df3f5 Mon Sep 17 00:00:00 2001
From: abap34
Date: Sat, 2 Sep 2023 19:22:25 +0900
Subject: [PATCH 3/7] add error message for invalid batch size

---
 src/nn/data/dataloader.jl | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/src/nn/data/dataloader.jl b/src/nn/data/dataloader.jl
index 3e8e9f1..8a94002 100644
--- a/src/nn/data/dataloader.jl
+++ b/src/nn/data/dataloader.jl
@@ -6,16 +6,17 @@ struct DataLoader
     batch_size :: Int
     shuffle :: Bool
     index :: Vector{Int}
-    function DataLoader(dataset; batch_size=1, shuffle=false)
+    function DataLoader(dataset; batch_size::Int=1, shuffle=false)
+        if batch_size > length(dataset)
+            throw(DomainError("Batch size must be less than or equal to the length of dataset. Batch size: $(batch_size), Dataset length: $(length(dataset))"))
+        elseif batch_size < 1
+            throw(DomainError("Batch size must be greater than or equal to 1. Batch size: $(batch_size)"))
+        end
         new(dataset, batch_size, shuffle, zeros(Int, length(dataset)))
     end
 end

 function Base.iterate(loader::DataLoader)
-    if loader.batch_size > length(loader.dataset)
-        # TODO: better error
-        throw(DomainError("batch size > data length error"))
-    end
     loader.index .= randperm(length(loader.dataset))
     data = loader.dataset[1:loader.batch_size]
     head = loader.batch_size + 1
From 76f3d0eee64f4e18a0b77231c723bca06b2891ea Mon Sep 17 00:00:00 2001
From: abap34
Date: Sat, 2 Sep 2023 19:22:45 +0900
Subject: [PATCH 4/7] add error message for float array

---
 src/nn/function/metrics.jl | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/src/nn/function/metrics.jl b/src/nn/function/metrics.jl
index 450b94c..8ed78da 100644
--- a/src/nn/function/metrics.jl
+++ b/src/nn/function/metrics.jl
@@ -1,6 +1,5 @@
 function accuracy(::AbstractArray{<:AbstractFloat}, ::AbstractArray{<:AbstractFloat})
-    # TODO impl error
-    throw(DomainError(""))
+    throw(DomainError("Accuracy is not defined for floating point arrays."))
 end

From 9bb2ff554bfbfcc6f974aa07f2c52b9c76ffb5fd Mon Sep 17 00:00:00 2001
From: abap34
Date: Sat, 2 Sep 2023 19:24:29 +0900
Subject: [PATCH 5/7] make Parameter contain meta data

---
 src/nn/layer/layer.jl      | 14 +++++++++++++-
 src/nn/layer/parameters.jl |  7 +++++--
 2 files changed, 18 insertions(+), 3 deletions(-)

diff --git a/src/nn/layer/layer.jl b/src/nn/layer/layer.jl
index 6c3a7b7..aca60e4 100644
--- a/src/nn/layer/layer.jl
+++ b/src/nn/layer/layer.jl
@@ -1,7 +1,19 @@
 using ..JITrench
 using DataStructures: OrderedDict, DefaultDict

-Parameter = OrderedDict{String, Dict{String, <: AbstractTensor}}
+struct Parameter
+    weight :: OrderedDict{String, Dict{String, <: AbstractTensor}}
+    layer_names :: Vector{String}
+    meta :: Dict{String, Any}
+    function Parameter(weight::OrderedDict{String, Dict{String, <: AbstractTensor}})
+        layer_names = Vector{String}(undef, length(weight))
+        for (i, key) in enumerate(keys(weight))
+            layer_names[i] = key
+        end
+        return new(weight, layer_names, Dict{String, Any}())
+    end
+end
+

 abstract type Layer end

diff --git a/src/nn/layer/parameters.jl b/src/nn/layer/parameters.jl
index a64855f..d3913ea 100644
--- a/src/nn/layer/parameters.jl
+++ b/src/nn/layer/parameters.jl
@@ -1,17 +1,20 @@
 function iterate_layer(params::Parameter)
-    return params
+    return params.weight
 end

 function iterate_all(params::Parameter)
-    return Base.Iterators.map(x -> x.second, Iterators.flatten(values(params)))
+    return Base.Iterators.map(x -> x.second, Iterators.flatten(values(params.weight)))
 end

 function cleargrads!(params::Parameter)
     for param in iterate_all(params)
         JITrench.AutoDiff.cleargrad!(param)
     end
 end

+function layer_names(params::Parameter)
+    return params.layer_names
+end

From 93e88c996dc4d1d37d4c33a32eb8421c8c83d5a3 Mon Sep 17 00:00:00 2001
From: abap34
Date: Sat, 2 Sep 2023 19:24:45 +0900
Subject: [PATCH 6/7] add error message for dimension mismatch

---
 src/nn/layer/linear.jl | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/src/nn/layer/linear.jl b/src/nn/layer/linear.jl
index 6e84b6a..b7464ba 100644
--- a/src/nn/layer/linear.jl
+++ b/src/nn/layer/linear.jl
@@ -32,8 +32,7 @@ function (linear::Linear)(initializer::Initializer)
     device = initializer.device
     if !(linear.in_dim isa Nothing)
         if in_dim != linear.in_dim
-            # TODO: impl Error
-            throw(DimensionMismatch(""))
+            throw(DimensionMismatch("Input dimension $in_dim does not match the expected dimension $(linear.in_dim)"))
         end
     end
     out_dim = linear.out_dim
From ce950bd6a223aa0c50bfe5458832b7fc831f7978 Mon Sep 17 00:00:00 2001
From: abap34
Date: Sat, 2 Sep 2023 19:24:57 +0900
Subject: [PATCH 7/7] add error message for invalid gpu idx

---
 src/core/autodiff/device.jl | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/core/autodiff/device.jl b/src/core/autodiff/device.jl
index a1fe36f..b023ed7 100644
--- a/src/core/autodiff/device.jl
+++ b/src/core/autodiff/device.jl
@@ -6,7 +6,7 @@ struct GPU <: Device
     idx::Int64
     function GPU(idx::Int64)
         if idx < 0
-            # TODO: implement Error
+            throw(ArgumentError("GPU index must be non-negative. Passed idx: $idx"))
         end
         return new(idx)
     end