From 7e47d6bc71dcdd2f870a94815881a098eeac0a57 Mon Sep 17 00:00:00 2001 From: nHackel Date: Tue, 3 Dec 2024 17:35:43 +0100 Subject: [PATCH 1/7] Init documentation with direct reco example --- .github/workflows/CI.yml | 22 +++++ .gitignore | 9 ++ README.md | 3 + docs/Project.toml | 14 +++ docs/make.jl | 51 ++++++++++ docs/src/API/algorithm.md | 52 +++++++++++ docs/src/API/plan.md | 60 ++++++++++++ docs/src/example_intro.md | 27 ++++++ docs/src/index.md | 47 ++++++++++ docs/src/literate/example/0_radon_data.jl | 98 ++++++++++++++++++++ docs/src/literate/example/1_interface.jl | 60 ++++++++++++ docs/src/literate/example/2_direct.jl | 63 +++++++++++++ docs/src/literate/example/3_direct_result.jl | 62 +++++++++++++ 13 files changed, 568 insertions(+) create mode 100644 docs/Project.toml create mode 100644 docs/make.jl create mode 100644 docs/src/API/algorithm.md create mode 100644 docs/src/API/plan.md create mode 100644 docs/src/example_intro.md create mode 100644 docs/src/index.md create mode 100644 docs/src/literate/example/0_radon_data.jl create mode 100644 docs/src/literate/example/1_interface.jl create mode 100644 docs/src/literate/example/2_direct.jl create mode 100644 docs/src/literate/example/3_direct_result.jl diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index ab4df39..eff9a2f 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -48,3 +48,25 @@ jobs: ${{ runner.os }}- - uses: julia-actions/julia-buildpkg@v1 - uses: julia-actions/julia-runtest@v1 + + docs: + name: Documentation + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: julia-actions/setup-julia@v1 + with: + version: '1' + - run: | + julia --project=docs -e ' + using Pkg + Pkg.develop(PackageSpec(path=pwd())) + Pkg.instantiate()' + - run: | + julia --project=docs -e ' + using Documenter: DocMeta, doctest + using AbstractImageReconstruction' + - run: julia --project=docs docs/make.jl + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + DOCUMENTER_KEY: ${{ secrets.DOCUMENTER_KEY }} \ No newline at end of file diff --git a/.gitignore b/.gitignore index b067edd..49c4447 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,10 @@ +*.jl.cov +*.jl.*.cov +*.jl.mem +docs/build/ +docs/site/ +docs/src/generated/ + +Manifest.toml + /Manifest.toml diff --git a/README.md b/README.md index 4c92640..d3cf344 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,9 @@ [![Build Status](https://github.com/JuliaImageRecon/AbstractImageReconstruction.jl/actions/workflows/CI.yml/badge.svg?branch=main)](https://github.com/JuliaImageRecon/AbstractImageReconstruction.jl/actions/workflows/CI.yml?query=branch%3Amain) +[![](https://img.shields.io/badge/docs-latest-blue.svg)](https://JuliaImageRecon.github.io/AbstractImageReconstruction.jl/latest) + + This package contains an interface and type hierarchy for image reconstruction algorithms and their parameters, together with associated utility tools. 
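+
+With a reconstruction package that implements this interface, the typical usage pattern looks roughly as follows (a sketch; `algo` and `raw` are provided by the concrete package):
+
+```julia
+using AbstractImageReconstruction
+# algo = ...  # a fully parameterized reconstruction algorithm from an implementing package
+# raw  = ...  # raw data in the format expected by that algorithm
+image = reconstruct(algo, raw)
+```
+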
## Installation diff --git a/docs/Project.toml b/docs/Project.toml new file mode 100644 index 0000000..aa5ebf1 --- /dev/null +++ b/docs/Project.toml @@ -0,0 +1,14 @@ +[deps] +AbstractImageReconstruction = "a4b4fdbf-6459-4ec9-990d-77e1fa24a91b" +BenchmarkTools = "6e4b80f9-dd63-53aa-95a3-0cdb28fa8baf" +CairoMakie = "13f3f980-e62b-5c42-98c6-ff1f3baf88f0" +Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4" +ImageGeoms = "9ee76f2b-840d-4475-b6d6-e485c9297852" +ImagePhantoms = "71a99df6-f52c-4da1-bd2a-69d6f37f3252" +LinearOperatorCollection = "a4a2c56f-fead-462a-a3ab-85921a5f2575" +Literate = "98b081ad-f1c9-55d3-8b20-4c87d4299306" +RadonKA = "86de8297-835b-47df-b249-c04e8db91db5" +RegularizedLeastSquares = "1e9c538a-f78c-5de5-8ffb-0b6dbe892d23" + +[compat] +Documenter = "1" diff --git a/docs/make.jl b/docs/make.jl new file mode 100644 index 0000000..a4dcf46 --- /dev/null +++ b/docs/make.jl @@ -0,0 +1,51 @@ +using Documenter, Literate, AbstractImageReconstruction + +# Generate examples +OUTPUT_BASE = joinpath(@__DIR__(), "src/generated") +INPUT_BASE = joinpath(@__DIR__(), "src/literate") +for (_, dirs, _) in walkdir(INPUT_BASE) + for dir in dirs + OUTPUT = joinpath(OUTPUT_BASE, dir) + INPUT = joinpath(INPUT_BASE, dir) + for file in filter(f -> endswith(f, ".jl"), readdir(INPUT)) + Literate.markdown(joinpath(INPUT, file), OUTPUT) + end + end +end + +makedocs( + format = Documenter.HTML(; + prettyurls=get(ENV, "CI", "false") == "true", + canonical="https://github.com/JuliaImageRecon/AbstractImageReconstruction.jl", + assets=String[], + collapselevel=1, + ), + repo="https://github.com/JuliaImageRecon/AbstractImageReconstruction.jl/blob/{commit}{path}#{line}", + modules = [AbstractImageReconstruction], + sitename = "AbstractImageReconstruction.jl", + authors = "Niklas Hackelberg, Tobias Knopp", + pages = [ + "Home" => "index.md", + "Example: Radon Reconstruction Package" => Any[ + "Introduction" => "example_intro.md", + "Radon Data" => "generated/example/0_radon_data.md", + "Interface" => "generated/example/1_interface.md", + "Direct Reconstruction" => "generated/example/2_direct.md", + "Direct Reconstruction Result" => "generated/example/3_direct_result.md", + ], + "How to" => Any[ + #"Construct RecoPlan" => "generated/howto/reco_plan.md", + #"Caching" => "generated/howto/caching.md", + #"Listeners" => "generated/howto/listeners.md", + ], + #"API Reference" => Any["Solvers" => "API/solvers.md", + #"Regularization Terms" => "API/regularization.md"], + + ], + pagesonly = true, + checkdocs = :none, + doctest = false, + doctestfilters = [r"(\d*)\.(\d{4})\d+"] + ) + +deploydocs(repo = "github.com/JuliaImageRecon/AbstractImageReconstruction.jl.git", push_preview = true) \ No newline at end of file diff --git a/docs/src/API/algorithm.md b/docs/src/API/algorithm.md new file mode 100644 index 0000000..3887043 --- /dev/null +++ b/docs/src/API/algorithm.md @@ -0,0 +1,52 @@ +# API for Regularizers +This page contains documentation of the public API of the RegularizedLeastSquares. 
In the Julia +REPL one can access this documentation by entering the help mode with `?` + +```@docs +RegularizedLeastSquares.L1Regularization +RegularizedLeastSquares.L2Regularization +RegularizedLeastSquares.L21Regularization +RegularizedLeastSquares.LLRRegularization +RegularizedLeastSquares.NuclearRegularization +RegularizedLeastSquares.TVRegularization +``` + +## Projection Regularization +```@docs +RegularizedLeastSquares.PositiveRegularization +RegularizedLeastSquares.RealRegularization +``` + +## Nested Regularization +```@docs +RegularizedLeastSquares.innerreg(::AbstractNestedRegularization) +RegularizedLeastSquares.sink(::AbstractNestedRegularization) +RegularizedLeastSquares.sinktype(::AbstractNestedRegularization) +``` + +## Scaled Regularization +```@docs +RegularizedLeastSquares.AbstractScaledRegularization +RegularizedLeastSquares.scalefactor +RegularizedLeastSquares.NormalizedRegularization +RegularizedLeastSquares.NoNormalization +RegularizedLeastSquares.MeasurementBasedNormalization +RegularizedLeastSquares.SystemMatrixBasedNormalization +RegularizedLeastSquares.FixedParameterRegularization +``` + +## Misc. Nested Regularization +```@docs +RegularizedLeastSquares.MaskedRegularization +RegularizedLeastSquares.TransformedRegularization +RegularizedLeastSquares.PlugAndPlayRegularization +``` + +## Miscellaneous Functions +```@docs +RegularizedLeastSquares.prox!(::AbstractParameterizedRegularization, ::AbstractArray) +RegularizedLeastSquares.prox!(::Type{<:AbstractParameterizedRegularization}, ::Any, ::Any) +RegularizedLeastSquares.norm(::AbstractParameterizedRegularization, ::AbstractArray) +RegularizedLeastSquares.λ(::AbstractParameterizedRegularization) +RegularizedLeastSquares.norm(::Type{<:AbstractParameterizedRegularization}, ::Any, ::Any) +``` \ No newline at end of file diff --git a/docs/src/API/plan.md b/docs/src/API/plan.md new file mode 100644 index 0000000..5774048 --- /dev/null +++ b/docs/src/API/plan.md @@ -0,0 +1,60 @@ +# API for Solvers +This page contains documentation of the public API of the RegularizedLeastSquares. In the Julia +REPL one can access this documentation by entering the help mode with `?` + +## solve! 
+```@docs +RegularizedLeastSquares.solve!(::AbstractLinearSolver, ::Any) +RegularizedLeastSquares.init!(::AbstractLinearSolver, ::Any) +RegularizedLeastSquares.init!(::AbstractLinearSolver, ::AbstractSolverState, ::AbstractMatrix) + +``` + +## ADMM +```@docs +RegularizedLeastSquares.ADMM +``` + +## CGNR +```@docs +RegularizedLeastSquares.CGNR +``` + +## Kaczmarz +```@docs +RegularizedLeastSquares.Kaczmarz +``` + +## FISTA +```@docs +RegularizedLeastSquares.FISTA +``` + +## OptISTA +```@docs +RegularizedLeastSquares.OptISTA +``` + +## POGM +```@docs +RegularizedLeastSquares.POGM +``` + +## SplitBregman +```@docs +RegularizedLeastSquares.SplitBregman +``` + +## Miscellaneous +```@docs +RegularizedLeastSquares.solverstate +RegularizedLeastSquares.solversolution +RegularizedLeastSquares.solverconvergence +RegularizedLeastSquares.StoreSolutionCallback +RegularizedLeastSquares.StoreConvergenceCallback +RegularizedLeastSquares.CompareSolutionCallback +RegularizedLeastSquares.linearSolverList +RegularizedLeastSquares.createLinearSolver +RegularizedLeastSquares.applicableSolverList +RegularizedLeastSquares.isapplicable +``` \ No newline at end of file diff --git a/docs/src/example_intro.md b/docs/src/example_intro.md new file mode 100644 index 0000000..353fcc5 --- /dev/null +++ b/docs/src/example_intro.md @@ -0,0 +1,27 @@ +# Small Reconstruction Package for Radon projections +In this example we will implement a small image reconstruction package with the help of `AbstractImageReconstruction.jl`. Our example reconstruction package aims to provide direct and iterative reconstruction algorithms for Radon projection data with optional GPU support. + +Most of the desired functionality is already implemented in various Julia packages. Our reconstruction packages now needs to properly connect these packages and transform the data into the appropriate formats for each package. + +## Installation +In addition to AbstractImageReconstruction.jl, we will need a few more packages to get started. We can install these packages using the Julia package manager. Open a Julia REPL and run the following command: + +```julia +using Pkg +Pkg.add("AbstractImageReconstruction") +``` +This will download and install AbstractImageReconstruction.jl and its dependencies. To install a different version, please consult the [Pkg documentation](https://pkgdocs.julialang.org/dev/managing-packages/#Adding-packages). + + +[RadonKA.jl](https://github.com/roflmaostc/RadonKA.jl/tree/main) provides us with fast Radon forward and backprojections, which we can use for direct reconstructions and preparing example data for our package. + +[LinearOperatorCollection.jl](https://github.com/JuliaImageRecon/LinearOperatorCollection.jl) wraps the functionality of RadonKA.jl in a matrix-free linear operator, which can be used in iterative solvers. + +[RegularizedLeastSquares.jl](https://github.com/JuliaImageRecon/RegularizedLeastSquares.jl) offers a variety of iterative solver and regularization options. + +[ImagePhantoms.jl](https://github.com/JuliaImageRecon/ImagePhantoms.jl) and [ImageGeoms.jl](https://github.com/JuliaImageRecon/ImageGeoms.jl) allow us to define digital software "phantoms", which we will use to test our reconstruction algorithms. + +Lastly, we will use [CairoMakie.jl](https://docs.makie.org/stable/) to visualize our results. 
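+
+All of these helper packages can be installed in the same way; the snippet below is just a convenience sketch that adds every package used throughout this example:
+
+```julia
+using Pkg
+Pkg.add(["RadonKA", "LinearOperatorCollection", "RegularizedLeastSquares",
+         "ImagePhantoms", "ImageGeoms", "CairoMakie"])
+```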
+ +## Outline + diff --git a/docs/src/index.md b/docs/src/index.md new file mode 100644 index 0000000..6b3ddaa --- /dev/null +++ b/docs/src/index.md @@ -0,0 +1,47 @@ +# AbstractImageReconstruction.jl + +*Abstract Interface for Medical Image Reconstruction Packages* + +## Introduction + +AbstractImageReconstruction.jl is a Julia package that serves as the core API for medical imaging packages. It provides implementations an interface and type hierarchy with which one can represent and implement image reconstruction algorithms, their parameters and runtime behaviour. In particular, this package serves as the API of the Julia packages [MPIReco.jl](https://github.com/MagneticParticleImaging/MPIReco.jl). + +## Features + +* Variety of optimization algorithms optimized for least squares problems +* Storing, loading and manipulating of reconstruction algorithms with (partially) set parameters +* Attaching callback listeners to parameters + +## Installation + +Within Julia, use the package manager: +```julia +using Pkg +Pkg.add("AbstractImageReconstruction") +``` +AbstractImageReconstruction is not intended to be used alone, but together with an image reconstruction package that implements the provided interface, such as [MPIReco.jl](https://github.com/MagneticParticleImaging/MPIReco.jl). + +## Usage +Concrete construction of reconstruction algorithms depend on the implementation of the reconstruction package. Once an algorithms is constructed with the given paramters, images can be reconstructed as follows: +```julia +using AbstractImageReconstruction, MPIReco + +params = ... # Setup reconstruction paramter +algo = ... # Setup chosen algorithm with params +raw = ... # Setup raw data + +image = reconstruct(algo, raw) +``` +Once an algorithm is constructed it can be transformed into a `RecoPlan`. These are mutable and transparent wrappers around the nested types of the algorithm and its paramters, that can be stored and restored to and from TOML files. + +```julia +plan = toPlan(algo) +savePlan(MPIReco, "Example", plan) +plan = loadPlan(MPIReco, "Example", [MPIReco, RegularizedLeastSquares, MPIFiles]) + +algo2 = build(plan) +algo == algo2 # true +``` +Unlike concrete algorithm instances, a `RecoPlan` may still be missing certain values of its fields and it can encode the structure of an image reconstruction algorithm without concrete parameterization. + +It is also possible to attach `Listeners` to `RecoPlan` fields, that call user-specified functions if they are changed. This allows specific `RecoPlans` to provide smart default paramter choices or embedding a plan into a GUI. \ No newline at end of file diff --git a/docs/src/literate/example/0_radon_data.jl b/docs/src/literate/example/0_radon_data.jl new file mode 100644 index 0000000..5765a1c --- /dev/null +++ b/docs/src/literate/example/0_radon_data.jl @@ -0,0 +1,98 @@ +# # Radon Data + +# In this example we will setup our Radon data using RadonKA.jl, ImagePhantoms.jl and ImageGeoms.jl. We will start with simple 2D images and their sinograms and move up to a time series of 3D images and sinograms. + +# ## Background +# The Radon transform is a integral transform which projects the values of a function(/or a phantom) along straight lines onto a detector. +# These projections are recorded for a number of different angles and form the so-called sinogram. 
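+# As a point of reference (a standard textbook definition, not needed for the code below): in its parallel-beam form the Radon transform of a 2D function ``f`` is the line integral
+# ```math
+# \mathcal{R}f(s, \theta) = \int_{-\infty}^{\infty} f(s\cos\theta - t\sin\theta,\; s\sin\theta + t\cos\theta)\,\mathrm{d}t,
+# ```
+# evaluated for every detector offset ``s`` and projection angle ``\theta``.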
The Radon transform and its adjoint form the mathematical basis +# for Computed Tomography (CT) and other imaging modalities such as single photon emission computed tomography (SPECT) and positron emission tomography (PET). + +# ## 2D Phantom +# We will use a simple Shepp-Logan phantom to generate our Radon data. The phantom is a standard test image for medical imaging and consists of a number of ellipses with different intensities. +# It can be generated with ImagePhantoms.jl and ImageGeoms.jl. as follows: + +using ImagePhantoms, ImageGeoms +N = 256 +image = shepp_logan(N, SheppLoganToft()) +size(image) + +# This produces a 256x256 image of a Shepp-Logan phantom. Next, we will generate the Radon data using `radon` from RadonKA.jl. +# The arguments of this function are image or phantom under transformation, the anlges at which the projections are taken, and the used geometry of the system. For this example we will use the default parallel circle geometry. +# For more details, we refer to the RadonKA.jl documentation. We will use 256 angles for the projections, between 0 and π. +using RadonKA +angles = collect(range(0, π, 256)) +sinogram = Array(RadonKA.radon(image, angles)) +size(sinogram) + + +# To visualize our progress so far, we will use CairoMakie.jl and a small helper function: +using CairoMakie +function plot_image(figPos, img; title = "", width = 150, height = 150) + ax = CairoMakie.Axis(figPos[1, 1]; yreversed=true, title, width, height) + hidedecorations!(ax) + hm = heatmap!(ax, img) + Colorbar(figPos[1, 2], hm) +end +fig = Figure() +plot_image(fig[1,1], image, title = "Image") +plot_image(fig[1,2], sinogram, title = "Sinogram") +resize_to_layout!(fig) +fig + +# ## 3D Pnantom +# RadonKA.jl also supports 3D Radon transforms. The first two dimensions are interpreted as the XY plane where the transform applied and the last dimensions is the rotational axis z of the projections. +# For that we need to create a 3D Shepp-Logan phantom. First we retrieve the parameters of the ellipsoids of the Shepp-Logan phantom: +shape = (128, 128, 128) +params = map(collect, ellipsoid_parameters(; fovs = shape)); + +# We then scale the intensities of the ellipsoids to [0.0, ..., 1.0]: +toft_settings = [1.0, -0.8, -0.2, -0.2, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1] +for idx in eachindex(toft_settings) + params[idx][10] = toft_settings[idx] +end + +# Finally, we create the 3D Shepp-Logan phantom by defining and sampling our image geometry: +ob = ellipsoid(map(Tuple, params)) +ig = ImageGeom(;dims = shape) +image = phantom(axes(ig)..., ob) +size(image) + +# Now we can compute the 3D Radon transform of our phantom: +sinogram = Array(RadonKA.radon(image, angles)) +size(sinogram) + +# Let's visualize the 3D Radon data: +fig = Figure() +plot_image(fig[1,1], reverse(image[52,:,:]), title = "Slice YZ at z=48") +plot_image(fig[1,2], image[:,80,:], title = "Slice XZ at y=80") +plot_image(fig[2,1], reverse(image[:, :, 48]), title = "Slice XY at z=48") +plot_image(fig[2,2], reverse(sinogram[:,:,48]), title = "Sinogram at z=48") +plot_image(fig[3,1], reverse(image[:, :, 32]), title = "Slice XY at z=32") +plot_image(fig[3,2], reverse(sinogram[:,:,32]), title = "Sinogram at z=32") +resize_to_layout!(fig) +fig + + +# ## Time Series of 3D Phantoms +# Lastly, we want to add a time dimension to our 3D phantom. For our example we will increase the intensity of the third ellipsoid every time step or frame. 
+images = similar(image, size(image)..., 5) +sinograms = similar(sinogram, size(sinogram)..., 5) +for (i, intensity) in enumerate(range(params[3][end], 0.3, 5)) + params[3][end] = intensity + local ob = ellipsoid(map(Tuple, params)) + local ig = ImageGeom(;dims = shape) + images[:, :, :, i] = phantom(axes(ig)..., ob) + sinograms[:, :, :, i] = Array(RadonKA.radon(images[:, :, :, i], angles)) +end +size(sinograms) + +fig = Figure() +for i = 1:5 + plot_image(fig[i,1], reverse(images[:, :, 48, i])) + plot_image(fig[i,2], sinograms[:, :, 48, i]) +end +resize_to_layout!(fig) +fig + +# The goal of our reconstruction package is now to recover an approximation of the time-series 3D phantoms from a given time-series of sinograms. +# In the next section we will introduce our class hierarchies and explore the API of AbstractImageReconstruction.jl. \ No newline at end of file diff --git a/docs/src/literate/example/1_interface.jl b/docs/src/literate/example/1_interface.jl new file mode 100644 index 0000000..dafede0 --- /dev/null +++ b/docs/src/literate/example/1_interface.jl @@ -0,0 +1,60 @@ +# # Interface +# This section introduces the abstract types we need to implement for our reconstruction package and how they relate to interface and types of AbstractImageReconstruction.jl. + +# ## Abstract Types +# We start by defining the abstract types we need to implement for our reconstruction package. AbstractImageReconstruction.jl provides two abstract types: +# ```julia +# abstract type AbstractImageReconstructionAlgorithm end +# abstract type AbstractImageReconstructionParameters end +# ``` +# `AbstractImageReconstructionAlgorithms` represent a given reconstruction algorithm, while `AbstractImageReconstructionParameters` represent the parameters an algorithm was constructed with. +# Once constructed, algorithms can be used to reconstruct images repeatly and idealy without unecessary recomputations. + +# For our package we extend these abstract types with our own abstract subtypes: +using AbstractImageReconstruction +abstract type AbstractRadonAlgorithm <: AbstractImageReconstructionAlgorithm end +abstract type AbstractRadonParameters <: AbstractImageReconstructionParameters end + +# Later on we will have parameters that are shared between different algorithms and parameters for different processings steps of a reconstruction. +# In our case we will have preprocessing parameters and reconstruction parameters. For these we introduce the following abstract types: +abstract type AbstractRadonPreprocessingParameters <: AbstractRadonParameters end +abstract type AbstractRadonReconstructionParameters <: AbstractRadonParameters end + +# Since we want to implement both direct and iterative methods for our reconstruction, we introduce the following abstract types: +abstract type AbstractDirectRadonAlgorithm <: AbstractRadonAlgorithm end +abstract type AbstractIterativeRadonAlgorithm <: AbstractRadonAlgorithm end + +# ## Internal Interface +# Reconstruction algorithms in AbstractImageReconstruction.jl are expected to be implemented in terms of distinct processing steps, implemented in their own `process` methods. +# The `process` function takes an algorithm, parameters, and inputs and returns the result of the processing step. +# If no function is defined for an instance of an algorithm, the default implementation is called. 
This method tries to invoke the function `process` with the type of the algorithm:
+# ```julia
+# process(algo::AbstractImageReconstructionAlgorithm, param::AbstractImageReconstructionParameters, inputs...) = process(typeof(algo), param, inputs...)
+# ```
+# The implementation of reconstruction algorithms is thus expected to implement the `process` function either for the algorithm type or for an instance. Dispatch on instances allows an instance to change its state, while dispatch on types allows for pure helper functions.
+# A `process` itself can invoke other `process` functions to enable multiple processing steps and generally have arbitrary control flow. It is not required to implement a straightforward pipeline. We will see this later when we implement our algorithms.
+
+# Let's define a preprocessing step we can share between our algorithms. We want to allow the user to select certain frames from the time series data and average them.
+# We will use the `@kwdef` macro to provide a constructor with keyword arguments and default values:
+using Statistics
+Base.@kwdef struct RadonPreprocessingParameters <: AbstractRadonPreprocessingParameters
+  frames::Vector{Int64} = []
+  numAverages::Int64 = 1
+end
+function AbstractImageReconstruction.process(::Type{<:AbstractRadonAlgorithm}, params::RadonPreprocessingParameters, data::AbstractArray{T, 4}) where {T}
+  frames = isempty(params.frames) ? (1:size(data, 4)) : params.frames
+  data = data[:, :, :, frames]
+
+  if params.numAverages > 1
+    # group the frame dimension into blocks of `numAverages` frames and average each block
+    data = reshape(data, size(data)[1:3]..., params.numAverages, :)
+    data = dropdims(mean(data, dims = 4), dims = 4)
+  end
+
+  return data
+end
+
+# ## External Interface
+# A user of our package should be able to reconstruct images by calling the `reconstruct` function. This function takes an algorithm and an input and returns the reconstructed image.
+# Internally, the `reconstruct` function calls the `put!` and `take!` functions of the algorithm to pass the input and retrieve the output. Algorithms must implement these functions and are expected to have FIFO behavior.
+
+# Algorithms can internally construct and use further algorithms to which they pass data.
\ No newline at end of file
diff --git a/docs/src/literate/example/2_direct.jl b/docs/src/literate/example/2_direct.jl
new file mode 100644
index 0000000..f3d25f4
--- /dev/null
+++ b/docs/src/literate/example/2_direct.jl
@@ -0,0 +1,63 @@
+include("../../literate/example/1_interface.jl") #hide
+using RadonKA #hide
+
+# # Direct Reconstruction
+# To implement our direct reconstruction algorithms we need to define a few more methods and types. We will start by defining the parameters for the backprojection and for the filtered backprojection. Afterwards we can implement the algorithm itself.
+ +# ## Parameters and Processing +# For convenience we first introduce a new abstract type for the direct reconstruction paramters: +abstract type AbstractDirectRadonReconstructionParameters <: AbstractRadonReconstructionParameters end +# The backprojection parameters are simple and only contain the number of angles: +Base.@kwdef struct RadonBackprojectionParameters <: AbstractDirectRadonReconstructionParameters + angles::Vector{Float64} +end + +# The filtered backprojection parameters are more complex and contain the number of angles and optionally the filter which should be used: +Base.@kwdef struct RadonFilteredBackprojectionParameters <: AbstractDirectRadonReconstructionParameters + angles::Vector{Float64} + filter::Union{Nothing, Vector{Float64}} = nothing +end +# Since we have defined no default values for the angles, they are required to be set by the user. A more advanced implementation would also allow for the geometry to be set. + +# Next we will implement the process steps for both of our backprojection variants. Since RadonKA.jl expects 2D or 3D arrays we have to transform our time series accordingly. +function AbstractImageReconstruction.process(algoT::Type{<:AbstractDirectRadonAlgorithm}, params::AbstractDirectRadonReconstructionParameters, data::AbstractArray{T, 4}) where {T} + result = [] + for i = 1:size(data, 4) + push!(result, process(algoT, params, view(data, :, :, :, i))) + end + return cat(result..., dims = 4) +end +AbstractImageReconstruction.process(::Type{<:AbstractDirectRadonAlgorithm}, params::RadonBackprojectionParameters, data::AbstractArray{T, 3}) where {T} = RadonKA.backproject(data, params.angles) +AbstractImageReconstruction.process(::Type{<:AbstractDirectRadonAlgorithm}, params::RadonFilteredBackprojectionParameters, data::AbstractArray{T, 3}) where {T} = RadonKA.backproject_filtered(data, params.angles; filter = params.filter) + +# ## Algorithm +# The direct reconstruction algorithm has essentially no state to store between reconstructions and thus only needs its parameters as fields. We want our algorithm to accept any combination of our preprocessing and direct reconstruction parameters. +# This we encode in a new type: +Base.@kwdef struct DirectRadonParameters{P, R} <: AbstractRadonParameters where {P<:AbstractRadonPreprocessingParameters, R<:AbstractDirectRadonReconstructionParameters} + pre::P + reco::R +end +# And the according processing step: +function AbstractImageReconstruction.process(algoT::Type{<:AbstractDirectRadonAlgorithm}, params::DirectRadonParameters{P, R}, data::AbstractArray{T, 4}) where {T, P<:AbstractRadonPreprocessingParameters, R<:AbstractDirectRadonReconstructionParameters} + data = process(algoT, params.pre, data) + return process(algoT, params.reco, data) +end + +# Now we can define the algorithm type itself. 
Algorithms are usually constructed with one argument passing in the user parameters: +mutable struct DirectRadonAlgorithm{D} <: AbstractDirectRadonAlgorithm where D <: DirectRadonParameters + parameter::D + output::Channel{Any} + DirectRadonAlgorithm(parameter::D) where D = new{D}(parameter, Channel{Any}(Inf)) +end +# And they implement a method to retrieve the used parameters: +AbstractImageReconstruction.parameters(algo::DirectRadonAlgorithm) = algo.parameter + +# And implement the `put!` and `take!` functions, mimicking the behavior of a FIFO channel: +Base.take!(algo::DirectRadonAlgorithm) = Base.take!(algo.output) +function Base.put!(algo::DirectRadonAlgorithm, data::AbstractArray{T, 4}) where {T} + lock(algo.output) do + put!(algo.output, process(algo, algo.parameter, data)) + end +end + +# The way the behaviour is implemented here, the algorithm does not buffer any inputs and instead blocks until the currenct reconstruction is done. Outputs are stored until they are retrieved. diff --git a/docs/src/literate/example/3_direct_result.jl b/docs/src/literate/example/3_direct_result.jl new file mode 100644 index 0000000..5cd3723 --- /dev/null +++ b/docs/src/literate/example/3_direct_result.jl @@ -0,0 +1,62 @@ +include("../../literate/example/1_interface.jl") #hide +include("../../literate/example/2_direct.jl") #hide +using RadonKA, ImagePhantoms, ImageGeoms, CairoMakie, AbstractImageReconstruction #hide +using CairoMakie #hide +function plot_image(figPos, img; title = "", width = 150, height = 150) #hide + ax = CairoMakie.Axis(figPos[1, 1]; yreversed=true, title, width, height) #hide + hidedecorations!(ax) #hide + hm = heatmap!(ax, img) #hide + Colorbar(figPos[1, 2], hm) #hide +end #hide +angles = collect(range(0, π, 256)) #hide +shape = (128, 128, 128) #hide +params = map(collect, ellipsoid_parameters(; fovs = shape)) #hide +toft_settings = [1.0, -0.8, -0.2, -0.2, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1] #hide +for idx in eachindex(toft_settings) #hide + params[idx][10] = toft_settings[idx] #hide +end #hide +ob = ellipsoid(map(Tuple, params)) #hide +ig = ImageGeom(;dims = shape) #hide +image = phantom(axes(ig)..., ob) #hide +sinogram = Array(RadonKA.radon(image, angles)) #hide +sinograms = similar(sinogram, size(sinogram)..., 5) #hide +images = similar(image, size(image)..., 5) #hide +for (i, intensity) in enumerate(range(params[3][end], 0.3, 5)) #hide + params[3][end] = intensity #hide + local ob = ellipsoid(map(Tuple, params)) #hide + local ig = ImageGeom(;dims = shape) #hide + images[:, :, :, i] = phantom(axes(ig)..., ob) #hide + sinograms[:, :, :, i] = Array(RadonKA.radon(images[:, :, :, i], angles)) #hide +end #hide + + +# # Direct Reconstruction Result +# Now that we have implemented our direct reconstruction algorithm, we can use it to reconstruct for example the first three images of our time series. 
+# We first prepare our parameters: +pre = RadonPreprocessingParameters(frames = collect(1:3)) +back_reco = RadonBackprojectionParameters(;angles) +filter_reco = RadonFilteredBackprojectionParameters(;angles); + +# Then we can construct the algorithms: +algo_back = DirectRadonAlgorithm(DirectRadonParameters(pre, back_reco)) +algo_filter = DirectRadonAlgorithm(DirectRadonParameters(pre, filter_reco)); + +# And apply them to our sinograms: +imag_back = reconstruct(algo_back, sinograms) +imag_filter = reconstruct(algo_filter, sinograms); + +# Finally we can visualize the results: +fig = Figure() +for i = 1:3 + plot_image(fig[i,1], reverse(images[:, :, 48, i])) + plot_image(fig[i,2], sinograms[:, :, 48, i]) + plot_image(fig[i,3], reverse(imag_back[:, :, 48, i])) + plot_image(fig[i,4], reverse(imag_filter[:, :, 48, i])) +end +resize_to_layout!(fig) +fig + +# To add new functionality to our direct reconstruction algorithm, we can simply add new preprocessing or reconstruction parameters and implement the according processing steps. This way we can easily extend our algorithm to support new features. +# We can also define our own file format to store the results of our reconstruction algorithms or dispatch our preprocessing on files and pass files instead of the sinograms directly. + +# The way we use the algorithms here, requires the user to reconstruct the algorithm for each changed parameter. After we implement the more complex iterative reconstructions, we will take a look at how to make this process more manageable. \ No newline at end of file From 0c7c5bba8934d224125868dd95bbcae1bdee8c66 Mon Sep 17 00:00:00 2001 From: nHackel Date: Tue, 3 Dec 2024 18:47:15 +0100 Subject: [PATCH 2/7] Add iterative reco docu --- docs/make.jl | 2 + docs/src/example_intro.md | 2 +- docs/src/literate/example/0_radon_data.jl | 18 ++-- docs/src/literate/example/2_direct.jl | 2 +- docs/src/literate/example/3_direct_result.jl | 10 +-- docs/src/literate/example/4_iterative.jl | 82 +++++++++++++++++++ .../literate/example/5_iterative_result.jl | 59 +++++++++++++ 7 files changed, 159 insertions(+), 16 deletions(-) create mode 100644 docs/src/literate/example/4_iterative.jl create mode 100644 docs/src/literate/example/5_iterative_result.jl diff --git a/docs/make.jl b/docs/make.jl index a4dcf46..17fa829 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -32,6 +32,8 @@ makedocs( "Interface" => "generated/example/1_interface.md", "Direct Reconstruction" => "generated/example/2_direct.md", "Direct Reconstruction Result" => "generated/example/3_direct_result.md", + "Iterative Reconstruction" => "generated/example/4_iterative.md", + "Iterative Reconstruction Result" => "generated/example/5_iterative_result.md", ], "How to" => Any[ #"Construct RecoPlan" => "generated/howto/reco_plan.md", diff --git a/docs/src/example_intro.md b/docs/src/example_intro.md index 353fcc5..f95b459 100644 --- a/docs/src/example_intro.md +++ b/docs/src/example_intro.md @@ -1,5 +1,5 @@ # Small Reconstruction Package for Radon projections -In this example we will implement a small image reconstruction package with the help of `AbstractImageReconstruction.jl`. Our example reconstruction package aims to provide direct and iterative reconstruction algorithms for Radon projection data with optional GPU support. +In this example we will implement a small image reconstruction package with the help of `AbstractImageReconstruction.jl`. Our example reconstruction package aims to provide direct and iterative reconstruction algorithms for Radon projection data. 
Most of the desired functionality is already implemented in various Julia packages. Our reconstruction packages now needs to properly connect these packages and transform the data into the appropriate formats for each package. diff --git a/docs/src/literate/example/0_radon_data.jl b/docs/src/literate/example/0_radon_data.jl index 5765a1c..dc9eebe 100644 --- a/docs/src/literate/example/0_radon_data.jl +++ b/docs/src/literate/example/0_radon_data.jl @@ -42,7 +42,7 @@ fig # ## 3D Pnantom # RadonKA.jl also supports 3D Radon transforms. The first two dimensions are interpreted as the XY plane where the transform applied and the last dimensions is the rotational axis z of the projections. # For that we need to create a 3D Shepp-Logan phantom. First we retrieve the parameters of the ellipsoids of the Shepp-Logan phantom: -shape = (128, 128, 128) +shape = (64, 64, 64) params = map(collect, ellipsoid_parameters(; fovs = shape)); # We then scale the intensities of the ellipsoids to [0.0, ..., 1.0]: @@ -63,12 +63,12 @@ size(sinogram) # Let's visualize the 3D Radon data: fig = Figure() -plot_image(fig[1,1], reverse(image[52,:,:]), title = "Slice YZ at z=48") -plot_image(fig[1,2], image[:,80,:], title = "Slice XZ at y=80") -plot_image(fig[2,1], reverse(image[:, :, 48]), title = "Slice XY at z=48") -plot_image(fig[2,2], reverse(sinogram[:,:,48]), title = "Sinogram at z=48") -plot_image(fig[3,1], reverse(image[:, :, 32]), title = "Slice XY at z=32") -plot_image(fig[3,2], reverse(sinogram[:,:,32]), title = "Sinogram at z=32") +plot_image(fig[1,1], reverse(image[26,:,:]), title = "Slice YZ at z=26") +plot_image(fig[1,2], image[:,40,:], title = "Slice XZ at y=40") +plot_image(fig[2,1], reverse(image[:, :, 24]), title = "Slice XY at z=24") +plot_image(fig[2,2], reverse(sinogram[:,:,24]), title = "Sinogram at z=24") +plot_image(fig[3,1], reverse(image[:, :,16]), title = "Slice XY at z=16") +plot_image(fig[3,2], reverse(sinogram[:,:,16]), title = "Sinogram at z=16") resize_to_layout!(fig) fig @@ -88,8 +88,8 @@ size(sinograms) fig = Figure() for i = 1:5 - plot_image(fig[i,1], reverse(images[:, :, 48, i])) - plot_image(fig[i,2], sinograms[:, :, 48, i]) + plot_image(fig[i,1], reverse(images[:, :, 24, i])) + plot_image(fig[i,2], sinograms[:, :, 24, i]) end resize_to_layout!(fig) fig diff --git a/docs/src/literate/example/2_direct.jl b/docs/src/literate/example/2_direct.jl index f3d25f4..f68a80b 100644 --- a/docs/src/literate/example/2_direct.jl +++ b/docs/src/literate/example/2_direct.jl @@ -50,7 +50,7 @@ mutable struct DirectRadonAlgorithm{D} <: AbstractDirectRadonAlgorithm where D < DirectRadonAlgorithm(parameter::D) where D = new{D}(parameter, Channel{Any}(Inf)) end # And they implement a method to retrieve the used parameters: -AbstractImageReconstruction.parameters(algo::DirectRadonAlgorithm) = algo.parameter +AbstractImageReconstruction.parameter(algo::DirectRadonAlgorithm) = algo.parameter # And implement the `put!` and `take!` functions, mimicking the behavior of a FIFO channel: Base.take!(algo::DirectRadonAlgorithm) = Base.take!(algo.output) diff --git a/docs/src/literate/example/3_direct_result.jl b/docs/src/literate/example/3_direct_result.jl index 5cd3723..4bef270 100644 --- a/docs/src/literate/example/3_direct_result.jl +++ b/docs/src/literate/example/3_direct_result.jl @@ -9,7 +9,7 @@ function plot_image(figPos, img; title = "", width = 150, height = 150) #hide Colorbar(figPos[1, 2], hm) #hide end #hide angles = collect(range(0, π, 256)) #hide -shape = (128, 128, 128) #hide +shape = (64, 64, 64) 
#hide params = map(collect, ellipsoid_parameters(; fovs = shape)) #hide toft_settings = [1.0, -0.8, -0.2, -0.2, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1] #hide for idx in eachindex(toft_settings) #hide @@ -48,10 +48,10 @@ imag_filter = reconstruct(algo_filter, sinograms); # Finally we can visualize the results: fig = Figure() for i = 1:3 - plot_image(fig[i,1], reverse(images[:, :, 48, i])) - plot_image(fig[i,2], sinograms[:, :, 48, i]) - plot_image(fig[i,3], reverse(imag_back[:, :, 48, i])) - plot_image(fig[i,4], reverse(imag_filter[:, :, 48, i])) + plot_image(fig[i,1], reverse(images[:, :, 24, i])) + plot_image(fig[i,2], sinograms[:, :, 24, i]) + plot_image(fig[i,3], reverse(imag_back[:, :, 24, i])) + plot_image(fig[i,4], reverse(imag_filter[:, :, 24, i])) end resize_to_layout!(fig) fig diff --git a/docs/src/literate/example/4_iterative.jl b/docs/src/literate/example/4_iterative.jl new file mode 100644 index 0000000..35905a5 --- /dev/null +++ b/docs/src/literate/example/4_iterative.jl @@ -0,0 +1,82 @@ +include("../../literate/example/1_interface.jl") #hide +using RadonKA #hide + +# # Iterative Reconstruction +# In this section we implement a more complex iterative reconstruction algorithm. +# We will use iterative solvers provided by [RegularizedLeastSquares.jl](https://github.com/JuliaImageRecon/RegularizedLeastSquares.jl). These solver feature a variety of arguments, in this example we will just focus on the parameters for the number of iterations and regularization. +# Each solver requires either a matrix or a matrix-free linear operator which implements multiplication with a vector as input. We will use [LinearOperatorCollection.jl](https://github.com/JuliaImageRecon/LinearOperatorCollection.jl), which implements a wrapper around RadonKA. + +# For this example, we will further assume that the construction of this operator is costly and should be done only once. This means our algorithm will be stateful and has to store the operator. + +# ## Parameters and Processing +# We will start by defining the parameters for the algorithm and the processing steps. Afterwards we can implement the algorithm itself. Since we will use the same preprocessing as for the direct reconstruction, we can reuse the parameters and processing steps and jump directly to the iterative parameters: +using RegularizedLeastSquares, LinearOperatorCollection +abstract type AbstractIterativeRadonReconstructionParameters <: AbstractRadonReconstructionParameters end +Base.@kwdef struct IterativeRadonReconstructionParameters{S <: AbstractLinearSolver, R <: AbstractRegularization, N} <: AbstractIterativeRadonReconstructionParameters + solver::Type{S} + iterations::Int64 + reg::Vector{R} + shape::NTuple{N, Int64} + angles::Vector{Float64} +end +# The parameters of this struct can be grouped into three catergories. The solver type just specifies which solver to use. The number of iterations and the regularization term could be abstracted into a nested `AbstractRadonParameter` which describe the parameters for the solver. Lastly the shape and angles are required to construct the linear operator. 
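+# For orientation, a fully specified parameter set could later look like this (a sketch; `angles` and the concrete values are taken from the upcoming result example):
+# ```julia
+# reco = IterativeRadonReconstructionParameters(;
+#   solver = CGNR,
+#   iterations = 20,
+#   reg = [L2Regularization(0.001), PositiveRegularization()],
+#   shape = (64, 64, 64),
+#   angles = angles)
+# ```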
+
+# Since we want to construct the linear operator only once, we will write the `process` method with the operator as a given argument:
+function AbstractImageReconstruction.process(::Type{<:AbstractIterativeRadonAlgorithm}, params::IterativeRadonReconstructionParameters, op, data::AbstractArray{T, 4}) where {T}
+  solver = createLinearSolver(params.solver, op; iterations = params.iterations, reg = params.reg)
+
+  result = similar(data, params.shape..., size(data, 4))
+
+  # reconstruct each frame separately with the configured solver
+  for i = 1:size(data, 4)
+    result[:, :, :, i] = solve!(solver, vec(data[:, :, :, i]))
+  end
+
+  return result
+end
+
+# Later we will need to define how to create the operator and pass it to this `process` method.
+
+# ## Algorithm
+# Similar to the direct reconstruction algorithm, we want our iterative algorithm to accept both preprocessing and reconstruction parameters. We will encode this in a new type:
+Base.@kwdef struct IterativeRadonParameters{P, R} <: AbstractRadonParameters where {P<:AbstractRadonPreprocessingParameters, R<:AbstractIterativeRadonReconstructionParameters}
+  pre::P
+  reco::R
+end
+# Instead of defining essentially the same struct again, we could also define a more generic one and specify the supported reconstruction parameters as type constraints in the algorithm constructor.
+
+# Unlike the direct reconstruction algorithm, the iterative algorithm has to store the linear operator. We will store it as a field in the algorithm type:
+mutable struct IterativeRadonAlgorithm{D} <: AbstractIterativeRadonAlgorithm where D <: IterativeRadonParameters
+  parameter::D
+  op::Union{Nothing, AbstractLinearOperator}
+  output::Channel{Any}
+end
+
+# We will set the operator to `nothing` in the constructor:
+function IterativeRadonAlgorithm(parameter::D) where D
+  return IterativeRadonAlgorithm{D}(parameter, nothing, Channel{Any}(Inf))
+end
+
+# Next we implement the `process` method for our reconstruction parameters and an algorithm instance. This allows us to access the operator and pass it to the processing step:
+function AbstractImageReconstruction.process(algo::IterativeRadonAlgorithm, params::IterativeRadonParameters{P, R}, data::AbstractArray{T, 4}) where {T, P<:AbstractRadonPreprocessingParameters, R<:AbstractIterativeRadonReconstructionParameters}
+  data = process(algo, params.pre, data)
+  return process(algo, params.reco, algo.op, data)
+end
+
+# Note that initially the operator is `nothing` and the processing step would fail as it stands. To "fix" this we define a `process` method for the algorithm instance which creates the operator and stores it in the algorithm:
+function AbstractImageReconstruction.process(algo::IterativeRadonAlgorithm, params::IterativeRadonReconstructionParameters, ::Nothing, data::AbstractArray{T, 4}) where {T}
+  op = RadonOp(T; shape = params.shape, angles = params.angles)
+  algo.op = op
+  return process(AbstractIterativeRadonAlgorithm, params, op, data)
+end
+
+# As it stands, our algorithm is not type stable. To fix this, we would need to know the element type during construction, which is possible with a different parameterization of the algorithm. We will not do this here.
+# Often the performance impact of this is negligible, as the critical sections are in the preprocessing or the iterative solver, especially since we still dispatch on the operator.
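+# For illustration, such a parameterization could look roughly like the following sketch (not part of our package), where the operator is built eagerly for a known element type:
+# ```julia
+# mutable struct IterativeRadonAlgorithm{D, O <: AbstractLinearOperator} <: AbstractIterativeRadonAlgorithm
+#   parameter::D
+#   op::O
+#   output::Channel{Any}
+# end
+# IterativeRadonAlgorithm(parameter, ::Type{T}) where T = IterativeRadonAlgorithm(parameter,
+#   RadonOp(T; shape = parameter.reco.shape, angles = parameter.reco.angles), Channel{Any}(Inf))
+# ```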
+ +# To finish up the implementation we need to implement the `put!`, `take!` and `parameters` functions: +Base.take!(algo::IterativeRadonAlgorithm) = Base.take!(algo.output) +function Base.put!(algo::IterativeRadonAlgorithm, data::AbstractArray{T, 4}) where {T} + lock(algo.output) do + put!(algo.output, process(algo, algo.parameter, data)) + end +end +AbstractImageReconstruction.parameter(algo::IterativeRadonAlgorithm) = algo.parameter diff --git a/docs/src/literate/example/5_iterative_result.jl b/docs/src/literate/example/5_iterative_result.jl new file mode 100644 index 0000000..5b92cf3 --- /dev/null +++ b/docs/src/literate/example/5_iterative_result.jl @@ -0,0 +1,59 @@ +include("../../literate/example/1_interface.jl") #hide +include("../../literate/example/2_direct.jl") #hide +include("../../literate/example/4_iterative.jl") #hide +using RadonKA, ImagePhantoms, ImageGeoms, CairoMakie, AbstractImageReconstruction #hide +using CairoMakie #hide +function plot_image(figPos, img; title = "", width = 150, height = 150) #hide + ax = CairoMakie.Axis(figPos[1, 1]; yreversed=true, title, width, height) #hide + hidedecorations!(ax) #hide + hm = heatmap!(ax, img) #hide + Colorbar(figPos[1, 2], hm) #hide +end #hide +angles = collect(range(0, π, 256)) #hide +shape = (64, 64, 64) #hide +params = map(collect, ellipsoid_parameters(; fovs = shape)) #hide +toft_settings = [1.0, -0.8, -0.2, -0.2, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1] #hide +for idx in eachindex(toft_settings) #hide + params[idx][10] = toft_settings[idx] #hide +end #hide +ob = ellipsoid(map(Tuple, params)) #hide +ig = ImageGeom(;dims = shape) #hide +image = phantom(axes(ig)..., ob) #hide +sinogram = Array(RadonKA.radon(image, angles)) #hide +sinograms = similar(sinogram, size(sinogram)..., 5) #hide +images = similar(image, size(image)..., 5) #hide +for (i, intensity) in enumerate(range(params[3][end], 0.3, 5)) #hide + params[3][end] = intensity #hide + local ob = ellipsoid(map(Tuple, params)) #hide + local ig = ImageGeom(;dims = shape) #hide + images[:, :, :, i] = phantom(axes(ig)..., ob) #hide + sinograms[:, :, :, i] = Array(RadonKA.radon(images[:, :, :, i], angles)) #hide +end #hide + + +# # Direct Reconstruction Result +# Now that we have implemented our direct reconstruction algorithm, we can use it to reconstruct for example the first three images of our time series. +# We first prepare our parameters: +pre = RadonPreprocessingParameters(frames = collect(1:3)) +iter_reco = IterativeRadonReconstructionParameters(; shape = size(images)[1:3], angles = angles, iterations = 20, reg = [L2Regularization(0.001), PositiveRegularization()], solver = CGNR) + +# Then we can construct the algorithms: +algo_iter = IterativeRadonAlgorithm(IterativeRadonParameters(pre, iter_reco)) + +# And apply them to our sinograms: +imag_iter = reconstruct(algo_iter, sinograms) + +# Finally we can visualize the results: +fig = Figure() +for i = 1:3 + plot_image(fig[i,1], reverse(images[:, :, 24, i])) + plot_image(fig[i,2], sinograms[:, :, 24, i]) + plot_image(fig[i,3], reverse(imag_iter[:, :, 24, i])) +end +resize_to_layout!(fig) +fig + +# To add new functionality to our direct reconstruction algorithm, we can simply add new preprocessing or reconstruction parameters and implement the according processing steps. This way we can easily extend our algorithm to support new features. +# We can also define our own file format to store the results of our reconstruction algorithms or dispatch our preprocessing on files and pass files instead of the sinograms directly. 
+ +# The way we use the algorithms here, requires the user to reconstruct the algorithm for each changed parameter. After we implement the more complex iterative reconstructions, we will take a look at how to make this process more manageable. \ No newline at end of file From aee5ebafe209310e0eeecaad2f16214754c40ed6 Mon Sep 17 00:00:00 2001 From: nHackel Date: Tue, 3 Dec 2024 18:51:07 +0100 Subject: [PATCH 3/7] Suppress output in iter results --- docs/src/literate/example/5_iterative_result.jl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/src/literate/example/5_iterative_result.jl b/docs/src/literate/example/5_iterative_result.jl index 5b92cf3..96b2c6f 100644 --- a/docs/src/literate/example/5_iterative_result.jl +++ b/docs/src/literate/example/5_iterative_result.jl @@ -35,13 +35,13 @@ end #hide # Now that we have implemented our direct reconstruction algorithm, we can use it to reconstruct for example the first three images of our time series. # We first prepare our parameters: pre = RadonPreprocessingParameters(frames = collect(1:3)) -iter_reco = IterativeRadonReconstructionParameters(; shape = size(images)[1:3], angles = angles, iterations = 20, reg = [L2Regularization(0.001), PositiveRegularization()], solver = CGNR) +iter_reco = IterativeRadonReconstructionParameters(; shape = size(images)[1:3], angles = angles, iterations = 20, reg = [L2Regularization(0.001), PositiveRegularization()], solver = CGNR); # Then we can construct the algorithms: -algo_iter = IterativeRadonAlgorithm(IterativeRadonParameters(pre, iter_reco)) +algo_iter = IterativeRadonAlgorithm(IterativeRadonParameters(pre, iter_reco)); # And apply them to our sinograms: -imag_iter = reconstruct(algo_iter, sinograms) +imag_iter = reconstruct(algo_iter, sinograms); # Finally we can visualize the results: fig = Figure() From 6aaad96dc69ac8768843988ca91c485f1e4324cf Mon Sep 17 00:00:00 2001 From: nHackel Date: Thu, 5 Dec 2024 09:01:19 +0100 Subject: [PATCH 4/7] Simplify example code loading --- docs/src/literate/example/3_direct_result.jl | 32 +----- .../literate/example/5_iterative_result.jl | 97 +++++++++++-------- docs/src/literate/example/example_include.jl | 31 ++++++ 3 files changed, 88 insertions(+), 72 deletions(-) create mode 100644 docs/src/literate/example/example_include.jl diff --git a/docs/src/literate/example/3_direct_result.jl b/docs/src/literate/example/3_direct_result.jl index 4bef270..8105e56 100644 --- a/docs/src/literate/example/3_direct_result.jl +++ b/docs/src/literate/example/3_direct_result.jl @@ -1,34 +1,4 @@ -include("../../literate/example/1_interface.jl") #hide -include("../../literate/example/2_direct.jl") #hide -using RadonKA, ImagePhantoms, ImageGeoms, CairoMakie, AbstractImageReconstruction #hide -using CairoMakie #hide -function plot_image(figPos, img; title = "", width = 150, height = 150) #hide - ax = CairoMakie.Axis(figPos[1, 1]; yreversed=true, title, width, height) #hide - hidedecorations!(ax) #hide - hm = heatmap!(ax, img) #hide - Colorbar(figPos[1, 2], hm) #hide -end #hide -angles = collect(range(0, π, 256)) #hide -shape = (64, 64, 64) #hide -params = map(collect, ellipsoid_parameters(; fovs = shape)) #hide -toft_settings = [1.0, -0.8, -0.2, -0.2, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1] #hide -for idx in eachindex(toft_settings) #hide - params[idx][10] = toft_settings[idx] #hide -end #hide -ob = ellipsoid(map(Tuple, params)) #hide -ig = ImageGeom(;dims = shape) #hide -image = phantom(axes(ig)..., ob) #hide -sinogram = Array(RadonKA.radon(image, angles)) #hide 
-sinograms = similar(sinogram, size(sinogram)..., 5) #hide -images = similar(image, size(image)..., 5) #hide -for (i, intensity) in enumerate(range(params[3][end], 0.3, 5)) #hide - params[3][end] = intensity #hide - local ob = ellipsoid(map(Tuple, params)) #hide - local ig = ImageGeom(;dims = shape) #hide - images[:, :, :, i] = phantom(axes(ig)..., ob) #hide - sinograms[:, :, :, i] = Array(RadonKA.radon(images[:, :, :, i], angles)) #hide -end #hide - +include("../../literate/example/example_include.jl") #hide # # Direct Reconstruction Result # Now that we have implemented our direct reconstruction algorithm, we can use it to reconstruct for example the first three images of our time series. diff --git a/docs/src/literate/example/5_iterative_result.jl b/docs/src/literate/example/5_iterative_result.jl index 96b2c6f..9d4b275 100644 --- a/docs/src/literate/example/5_iterative_result.jl +++ b/docs/src/literate/example/5_iterative_result.jl @@ -1,46 +1,15 @@ -include("../../literate/example/1_interface.jl") #hide -include("../../literate/example/2_direct.jl") #hide -include("../../literate/example/4_iterative.jl") #hide -using RadonKA, ImagePhantoms, ImageGeoms, CairoMakie, AbstractImageReconstruction #hide -using CairoMakie #hide -function plot_image(figPos, img; title = "", width = 150, height = 150) #hide - ax = CairoMakie.Axis(figPos[1, 1]; yreversed=true, title, width, height) #hide - hidedecorations!(ax) #hide - hm = heatmap!(ax, img) #hide - Colorbar(figPos[1, 2], hm) #hide -end #hide -angles = collect(range(0, π, 256)) #hide -shape = (64, 64, 64) #hide -params = map(collect, ellipsoid_parameters(; fovs = shape)) #hide -toft_settings = [1.0, -0.8, -0.2, -0.2, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1] #hide -for idx in eachindex(toft_settings) #hide - params[idx][10] = toft_settings[idx] #hide -end #hide -ob = ellipsoid(map(Tuple, params)) #hide -ig = ImageGeom(;dims = shape) #hide -image = phantom(axes(ig)..., ob) #hide -sinogram = Array(RadonKA.radon(image, angles)) #hide -sinograms = similar(sinogram, size(sinogram)..., 5) #hide -images = similar(image, size(image)..., 5) #hide -for (i, intensity) in enumerate(range(params[3][end], 0.3, 5)) #hide - params[3][end] = intensity #hide - local ob = ellipsoid(map(Tuple, params)) #hide - local ig = ImageGeom(;dims = shape) #hide - images[:, :, :, i] = phantom(axes(ig)..., ob) #hide - sinograms[:, :, :, i] = Array(RadonKA.radon(images[:, :, :, i], angles)) #hide -end #hide - - -# # Direct Reconstruction Result -# Now that we have implemented our direct reconstruction algorithm, we can use it to reconstruct for example the first three images of our time series. -# We first prepare our parameters: +include("../../literate/example/example_include.jl") #hide + +# # Iterative Reconstruction Result +# We can now use the iterative algorithms to reconstruct the first three images of our time series. +# We first prepare our parameters. For this example we will use the Conjugate Gradient Normal Residual solver with 20 iterations and a L2 regularization of 0.001. 
Furthermore, we will project the final result to positive values: pre = RadonPreprocessingParameters(frames = collect(1:3)) iter_reco = IterativeRadonReconstructionParameters(; shape = size(images)[1:3], angles = angles, iterations = 20, reg = [L2Regularization(0.001), PositiveRegularization()], solver = CGNR); -# Then we can construct the algorithms: +# Again we can construct the algorithm with our parameters: algo_iter = IterativeRadonAlgorithm(IterativeRadonParameters(pre, iter_reco)); -# And apply them to our sinograms: +# And apply it to our sinograms: imag_iter = reconstruct(algo_iter, sinograms); # Finally we can visualize the results: @@ -53,7 +22,53 @@ end resize_to_layout!(fig) fig -# To add new functionality to our direct reconstruction algorithm, we can simply add new preprocessing or reconstruction parameters and implement the according processing steps. This way we can easily extend our algorithm to support new features. -# We can also define our own file format to store the results of our reconstruction algorithms or dispatch our preprocessing on files and pass files instead of the sinograms directly. +# As was already mentioned for the direct reconstruction, the iterative algorithm also needs to be recreated for any parameter change. This can already be quite tedious with the number of parameters we have here. +# To make this process easier, we can use the `RecoPlan` feature of `AbstractImageReconstruction`. This allows us to define a plan for the reconstruction, which can then be used to easily reconstruct the images. + +# ## RecoPlan +# A `RecoPlan` is a thin-wrapper around nested key-value pairs that represent the same tree structure of our algorithm and parameters. The plan can be fully parametertized and then used to create the algorithm. +# But it can also miss parameters and describe the structure of the algorithm only. This can be useful to create a template for the reconstruction, which can be filled with parameters later on. + +# We can create a plan from our algorithm: +plan = toPlan(algo_iter) + +# The parameters of our plan can be accessed/traversed the same way as the algorithm: +plan.parameter.pre.frames == algo_iter.parameter.pre.frames + +# And each nested algorithm and parameter struct was converted to a plan: +typeof(plan.parameter.pre) + +# Unlike the algorithm, we can easily change the parameters of the plan: +plan.parameter.reco.iterations = 30 +plan.parameter.pre.frames = collect(1:5) + +# Instead of traversing the properties of the plan/algorithm, we can also use the `setAll!` function to set all parameters of the same of the plan at once: +setAll!(plan, :solver, FISTA); +# This also works with dictionaries of symbols and values: +dict = Dict{Symbol, Any}(:reg => [L1Regularization(0.001), PositiveRegularization()]) +setAll!(plan, dict); + +# Once we have parametertized our plan, we can build the algorithm from it: +algo_iter = build(plan) +imag_iter = reconstruct(algo_iter, sinograms) +fig = Figure() +for i = 1:5 + plot_image(fig[i,1], reverse(images[:, :, 24, i])) + plot_image(fig[i,2], sinograms[:, :, 24, i]) + plot_image(fig[i,3], reverse(imag_iter[:, :, 24, i])) +end +resize_to_layout!(fig) +fig + +# It's also possible to clear the plan and remove all set parameters. By default this preserves the structure of the plan. 
Such a plan can be used as a template for further reconstructions and stored as a TOML file: +toTOML(stdout, plan) # to save to file use `savePlan("path/to/file.toml", plan)` + +# After clearing we just have the structure of the plan left: +clear!(plan) +toTOML(stdout, plan) # to save to file use `savePlan("path/to/file.toml", plan)` + +# Note that the serialization here is not the same as storing the binary representation of the algorithm or the RecoPlan. +# We essentially store key-value pairs which can be used in keyword-argument constructors to recreate the algorithm, however depending on the version of our Reco package, the underlying structs might change. +# It is also possible to define custom serialization and deserialization functions for a plan's parameters. -# The way we use the algorithms here, requires the user to reconstruct the algorithm for each changed parameter. After we implement the more complex iterative reconstructions, we will take a look at how to make this process more manageable. \ No newline at end of file +# For more information on `RecoPlan`, see the how-to guides in the documentation. \ No newline at end of file diff --git a/docs/src/literate/example/example_include.jl b/docs/src/literate/example/example_include.jl new file mode 100644 index 0000000..6d95fd6 --- /dev/null +++ b/docs/src/literate/example/example_include.jl @@ -0,0 +1,31 @@ +include("../../literate/example/1_interface.jl") +include("../../literate/example/2_direct.jl") +include("../../literate/example/4_iterative.jl") +using RadonKA, ImagePhantoms, ImageGeoms, CairoMakie, AbstractImageReconstruction +using CairoMakie +function plot_image(figPos, img; title = "", width = 150, height = 150) + ax = CairoMakie.Axis(figPos[1, 1]; yreversed=true, title, width, height) + hidedecorations!(ax) + hm = heatmap!(ax, img) + Colorbar(figPos[1, 2], hm) +end +angles = collect(range(0, π, 256)) +shape = (64, 64, 64) +params = map(collect, ellipsoid_parameters(; fovs = shape)) +toft_settings = [1.0, -0.8, -0.2, -0.2, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1] +for idx in eachindex(toft_settings) + params[idx][10] = toft_settings[idx] +end +ob = ellipsoid(map(Tuple, params)) +ig = ImageGeom(;dims = shape) +image = phantom(axes(ig)..., ob) +sinogram = Array(RadonKA.radon(image, angles)) +sinograms = similar(sinogram, size(sinogram)..., 5) +images = similar(image, size(image)..., 5) +for (i, intensity) in enumerate(range(params[3][end], 0.3, 5)) + params[3][end] = intensity + local ob = ellipsoid(map(Tuple, params)) + local ig = ImageGeom(;dims = shape) + images[:, :, :, i] = phantom(axes(ig)..., ob) + sinograms[:, :, :, i] = Array(RadonKA.radon(images[:, :, :, i], angles)) +end From 88fdf4ecf78814d931ed4528f930b2f336858c38 Mon Sep 17 00:00:00 2001 From: nHackel Date: Thu, 5 Dec 2024 09:01:34 +0100 Subject: [PATCH 5/7] Update example intro --- docs/src/example_intro.md | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/docs/src/example_intro.md b/docs/src/example_intro.md index f95b459..6d1ff10 100644 --- a/docs/src/example_intro.md +++ b/docs/src/example_intro.md @@ -1,16 +1,18 @@ # Small Reconstruction Package for Radon projections In this example we will implement a small image reconstruction package with the help of `AbstractImageReconstruction.jl`. Our example reconstruction package aims to provide direct and iterative reconstruction algorithms for Radon projection data. -Most of the desired functionality is already implemented in various Julia packages. 
Our reconstruction packages now needs to properly connect these packages and transform the data into the appropriate formats for each package. +Most of the desired functionality is already implemented in various Julia packages. Our reconstruction packages now needs to properly connect these packages and transform the data into the appropriate formats for each package. + +The example is intended for developers of reconstruction packages that use `AbstractImageReconstruction`. End-users of such a package can consult the result sections of the example to see the high-level interface of `AbstractImagerReconstruction` and should otherwise consult the documentation of the concrete reconstruction package itself. ## Installation -In addition to AbstractImageReconstruction.jl, we will need a few more packages to get started. We can install these packages using the Julia package manager. Open a Julia REPL and run the following command: +We can install `AbstractImageReconstruction` using the Julia package manager. Open a Julia REPL and run the following command: ```julia using Pkg Pkg.add("AbstractImageReconstruction") ``` -This will download and install AbstractImageReconstruction.jl and its dependencies. To install a different version, please consult the [Pkg documentation](https://pkgdocs.julialang.org/dev/managing-packages/#Adding-packages). +This will download and install AbstractImageReconstruction.jl and its dependencies. To install a different version, please consult the [Pkg documentation](https://pkgdocs.julialang.org/dev/managing-packages/#Adding-packages). In addition to AbstractImageReconstruction.jl, we will need a few more packages to get started, which we can install the same way. [RadonKA.jl](https://github.com/roflmaostc/RadonKA.jl/tree/main) provides us with fast Radon forward and backprojections, which we can use for direct reconstructions and preparing example data for our package. @@ -24,4 +26,16 @@ This will download and install AbstractImageReconstruction.jl and its dependenci Lastly, we will use [CairoMakie.jl](https://docs.makie.org/stable/) to visualize our results. ## Outline +[Radon Data](generated/example/0_radon_data.md): In this section we get familiar with RadonKA.jl and define a small dataformat for three-dimensional time-series sinograms. We also create the inverse problem, which we want to solve in the remainder of the example. + +[Interface](generated/example/1_interface.md): Here we define the abstract types we will use in our package and take a look at what we need to implement to interact with `AbstractImageReconstruction`. We also start with a first processing step of our algorithms. + +[Direct Reconstruction](generated/example/2_direct.md): Now we extend our abstract types with a concrete implementation of reconstruction algorithms using the backprojection and filtered backprojection. + +[Direct Reconstruction Result](generated/example/3_direct_result.md): This section shows how to use the algorithm we just implemented. + +[Iterative Reconstruction](generated/example/4_iterative.md): We finish our small example package by implementing an iterative reconstruction algorithm. For this algorithm we require more complex parametrization and data processing. + +[Iterative Reconstruction Result](generated/example/5_iterative_result.md): The last section again shows how to use the just implemented algorithm. But it also highlights `RecoPlans`, which are a core utility of `AbstractImageReconstruction`. 
These plans allow a user to easily configure, store and load algorithms as templates. +For an even more indepth reconstruction package we refer to the magnetic particle imaging reconstruction package [MPIReco.jl](https://github.com/MagneticParticleImaging/MPIReco.jl). From 542c535ebd894c2fdb4c541acfb2075f6ef97371 Mon Sep 17 00:00:00 2001 From: nHackel Date: Tue, 10 Dec 2024 17:37:15 +0100 Subject: [PATCH 6/7] Update docs --- docs/Project.toml | 1 + docs/make.jl | 6 +- docs/src/example_intro.md | 3 +- docs/src/index.md | 4 +- docs/src/literate/example/1_interface.jl | 1 + docs/src/literate/example/2_direct.jl | 5 +- docs/src/literate/example/3_direct_result.jl | 2 +- docs/src/literate/example/4_iterative.jl | 9 +- .../literate/example/5_iterative_result.jl | 2 +- .../literate/example/example_include_all.jl | 2 + ...ple_include.jl => example_include_data.jl} | 8 +- .../example/example_include_module.jl | 12 +++ docs/src/literate/howto/caching.jl | 69 ++++++++++++++ docs/src/literate/howto/observables.jl | 63 +++++++++++++ docs/src/literate/howto/serialization.jl | 89 +++++++++++++++++++ 15 files changed, 257 insertions(+), 19 deletions(-) create mode 100644 docs/src/literate/example/example_include_all.jl rename docs/src/literate/example/{example_include.jl => example_include_data.jl} (86%) create mode 100644 docs/src/literate/example/example_include_module.jl create mode 100644 docs/src/literate/howto/caching.jl create mode 100644 docs/src/literate/howto/observables.jl create mode 100644 docs/src/literate/howto/serialization.jl diff --git a/docs/Project.toml b/docs/Project.toml index aa5ebf1..ce0c136 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -7,6 +7,7 @@ ImageGeoms = "9ee76f2b-840d-4475-b6d6-e485c9297852" ImagePhantoms = "71a99df6-f52c-4da1-bd2a-69d6f37f3252" LinearOperatorCollection = "a4a2c56f-fead-462a-a3ab-85921a5f2575" Literate = "98b081ad-f1c9-55d3-8b20-4c87d4299306" +Observables = "510215fc-4207-5dde-b226-833fc4488ee2" RadonKA = "86de8297-835b-47df-b249-c04e8db91db5" RegularizedLeastSquares = "1e9c538a-f78c-5de5-8ffb-0b6dbe892d23" diff --git a/docs/make.jl b/docs/make.jl index 17fa829..904a052 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -36,9 +36,9 @@ makedocs( "Iterative Reconstruction Result" => "generated/example/5_iterative_result.md", ], "How to" => Any[ - #"Construct RecoPlan" => "generated/howto/reco_plan.md", - #"Caching" => "generated/howto/caching.md", - #"Listeners" => "generated/howto/listeners.md", + "Serialization" => "generated/howto/serialization.md", + "Caching" => "generated/howto/caching.md", + "Observables" => "generated/howto/observables.md", ], #"API Reference" => Any["Solvers" => "API/solvers.md", #"Regularization Terms" => "API/regularization.md"], diff --git a/docs/src/example_intro.md b/docs/src/example_intro.md index 6d1ff10..e8f4032 100644 --- a/docs/src/example_intro.md +++ b/docs/src/example_intro.md @@ -3,7 +3,8 @@ In this example we will implement a small image reconstruction package with the Most of the desired functionality is already implemented in various Julia packages. Our reconstruction packages now needs to properly connect these packages and transform the data into the appropriate formats for each package. -The example is intended for developers of reconstruction packages that use `AbstractImageReconstruction`. 
End-users of such a package can consult the result sections of the example to see the high-level interface of `AbstractImagerReconstruction` and should otherwise consult the documentation of the concrete reconstruction package itself. +!!! note + The example is intended for developers of reconstruction packages that use `AbstractImageReconstruction`. End-users of such a package can consult the result sections of the example to see the high-level interface of `AbstractImagerReconstruction` and should otherwise consult the documentation of the concrete reconstruction package itself. ## Installation We can install `AbstractImageReconstruction` using the Julia package manager. Open a Julia REPL and run the following command: diff --git a/docs/src/index.md b/docs/src/index.md index 6b3ddaa..08bca82 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -8,9 +8,9 @@ AbstractImageReconstruction.jl is a Julia package that serves as the core API fo ## Features -* Variety of optimization algorithms optimized for least squares problems * Storing, loading and manipulating of reconstruction algorithms with (partially) set parameters -* Attaching callback listeners to parameters +* Attaching callbacks to parameters changes with Observables.jl +* Transparent caching of intermediate reconstruction results ## Installation diff --git a/docs/src/literate/example/1_interface.jl b/docs/src/literate/example/1_interface.jl index dafede0..35be854 100644 --- a/docs/src/literate/example/1_interface.jl +++ b/docs/src/literate/example/1_interface.jl @@ -12,6 +12,7 @@ # For our package we extend these abstract types with our own abstract subtypes: using AbstractImageReconstruction +export AbstractRadonAlgorithm, AbstractRadonParameters, AbstractRadonPreprocessingParameters, AbstractRadonReconstructionParameters, AbstractDirectRadonAlgorithm, AbstractIterativeRadonAlgorithm, RadonPreprocessingParameters # hide abstract type AbstractRadonAlgorithm <: AbstractImageReconstructionAlgorithm end abstract type AbstractRadonParameters <: AbstractImageReconstructionParameters end diff --git a/docs/src/literate/example/2_direct.jl b/docs/src/literate/example/2_direct.jl index f68a80b..1fb6887 100644 --- a/docs/src/literate/example/2_direct.jl +++ b/docs/src/literate/example/2_direct.jl @@ -1,5 +1,6 @@ include("../../literate/example/1_interface.jl") #hide using RadonKA #hide +export AbstractDirectRadonReconstructionParameters, RadonFilteredBackprojectionParameters, RadonBackprojectionParameters, DirectRadonParameters, DirectRadonAlgorithm #hide # # Direct Reconstruction # To implement our direct reconstruction algorithms we need to define a few more methods and types. We will start by defining the parameters for the backprojection and for the filtered backprojection. Afterwards we can implement the algorithm itself. @@ -33,7 +34,7 @@ AbstractImageReconstruction.process(::Type{<:AbstractDirectRadonAlgorithm}, para # ## Algorithm # The direct reconstruction algorithm has essentially no state to store between reconstructions and thus only needs its parameters as fields. We want our algorithm to accept any combination of our preprocessing and direct reconstruction parameters. 
# This we encode in a new type: -Base.@kwdef struct DirectRadonParameters{P, R} <: AbstractRadonParameters where {P<:AbstractRadonPreprocessingParameters, R<:AbstractDirectRadonReconstructionParameters} +Base.@kwdef struct DirectRadonParameters{P <: AbstractRadonPreprocessingParameters, R <: AbstractDirectRadonReconstructionParameters} <: AbstractRadonParameters pre::P reco::R end @@ -44,7 +45,7 @@ function AbstractImageReconstruction.process(algoT::Type{<:AbstractDirectRadonAl end # Now we can define the algorithm type itself. Algorithms are usually constructed with one argument passing in the user parameters: -mutable struct DirectRadonAlgorithm{D} <: AbstractDirectRadonAlgorithm where D <: DirectRadonParameters +mutable struct DirectRadonAlgorithm{D <: DirectRadonParameters} <: AbstractDirectRadonAlgorithm parameter::D output::Channel{Any} DirectRadonAlgorithm(parameter::D) where D = new{D}(parameter, Channel{Any}(Inf)) diff --git a/docs/src/literate/example/3_direct_result.jl b/docs/src/literate/example/3_direct_result.jl index 8105e56..539d7a0 100644 --- a/docs/src/literate/example/3_direct_result.jl +++ b/docs/src/literate/example/3_direct_result.jl @@ -1,4 +1,4 @@ -include("../../literate/example/example_include.jl") #hide +include("../../literate/example/example_include_all.jl") #hide # # Direct Reconstruction Result # Now that we have implemented our direct reconstruction algorithm, we can use it to reconstruct for example the first three images of our time series. diff --git a/docs/src/literate/example/4_iterative.jl b/docs/src/literate/example/4_iterative.jl index 35905a5..ac71dd9 100644 --- a/docs/src/literate/example/4_iterative.jl +++ b/docs/src/literate/example/4_iterative.jl @@ -1,5 +1,6 @@ include("../../literate/example/1_interface.jl") #hide using RadonKA #hide +export AbstractIterativeRadonReconstructionParameters, IterativeRadonReconstructionParameters, IterativeRadonParameters, IterativeRadonAlgorithm #hide # # Iterative Reconstruction # In this section we implement a more complex iterative reconstruction algorithm. @@ -38,14 +39,14 @@ end # ## Algorithm # Similar to the direct reconstruction algorithm, we want our iterative algorithm to accept both preprocessing and reconstruction parameters. We will encode this in a new type: -Base.@kwdef struct IterativeRadonParameters{P, R} <: AbstractRadonParameters where {P<:AbstractRadonPreprocessingParameters, R<:AbstractIterativeRadonReconstructionParameters} +Base.@kwdef struct IterativeRadonParameters{P<:AbstractRadonPreprocessingParameters, R<:AbstractIterativeRadonReconstructionParameters} <: AbstractRadonParameters pre::P reco::R end # Instead of defining essentially the same struct again, we could also define a more generic one and specify the supported reconstruction parameter as type constraints in the algorithm constructor. # Unlike the direct reconstruction algorithm, the iterative algorithm has to store the linear operator. We will store it as a field in the algorithm type: -mutable struct IterativeRadonAlgorithm{D} <: AbstractIterativeRadonAlgorithm where D <: IterativeRadonParameters +mutable struct IterativeRadonAlgorithm{D <: IterativeRadonParameters} <: AbstractIterativeRadonAlgorithm parameter::D op::Union{Nothing, AbstractLinearOperator} output::Channel{Any} @@ -69,7 +70,7 @@ function AbstractImageReconstruction.process(algo::IterativeRadonAlgorithm, para return process(AbstractIterativeRadonAlgorithm, params, op, data) end -# As it stands our algorithm is not type stable. 
To fix this, we would need to know the element type during construction. Which is possible with a different parameterization of the algorithm. We will not do this here. +# Our algorithm is not type stable. To fix this, we would need to know the element type of the sinograms during construction. Which is possible with a different parameterization of the algorithm. We will not do this here. # Often times the performance impact of this is negligible as the critical sections are in the preprocessing or the iterative solver, especially since we still dispatch on the operator. # To finish up the implementation we need to implement the `put!`, `take!` and `parameters` functions: @@ -79,4 +80,4 @@ function Base.put!(algo::IterativeRadonAlgorithm, data::AbstractArray{T, 4}) whe put!(algo.output, process(algo, algo.parameter, data)) end end -AbstractImageReconstruction.parameter(algo::IterativeRadonAlgorithm) = algo.parameter +AbstractImageReconstruction.parameter(algo::IterativeRadonAlgorithm) = algo.parameter \ No newline at end of file diff --git a/docs/src/literate/example/5_iterative_result.jl b/docs/src/literate/example/5_iterative_result.jl index 9d4b275..6fea879 100644 --- a/docs/src/literate/example/5_iterative_result.jl +++ b/docs/src/literate/example/5_iterative_result.jl @@ -1,4 +1,4 @@ -include("../../literate/example/example_include.jl") #hide +include("../../literate/example/example_include_all.jl") #hide # # Iterative Reconstruction Result # We can now use the iterative algorithms to reconstruct the first three images of our time series. diff --git a/docs/src/literate/example/example_include_all.jl b/docs/src/literate/example/example_include_all.jl new file mode 100644 index 0000000..5ebd03d --- /dev/null +++ b/docs/src/literate/example/example_include_all.jl @@ -0,0 +1,2 @@ +include("../../literate/example/example_include_module.jl") +include("../../literate/example/example_include_data.jl") \ No newline at end of file diff --git a/docs/src/literate/example/example_include.jl b/docs/src/literate/example/example_include_data.jl similarity index 86% rename from docs/src/literate/example/example_include.jl rename to docs/src/literate/example/example_include_data.jl index 6d95fd6..9b380bd 100644 --- a/docs/src/literate/example/example_include.jl +++ b/docs/src/literate/example/example_include_data.jl @@ -1,8 +1,6 @@ -include("../../literate/example/1_interface.jl") -include("../../literate/example/2_direct.jl") -include("../../literate/example/4_iterative.jl") -using RadonKA, ImagePhantoms, ImageGeoms, CairoMakie, AbstractImageReconstruction +using RadonKA, ImagePhantoms, ImageGeoms, CairoMakie, AbstractImageReconstruction, RegularizedLeastSquares using CairoMakie +using .OurRadonReco function plot_image(figPos, img; title = "", width = 150, height = 150) ax = CairoMakie.Axis(figPos[1, 1]; yreversed=true, title, width, height) hidedecorations!(ax) @@ -28,4 +26,4 @@ for (i, intensity) in enumerate(range(params[3][end], 0.3, 5)) local ig = ImageGeom(;dims = shape) images[:, :, :, i] = phantom(axes(ig)..., ob) sinograms[:, :, :, i] = Array(RadonKA.radon(images[:, :, :, i], angles)) -end +end \ No newline at end of file diff --git a/docs/src/literate/example/example_include_module.jl b/docs/src/literate/example/example_include_module.jl new file mode 100644 index 0000000..610f258 --- /dev/null +++ b/docs/src/literate/example/example_include_module.jl @@ -0,0 +1,12 @@ +module OurRadonReco + +using RadonKA, ImagePhantoms, ImageGeoms, CairoMakie, AbstractImageReconstruction, 
RegularizedLeastSquares
+
+
+include("../../literate/example/1_interface.jl")
+include("../../literate/example/2_direct.jl")
+include("../../literate/example/4_iterative.jl")
+
+end
+
+using .OurRadonReco
\ No newline at end of file
diff --git a/docs/src/literate/howto/caching.jl b/docs/src/literate/howto/caching.jl
new file mode 100644
index 0000000..904b484
--- /dev/null
+++ b/docs/src/literate/howto/caching.jl
@@ -0,0 +1,69 @@
+include("../../literate/example/example_include_all.jl") #hide
+
+
+# # Caching
+# Image reconstruction algorithms can be computationally expensive. To avoid unnecessary recomputations, we can cache the results of processing steps.
+# This can be especially helpful if a user wants to change parameters of an algorithm that only impact parts of the processing.
+# We have seen a small example of caching in the `IterativeRadonAlgorithm` implementation. There the algorithm itself cached a result in its properties.
+# `AbstractImageReconstruction` provides a more general caching mechanism that can be used for any processing step. However, the caching mechanism is not enabled by default and has to be explicitly implemented by the algorithm developer.
+
+# This How-To builds on the results of the example sections.
+
+# ## ProcessResultCache
+# The caching mechanism is based on the `ProcessResultCache` type. This type wraps around another `AbstractImageReconstructionParameters` and caches the result of a processing step.
+# The cache itself is connected to a `RecoPlan` and any instances built from the same plan instance share this cache and can reuse the result of the processing step.
+
+# Let's use the `ProcessResultCache` type for the Radon preprocessing step. We first define a parameter struct for a very costly preprocessing step:
+Base.@kwdef struct CostlyPreprocessingParameters <: AbstractRadonPreprocessingParameters
+  frames::Vector{Int64} = []
+  runtime::Float64 = 1.0
+end
+function AbstractImageReconstruction.process(::Type{<:AbstractRadonAlgorithm}, params::CostlyPreprocessingParameters, data::AbstractArray{T, 4}) where {T}
+  frames = isempty(params.frames) ? (1:size(data, 4)) : params.frames
+  data = data[:, :, :, frames]
+  @info "Very costly preprocessing step"
+  sleep(params.runtime)
+  return data
+end
+
+# Now we can define a processing step that internally uses another processing step. We allow this inner parameter to be cached by considering the following `Union`:
+Base.@kwdef struct RadonCachedPreprocessingParameters{P <: AbstractRadonPreprocessingParameters} <: AbstractRadonPreprocessingParameters
+  params::Union{P, ProcessResultCache{P}}
+end
+# Note that this case is a bit artificial and a more sensible place would be the algorithm parameters themselves. However, for the sake of simplicity we did not introduce the concept in the example.
+# In this artificial case we just pass the parameters to the processing step. A real implementation might do some more processing with the result of the inner processing step:
+AbstractImageReconstruction.process(algoT::Type{<:AbstractRadonAlgorithm}, params::RadonCachedPreprocessingParameters, args...) = process(algoT, params.params, args...)
+
+# We deliberately implement the `process` function for the algorithm type to avoid our cache being invalidated by state changes of an algorithm instance.
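To illustrate the remark above, here is a minimal, hypothetical sketch of how the same `Union` pattern could sit directly in the algorithm parameters of the example package. The struct name `CachedDirectRadonParameters` is made up for this sketch and is not part of the example; everything else reuses the abstract types of the example and `ProcessResultCache`:

```julia
# Hypothetical variant of the example's DirectRadonParameters in which the
# preprocessing parameter may optionally be wrapped in a ProcessResultCache.
Base.@kwdef struct CachedDirectRadonParameters{P <: AbstractRadonPreprocessingParameters, R <: AbstractDirectRadonReconstructionParameters} <: AbstractRadonParameters
  pre::Union{P, ProcessResultCache{P}} # plain parameter or cached parameter
  reco::R
end

# The processing chain stays the same; if `pre` is a ProcessResultCache, its
# result is looked up in the cache instead of being recomputed.
function AbstractImageReconstruction.process(algoT::Type{<:AbstractDirectRadonAlgorithm}, params::CachedDirectRadonParameters, data::AbstractArray{T, 4}) where {T}
  data = process(algoT, params.pre, data)
  return process(algoT, params.reco, data)
end
```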
+
+# Now we construct a plan for an iterative reconstruction algorithm that uses the costly preprocessing step:
+pre = CostlyPreprocessingParameters(; frames = collect(1:3), runtime = 1.0)
+preCached = RadonCachedPreprocessingParameters(ProcessResultCache(pre, maxsize = 2))
+prePlan = toPlan(preCached)
+recoPlan = RecoPlan(IterativeRadonReconstructionParameters; angles = angles, shape = size(images)[1:3],
+  iterations = 10, reg = [L2Regularization(0.001), PositiveRegularization()], solver = CGNR)
+params = RecoPlan(IterativeRadonParameters; pre = prePlan, reco = recoPlan)
+plan = RecoPlan(IterativeRadonAlgorithm; parameter = params)
+
+# When we build the algorithm from the plan, the costly preprocessing step is only executed once and the result is cached and can be reused:
+algo = build(plan)
+reconstruct(algo, sinograms);
+reconstruct(algo, sinograms);
+
+# If we change the parameters of the algorithm without affecting the preprocessing step, we can still reuse the cached result:
+setAll!(plan, :iterations, 5)
+algo = build(plan)
+reconstruct(algo, sinograms);
+plan.parameter.reco.iterations == 5
+
+# `ProcessResultCache` uses a least recently used (LRU) strategy to store the results. The cache size can be set by the user and defaults to 1. If the cache is full, the least recently used result is removed from the cache.
+
+# The cache is checked with a key formed from the hash of all arguments of the processing step. `AbstractImageReconstruction` provides a default `hash` method for `AbstractImageReconstructionParameters`, which hashes all properties of a parameter.
+
+# Other methods of cache invalidation are creating a new plan or manual invalidation of the cache:
+empty!(algo.parameter.pre.params)
+reconstruct(algo, sinograms);
+
+# Caches support serialization like other `RecoPlans`:
+clear!(plan)
+toTOML(stdout, plan)
\ No newline at end of file
diff --git a/docs/src/literate/howto/observables.jl b/docs/src/literate/howto/observables.jl
new file mode 100644
index 0000000..8fc99e3
--- /dev/null
+++ b/docs/src/literate/howto/observables.jl
@@ -0,0 +1,63 @@
+include("../../literate/example/example_include_all.jl") #hide
+
+# # Observables
+# Observables from [Observables.jl](https://github.com/JuliaGizmos/Observables.jl) are containers which can invoke callbacks whenever their stored value is changed.
+# Each property of a `RecoPlan` is an `Observable` to which functions can be attached. These functions listen to changes of the Observable's value.
+# This can be used to store "logic" about the parameter within a plan, such as a function to update and visualize the current state of a plan or to calculate default values whenever a parameter changes.
+# In this documentation we will focus on the interaction between `RecoPlans` and `Observables`. For more details on the `Observables` API we refer to the [package](https://juliagizmos.github.io/Observables.jl/stable/) and [Makie](https://docs.makie.org/dev/explanations/Observables) documentation.
+using Observables +plan = RecoPlan(DirectRadonAlgorithm; parameter = RecoPlan(DirectRadonParameters; + pre = RecoPlan(RadonPreprocessingParameters; frames = collect(1:3)), + reco = RecoPlan(RadonBackprojectionParameters; angles = angles))) + +# You can interact with parameters as if they are "normal" properties of a nested struct, which we shown in previous examples: +length(plan.parameter.pre.frames) == 3 + +# Internally, these properties are stored as `Observables` to which we can attach functions: +on(plan.parameter.pre, :frames) do val + @info "Number of frames: $(length(val))" +end +setAll!(plan, :frames, collect(1:42)) + +# Clearing the plan also resets the `Observables` and removes all listeners: +clear!(plan) +setAll!(plan, :frames, collect(1:3)) +plan.parameter.pre.frames + +# To directly access the Observable of a property you can use `getindex` on the plan with the property name: +plan.parameter.pre[:frames] + +# `Observables` can also be used to connect two properties of a plan. For example, we can set the number of averages to the number of frames: +on(plan.parameter.pre, :frames) do val + plan.parameter.pre.numAverages = length(val) +end +setAll!(plan, :frames, collect(1:42)) +plan.parameter.pre.numAverages + +# It is important to avoid circular dependencies when connecting Observables, as this can lead to infinite loops of callbacks. +# Also note that the connection shown above will always overwrite the number of averages even if a user has set the value manually: +plan.parameter.pre.numAverages = 5 +setAll!(plan, :frames, collect(1:42)) +plan.parameter.pre.numAverages + +# To connect two properties without overwriting user-prvided values, we can use the `LinkedPropertyListener` provided by `AbstractImageReconstruction`: +clear!(plan) +listener = LinkedPropertyListener(plan.parameter.pre, :numAverages, plan.parameter.pre, :frames) do val + @info "Setting default numAverages value to: $(length(val))" + return length(val) +end +plan.parameter.pre.frames = collect(1:42) +plan.parameter.pre.numAverages = 1 +plan.parameter.pre.frames = collect(1:3) + +# The `LinkedPropertyListener` can also be serialized and deserialized with the plan. However, for the function to be properly serialized, it should be a named function: +clear!(plan) +defaultAverages(val) = length(val) +LinkedPropertyListener(defaultAverages, plan.parameter.pre, :numAverages, plan.parameter.pre, :frames) +plan.parameter.pre.frames = collect(1:42) +@info plan.parameter.pre.numAverages == 42 +toTOML(stdout, plan) + +# To serialize custom listener one can inherit from `AbstractPlanListener` and follow the serialization How-To to implement the serialization. +# Listener are deserialized after the plan is built and the parameters are set. This means that the listener can access the parameters of the plan and the plan itself. +# For deserialization the listener has to implement `loadListener!(::Type{<:AbstractPlanListener}, plan::RecoPlan, field::Symbol, dict::Dict{String, Any}, args...)`. \ No newline at end of file diff --git a/docs/src/literate/howto/serialization.jl b/docs/src/literate/howto/serialization.jl new file mode 100644 index 0000000..16cda97 --- /dev/null +++ b/docs/src/literate/howto/serialization.jl @@ -0,0 +1,89 @@ +include("../../literate/example/example_include_all.jl") #hide + +# # Serialization +# As was shown in the example, a `RecoPlan` `AbstractImageReconstruction` can be used to easily parametrize reconstruction algorithms or provide a template structure. 
+# Serializing and deserializing a plan can therefore be used to provide templates of algorithms as well as storing a fully parametrized algorithm to reproduce a reconstruction later on. +# The main goal of serialization is not to store and restore the concrete binary representation of the algorithm, but to store the parameters and the structure of the algorithm. +# Changes to parameters or algorithms internals could thus still be supported by a deserialized plan, as long as the keyword arguments of the constructor are still valid. + +# !!! warning +# Serialization is still in an experimental state and might change in the future. It is intended as a best-effort feature to provide a way to store and load plans. +# Depending on the Julia version, the reconstruction package in question and the complexity of custom structs used in the parameters, serialization might not work as expected. + +# `RecoPlans` are serialized as TOML files using the [TOML.jl](https://docs.julialang.org/en/v1/stdlib/TOML/) standard library: +pre = RadonPreprocessingParameters(frames = collect(1:3)) +back_reco = RadonBackprojectionParameters(;angles) +algo_back = DirectRadonAlgorithm(DirectRadonParameters(pre, back_reco)) +plan = toPlan(algo_back) +clear!(plan) +#toTOML(stdout, plan) + +# Before serialization as a TOML file, the plan is turned into a nested dictionary using the functions `toDict, toDict!, toDictValue` and `toDictValue!`. +# The top-level function is `toDict`: +toDict(plan) + +# This method creates a dictionary and records not only the value of the argument, but also the module and type name among other metadata. This metadata starts with a `_` and is used during deserialization to recreate the correct types. +# After creating the dictionary, the function `toDict!` is called to add the argument and its metadata to the dictionary. + +# The value-representation of the argument is added to the dictionary using the `toDictValue!` method. +# The default `toDictValue!` for structs with fields adds each field of the argument as a key-value pair with the value being provided by the `toDictValue` function. + +# While `AbstractImageReconstruction` tries to provide default implementations, multiple dispatch can be used for custom serialization of types. + +# As an example we will add a new parameter struct for a filtered backprojection process using a given geometry: +export CustomGeomFilteredBackprojectionParameters +Base.@kwdef struct CustomGeomFilteredBackprojectionParameters{G <: RadonGeometry} <: OurRadonReco.AbstractDirectRadonReconstructionParameters + angles::Vector{Float64} + filter::Union{Nothing, Vector{Float64}} = nothing + geometry::G +end +function AbstractImageReconstruction.process(::Type{<:AbstractDirectRadonAlgorithm}, params::CustomGeomFilteredBackprojectionParameters, data::AbstractArray{T, 3}) where {T} + return RadonKA.backproject_filtered(data, params.angles; filter = params.filter, geometry = params.geometry) +end + +# First we will take a look at the default serialization: +reco = RecoPlan(CustomGeomFilteredBackprojectionParameters; angles = [0.0], + geometry = RadonFlexibleCircle(size(sinograms, 1), [0.0, 0.0], [1.0, 1.0])) +toTOML(stdout, reco) + +# In this case the default seems to work, but we can also provide a custom serialization for the geometry. This is especially helpful for custom types that contain large amounts of data, which we don't want to serialize. 
+# An example of this could be a file with meeasurement data, where we just want to store the file path and not the whole data. + +# We want to serialize the geometry as a custom dictionary. For that we first need to override the default `toDictValue` method for the geometry: +AbstractImageReconstruction.toDictValue(value::RadonGeometry) = toDict(value) + +# This method is called when the fields for the `CustomGeomFilteredBackprojectionParameters` are serialized. + +# Now that we create a custom dict representation, we can override the behaviour after the metadata is recorded. For that we specialize the `toDictValue!` method: +function AbstractImageReconstruction.toDictValue!(dict, value::RadonFlexibleCircle) + dict["in"] = value.in_height + dict["out"] = value.out_height + dict["N"] = value.N +end + +# This results in our custom serialization: +toTOML(stdout, reco) + +# We also need to create the corresponding deserialization functions. This is done by defining a `fromTOML` method for the type. +# Since we defined our value to be represented as a dictionary, we will need to construct our type from a dictionary: +function AbstractImageReconstruction.fromTOML(::Type{<:RadonGeometry}, dict::Dict{String, Any}) + return RadonFlexibleCircle(dict["N"], dict["in"], dict["out"]) +end + +# Finally, we can do a round-trip test to see if our serialization and deserialization works: +#md # ```julia +#md # io = IOBuffer() +#md # toTOML(io, reco) +#md # seekstart(io) +#md # recoCopy = loadPlan(io, [Main, OurRadonReco, RadonKA]) +#md # toTOML(stdout, recoCopy) +#md # ``` +io = IOBuffer() #jl +toTOML(io, reco) #jl +seekstart(io) #jl +recoCopy = loadPlan(io, [Main, OurRadonReco, RadonKA]) #jl +toTOML(stdout, recoCopy) #jl + + +# For deserialization we need to provide the module where the type is defined. This is necessary to "find" the correct type during deserialization that allows for the dispatch to the correct `fromTOML` method. +# Generally, this module selection can be done by the reconstruction package developer, though it is also possible for the user to add modules since they can easily extend algorithms with new processing steps. 
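As a brief aside on top of the round trip above (not part of the patch itself): the same template can also go through a file on disk instead of an `IOBuffer`. This is only a sketch; it combines the `savePlan("path/to/file.toml", plan)` usage mentioned earlier in the example with the `loadPlan(io, modules)` method used above, and the temporary path is arbitrary:

```julia
# Sketch: store the plan as a TOML template on disk and restore it later.
path = joinpath(mktempdir(), "custom_fbp_plan.toml") # arbitrary location
savePlan(path, reco)

# The module list lets loadPlan resolve the types referenced in the TOML file,
# including the custom fromTOML method defined above for RadonGeometry.
restored = open(path) do io
  loadPlan(io, [Main, OurRadonReco, RadonKA])
end
```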
\ No newline at end of file From aa9b0602be4e8359361ba52fe1f23b01a44cddaf Mon Sep 17 00:00:00 2001 From: nHackel Date: Wed, 11 Dec 2024 17:34:51 +0100 Subject: [PATCH 7/7] Add docstrings --- docs/make.jl | 4 +- docs/src/API/algorithm.md | 52 --------- docs/src/API/api.md | 50 +++++++++ docs/src/API/plan.md | 60 ---------- docs/src/example_intro.md | 18 +-- docs/src/index.md | 18 +-- docs/src/literate/example/0_radon_data.jl | 10 +- docs/src/literate/example/1_interface.jl | 15 ++- .../literate/example/example_include_data.jl | 47 ++++---- docs/src/literate/howto/observables.jl | 3 +- src/AbstractImageReconstruction.jl | 2 +- src/AlgorithmInterface.jl | 33 ++++++ src/RecoPlans/Cache.jl | 13 ++- src/RecoPlans/Listeners.jl | 20 +++- src/RecoPlans/RecoPlans.jl | 103 ++++++++++++++++-- src/RecoPlans/Serialization.jl | 24 +++- src/StructTransforms.jl | 35 ++++++ 17 files changed, 328 insertions(+), 179 deletions(-) delete mode 100644 docs/src/API/algorithm.md create mode 100644 docs/src/API/api.md delete mode 100644 docs/src/API/plan.md diff --git a/docs/make.jl b/docs/make.jl index 904a052..5cfc31d 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -1,4 +1,4 @@ -using Documenter, Literate, AbstractImageReconstruction +using Documenter, Literate, AbstractImageReconstruction, Observables # Generate examples OUTPUT_BASE = joinpath(@__DIR__(), "src/generated") @@ -40,7 +40,7 @@ makedocs( "Caching" => "generated/howto/caching.md", "Observables" => "generated/howto/observables.md", ], - #"API Reference" => Any["Solvers" => "API/solvers.md", + "API Reference" => "API/api.md", #"Regularization Terms" => "API/regularization.md"], ], diff --git a/docs/src/API/algorithm.md b/docs/src/API/algorithm.md deleted file mode 100644 index 3887043..0000000 --- a/docs/src/API/algorithm.md +++ /dev/null @@ -1,52 +0,0 @@ -# API for Regularizers -This page contains documentation of the public API of the RegularizedLeastSquares. In the Julia -REPL one can access this documentation by entering the help mode with `?` - -```@docs -RegularizedLeastSquares.L1Regularization -RegularizedLeastSquares.L2Regularization -RegularizedLeastSquares.L21Regularization -RegularizedLeastSquares.LLRRegularization -RegularizedLeastSquares.NuclearRegularization -RegularizedLeastSquares.TVRegularization -``` - -## Projection Regularization -```@docs -RegularizedLeastSquares.PositiveRegularization -RegularizedLeastSquares.RealRegularization -``` - -## Nested Regularization -```@docs -RegularizedLeastSquares.innerreg(::AbstractNestedRegularization) -RegularizedLeastSquares.sink(::AbstractNestedRegularization) -RegularizedLeastSquares.sinktype(::AbstractNestedRegularization) -``` - -## Scaled Regularization -```@docs -RegularizedLeastSquares.AbstractScaledRegularization -RegularizedLeastSquares.scalefactor -RegularizedLeastSquares.NormalizedRegularization -RegularizedLeastSquares.NoNormalization -RegularizedLeastSquares.MeasurementBasedNormalization -RegularizedLeastSquares.SystemMatrixBasedNormalization -RegularizedLeastSquares.FixedParameterRegularization -``` - -## Misc. 
Nested Regularization -```@docs -RegularizedLeastSquares.MaskedRegularization -RegularizedLeastSquares.TransformedRegularization -RegularizedLeastSquares.PlugAndPlayRegularization -``` - -## Miscellaneous Functions -```@docs -RegularizedLeastSquares.prox!(::AbstractParameterizedRegularization, ::AbstractArray) -RegularizedLeastSquares.prox!(::Type{<:AbstractParameterizedRegularization}, ::Any, ::Any) -RegularizedLeastSquares.norm(::AbstractParameterizedRegularization, ::AbstractArray) -RegularizedLeastSquares.λ(::AbstractParameterizedRegularization) -RegularizedLeastSquares.norm(::Type{<:AbstractParameterizedRegularization}, ::Any, ::Any) -``` \ No newline at end of file diff --git a/docs/src/API/api.md b/docs/src/API/api.md new file mode 100644 index 0000000..eb290e8 --- /dev/null +++ b/docs/src/API/api.md @@ -0,0 +1,50 @@ +# API for Solvers +This page contains documentation of the public API of the AbstractImageReconstruction. In the Julia +REPL one can access this documentation by entering the help mode with `?` + +## Algorithm and Parameters +```@docs +AbstractImageReconstruction.AbstractImageReconstructionAlgorithm +AbstractImageReconstruction.reconstruct +Base.put!(::AbstractImageReconstructionAlgorithm, ::Any) +Base.take!(::AbstractImageReconstructionAlgorithm) +AbstractImageReconstruction.AbstractImageReconstructionParameters +AbstractImageReconstruction.process +AbstractImageReconstruction.parameter +``` + +## RecoPlan +```@docs +AbstractImageReconstruction.RecoPlan +Base.propertynames(::RecoPlan) +Base.getproperty(::RecoPlan, ::Symbol) +Base.getindex(::RecoPlan, ::Symbol) +Base.setproperty!(::RecoPlan, ::Symbol, ::Any) +AbstractImageReconstruction.setAll! +AbstractImageReconstruction.clear! +Base.ismissing(::RecoPlan, ::Symbol) +Observables.on(::Any, ::RecoPlan, ::Symbol) +Observables.off(::RecoPlan, ::Symbol, ::Any) +AbstractImageReconstruction.build +AbstractImageReconstruction.toPlan +AbstractImageReconstruction.savePlan +AbstractImageReconstruction.loadPlan +AbstractImageReconstruction.loadListener! +AbstractImageReconstruction.parent(::RecoPlan) +AbstractImageReconstruction.parent!(::RecoPlan, ::RecoPlan) +AbstractImageReconstruction.parentproperty +AbstractImageReconstruction.parentproperties +``` + +## Miscellaneous +```@docs +AbstractImageReconstruction.LinkedPropertyListener +AbstractImageReconstruction.ProcessResultCache +Base.hash(::AbstractImageReconstructionParameters, ::UInt64) +AbstractImageReconstruction.toKwargs(::AbstractImageReconstructionParameters) +AbstractImageReconstruction.fromKwargs +AbstractImageReconstruction.toDict +AbstractImageReconstruction.toDict! +AbstractImageReconstruction.toDictValue +AbstractImageReconstruction.toDictValue! +``` diff --git a/docs/src/API/plan.md b/docs/src/API/plan.md deleted file mode 100644 index 5774048..0000000 --- a/docs/src/API/plan.md +++ /dev/null @@ -1,60 +0,0 @@ -# API for Solvers -This page contains documentation of the public API of the RegularizedLeastSquares. In the Julia -REPL one can access this documentation by entering the help mode with `?` - -## solve! 
-```@docs -RegularizedLeastSquares.solve!(::AbstractLinearSolver, ::Any) -RegularizedLeastSquares.init!(::AbstractLinearSolver, ::Any) -RegularizedLeastSquares.init!(::AbstractLinearSolver, ::AbstractSolverState, ::AbstractMatrix) - -``` - -## ADMM -```@docs -RegularizedLeastSquares.ADMM -``` - -## CGNR -```@docs -RegularizedLeastSquares.CGNR -``` - -## Kaczmarz -```@docs -RegularizedLeastSquares.Kaczmarz -``` - -## FISTA -```@docs -RegularizedLeastSquares.FISTA -``` - -## OptISTA -```@docs -RegularizedLeastSquares.OptISTA -``` - -## POGM -```@docs -RegularizedLeastSquares.POGM -``` - -## SplitBregman -```@docs -RegularizedLeastSquares.SplitBregman -``` - -## Miscellaneous -```@docs -RegularizedLeastSquares.solverstate -RegularizedLeastSquares.solversolution -RegularizedLeastSquares.solverconvergence -RegularizedLeastSquares.StoreSolutionCallback -RegularizedLeastSquares.StoreConvergenceCallback -RegularizedLeastSquares.CompareSolutionCallback -RegularizedLeastSquares.linearSolverList -RegularizedLeastSquares.createLinearSolver -RegularizedLeastSquares.applicableSolverList -RegularizedLeastSquares.isapplicable -``` \ No newline at end of file diff --git a/docs/src/example_intro.md b/docs/src/example_intro.md index e8f4032..938b42d 100644 --- a/docs/src/example_intro.md +++ b/docs/src/example_intro.md @@ -1,7 +1,7 @@ # Small Reconstruction Package for Radon projections -In this example we will implement a small image reconstruction package with the help of `AbstractImageReconstruction.jl`. Our example reconstruction package aims to provide direct and iterative reconstruction algorithms for Radon projection data. +In this example we will implement a small image reconstruction package using `AbstractImageReconstruction.jl`. Our reconstruction package `OurRadonreco` aims to provide direct and iterative reconstruction algorithms for Radon projection data. -Most of the desired functionality is already implemented in various Julia packages. Our reconstruction packages now needs to properly connect these packages and transform the data into the appropriate formats for each package. +Most of the desired functionality is already implemented in various Julia packages. Our reconstruction package now needs to properly link these packages and transform the data into the appropriate formats for each package. !!! note The example is intended for developers of reconstruction packages that use `AbstractImageReconstruction`. End-users of such a package can consult the result sections of the example to see the high-level interface of `AbstractImagerReconstruction` and should otherwise consult the documentation of the concrete reconstruction package itself. @@ -16,27 +16,27 @@ Pkg.add("AbstractImageReconstruction") This will download and install AbstractImageReconstruction.jl and its dependencies. To install a different version, please consult the [Pkg documentation](https://pkgdocs.julialang.org/dev/managing-packages/#Adding-packages). In addition to AbstractImageReconstruction.jl, we will need a few more packages to get started, which we can install the same way. -[RadonKA.jl](https://github.com/roflmaostc/RadonKA.jl/tree/main) provides us with fast Radon forward and backprojections, which we can use for direct reconstructions and preparing example data for our package. +[RadonKA.jl](https://github.com/roflmaostc/RadonKA.jl/tree/main) provides us with fast Radon forward and backward projections, which we can use for direct reconstructions and to prepare sample data for our package. 
-[LinearOperatorCollection.jl](https://github.com/JuliaImageRecon/LinearOperatorCollection.jl) wraps the functionality of RadonKA.jl in a matrix-free linear operator, which can be used in iterative solvers. +[LinearOperatorCollection.jl](https://github.com/JuliaImageRecon/LinearOperatorCollection.jl) wraps the functionality of RadonKA.jl into a matrix-free linear operator, that can be used in iterative solvers. -[RegularizedLeastSquares.jl](https://github.com/JuliaImageRecon/RegularizedLeastSquares.jl) offers a variety of iterative solver and regularization options. +[RegularizedLeastSquares.jl](https://github.com/JuliaImageRecon/RegularizedLeastSquares.jl) offers a variety of iterative solvers and regularization options. [ImagePhantoms.jl](https://github.com/JuliaImageRecon/ImagePhantoms.jl) and [ImageGeoms.jl](https://github.com/JuliaImageRecon/ImageGeoms.jl) allow us to define digital software "phantoms", which we will use to test our reconstruction algorithms. Lastly, we will use [CairoMakie.jl](https://docs.makie.org/stable/) to visualize our results. ## Outline -[Radon Data](generated/example/0_radon_data.md): In this section we get familiar with RadonKA.jl and define a small dataformat for three-dimensional time-series sinograms. We also create the inverse problem, which we want to solve in the remainder of the example. +[Radon Data](generated/example/0_radon_data.md): this section we will familiarise ourselves with RadonKA.jl and define a small data format for three-dimensional time series sinograms. We also create the inverse problem that we will solve in the rest of the example [Interface](generated/example/1_interface.md): Here we define the abstract types we will use in our package and take a look at what we need to implement to interact with `AbstractImageReconstruction`. We also start with a first processing step of our algorithms. [Direct Reconstruction](generated/example/2_direct.md): Now we extend our abstract types with a concrete implementation of reconstruction algorithms using the backprojection and filtered backprojection. -[Direct Reconstruction Result](generated/example/3_direct_result.md): This section shows how to use the algorithm we just implemented. +[Direct Reconstruction Result](generated/example/3_direct_result.md): This section shows how to use the algorithm we have just implemented. [Iterative Reconstruction](generated/example/4_iterative.md): We finish our small example package by implementing an iterative reconstruction algorithm. For this algorithm we require more complex parametrization and data processing. -[Iterative Reconstruction Result](generated/example/5_iterative_result.md): The last section again shows how to use the just implemented algorithm. But it also highlights `RecoPlans`, which are a core utility of `AbstractImageReconstruction`. These plans allow a user to easily configure, store and load algorithms as templates. +[Iterative Reconstruction Result](generated/example/5_iterative_result.md): The last section shows again how to use the just implemented algorithm. But it also highlights `RecoPlans`, which are a core utility of `AbstractImageReconstruction`. These plans allow a user to easily configure, save and load algorithms as templates. -For an even more indepth reconstruction package we refer to the magnetic particle imaging reconstruction package [MPIReco.jl](https://github.com/MagneticParticleImaging/MPIReco.jl). 
+For an even more detailed reconstruction package we refer to the magnetic particle imaging reconstruction package [MPIReco.jl](https://github.com/MagneticParticleImaging/MPIReco.jl). \ No newline at end of file diff --git a/docs/src/index.md b/docs/src/index.md index 08bca82..fff119f 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -4,13 +4,14 @@ ## Introduction -AbstractImageReconstruction.jl is a Julia package that serves as the core API for medical imaging packages. It provides implementations an interface and type hierarchy with which one can represent and implement image reconstruction algorithms, their parameters and runtime behaviour. In particular, this package serves as the API of the Julia packages [MPIReco.jl](https://github.com/MagneticParticleImaging/MPIReco.jl). +AbstractImageReconstruction.jl is a Julia package that serves as the core API for medical imaging packages. It provides implementations an interface and type hierarchy to represent and implement image reconstruction algorithms, their parameters and runtime behaviour. In particular, this package serves as the API of the Julia packages [MPIReco.jl](https://github.com/MagneticParticleImaging/MPIReco.jl). ## Features - + +* Reconstruction control flow defined with multiple-dispatch on extensible and exchangable type hierarchies * Storing, loading and manipulating of reconstruction algorithms with (partially) set parameters -* Attaching callbacks to parameters changes with Observables.jl -* Transparent caching of intermediate reconstruction results +* Attaching callbacks to parameter changes with Observables.jl +* Various generic utilities such as transparent caching of intermediate reconstruction results ## Installation @@ -22,7 +23,8 @@ Pkg.add("AbstractImageReconstruction") AbstractImageReconstruction is not intended to be used alone, but together with an image reconstruction package that implements the provided interface, such as [MPIReco.jl](https://github.com/MagneticParticleImaging/MPIReco.jl). ## Usage -Concrete construction of reconstruction algorithms depend on the implementation of the reconstruction package. Once an algorithms is constructed with the given paramters, images can be reconstructed as follows: +The actual construction of reconstruction algorithms depends on the implementation of the reconstruction package. Once an algorithm is constructed with the given parameters, images can be reconstructed as follows: + ```julia using AbstractImageReconstruction, MPIReco @@ -32,7 +34,7 @@ raw = ... # Setup raw data image = reconstruct(algo, raw) ``` -Once an algorithm is constructed it can be transformed into a `RecoPlan`. These are mutable and transparent wrappers around the nested types of the algorithm and its paramters, that can be stored and restored to and from TOML files. +An algorithm can be transformed into a `RecoPlan`. These are mutable and transparent wrappers around the nested types of the algorithm and its parameters, which can be saved and restored to and from TOML files. ```julia plan = toPlan(algo) @@ -42,6 +44,6 @@ plan = loadPlan(MPIReco, "Example", [MPIReco, RegularizedLeastSquares, MPIFiles] algo2 = build(plan) algo == algo2 # true ``` -Unlike concrete algorithm instances, a `RecoPlan` may still be missing certain values of its fields and it can encode the structure of an image reconstruction algorithm without concrete parameterization. +Unlike concrete algorithm instances, a `RecoPlan` may still be missing certain values of its properties. 
Futhermore, they can encode the structure of an image reconstruction algorithm without concrete parameterization. -It is also possible to attach `Listeners` to `RecoPlan` fields, that call user-specified functions if they are changed. This allows specific `RecoPlans` to provide smart default paramter choices or embedding a plan into a GUI. \ No newline at end of file +It is also possible to attach functions to `RecoPlan` properties, that call user-specified functions if they are changed using `Observables.jl`. This allows specific `RecoPlans` to provide smart default parameter choices or embedding a plan into a GUI. \ No newline at end of file diff --git a/docs/src/literate/example/0_radon_data.jl b/docs/src/literate/example/0_radon_data.jl index dc9eebe..d267c7f 100644 --- a/docs/src/literate/example/0_radon_data.jl +++ b/docs/src/literate/example/0_radon_data.jl @@ -1,15 +1,15 @@ # # Radon Data -# In this example we will setup our Radon data using RadonKA.jl, ImagePhantoms.jl and ImageGeoms.jl. We will start with simple 2D images and their sinograms and move up to a time series of 3D images and sinograms. +# In this example we will set up our radon data using RadonKA.jl, ImagePhantoms.jl and ImageGeoms.jl. We will start with simple 2D images and their sinograms and continue with a time series of 3D images and sinograms. # ## Background -# The Radon transform is a integral transform which projects the values of a function(/or a phantom) along straight lines onto a detector. +# The Radon transform is an integral transform that projects the values of a function(or a phantom) along straight lines onto a detector. # These projections are recorded for a number of different angles and form the so-called sinogram. The Radon transform and its adjoint form the mathematical basis -# for Computed Tomography (CT) and other imaging modalities such as single photon emission computed tomography (SPECT) and positron emission tomography (PET). +# for computed tomography (CT) and other imaging modalities such as single photon emission computed tomography (SPECT) and positron emission tomography (PET). # ## 2D Phantom # We will use a simple Shepp-Logan phantom to generate our Radon data. The phantom is a standard test image for medical imaging and consists of a number of ellipses with different intensities. -# It can be generated with ImagePhantoms.jl and ImageGeoms.jl. as follows: +# It can be generated using ImagePhantoms.jl and ImageGeoms.jl. as follows: using ImagePhantoms, ImageGeoms N = 256 @@ -17,7 +17,7 @@ image = shepp_logan(N, SheppLoganToft()) size(image) # This produces a 256x256 image of a Shepp-Logan phantom. Next, we will generate the Radon data using `radon` from RadonKA.jl. -# The arguments of this function are image or phantom under transformation, the anlges at which the projections are taken, and the used geometry of the system. For this example we will use the default parallel circle geometry. +# The arguments of this function are the image or phantom to be transformed, the angles at which the projections are taken, and the used geometry of the system. For this example we will use the default parallel circle geometry. # For more details, we refer to the RadonKA.jl documentation. We will use 256 angles for the projections, between 0 and π. 
using RadonKA angles = collect(range(0, π, 256)) diff --git a/docs/src/literate/example/1_interface.jl b/docs/src/literate/example/1_interface.jl index 35be854..da0c298 100644 --- a/docs/src/literate/example/1_interface.jl +++ b/docs/src/literate/example/1_interface.jl @@ -26,16 +26,17 @@ abstract type AbstractDirectRadonAlgorithm <: AbstractRadonAlgorithm end abstract type AbstractIterativeRadonAlgorithm <: AbstractRadonAlgorithm end # ## Internal Interface -# Reconstruction algorithms in AbstractImageReconstruction.jl are expected to be implemented in terms of distinct processing steps, implemented in their own `process` methods. +# Reconstruction algorithms in AbstractImageReconstruction.jl are expected to be implemented in the form of distinct processing steps, implemented in their own `process` methods. # The `process` function takes an algorithm, parameters, and inputs and returns the result of the processing step. -# If no function is defined for an instance of an algorithm, the default implementation is called. This method tries to invoke the function `process` with the type of the algorithm: +# If no function is defined for an instance of an algorithm, the default implementation is called. This method tries to call the function `process` with the type of the algorithm: # ```julia # process(algo::AbstractImageReconstructionAlgorithm, param::AbstractImageReconstructionParameters, inputs...) = process(typeof(algo), param, inputs...) # ``` -# The implementation of reconstruction algorithms is thus expected to either implement the `process` function for the algorithm type or instance. Dispatch on instances allow an instance to change its state, while dispatch on types allows for pure helper functions. +# The implementation of reconstruction algorithms is therefore expected to either implement the `process` function for the algorithm type or for the instance. Dispatch on instances allow an instance to change its state, while dispatch on types allows for pure helper functions. + # A `process` itself can invoke other `process` functions to enable multiple processing steps and generally have arbitarry control flow. It is not required to implement a straight-foward pipeline. We will see this later when we implementd our algorithms. -# Let's define a preprocessing step we can share between our algorithms. We want to allow the user to select certain frames from a time data and average them. +# Let's define a preprocessing step that we can share between our algorithms. We want to allow the user to select certain frames from a time series and average them. # We will use the `@kwdef` macro to provide constructor with keyword arguments and default values using Statistics Base.@kwdef struct RadonPreprocessingParameters <: AbstractRadonPreprocessingParameters @@ -54,8 +55,6 @@ function AbstractImageReconstruction.process(::Type{<:AbstractRadonAlgorithm}, p return data end -# ## External Interface +# ## User Interface # A user of our package should be able to reconstruct images by calling the `reconstruct` function. This function takes an algorithm and an input and returns the reconstructed image. -# Internally, the `reconstruct` function calls the `put!` and `take!` functions of the algorithm to pass the input and retrieve the output. Algorithms must implement these functions and are expected to have FIFO behavior. - -# Algorithms can internally construct and use further algorithms to which they pass data. 
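To make the `put!`/`take!` contract described in this hunk more concrete, here is a minimal, hypothetical sketch of an algorithm that fulfills it. The type `MyRadonAlgorithm` is invented for this illustration; it simply buffers results in a `Channel`, mirroring the pattern the example algorithms use later on:

```julia
# Hypothetical algorithm type illustrating the FIFO put!/take! contract.
mutable struct MyRadonAlgorithm{P <: AbstractRadonParameters} <: AbstractRadonAlgorithm
  parameter::P
  output::Channel{Any}
  MyRadonAlgorithm(parameter::P) where P = new{P}(parameter, Channel{Any}(Inf))
end

# put! runs the processing steps and buffers the result ...
Base.put!(algo::MyRadonAlgorithm, data::AbstractArray) = put!(algo.output, process(algo, algo.parameter, data))
# ... and take! hands out buffered results in FIFO order.
Base.take!(algo::MyRadonAlgorithm) = take!(algo.output)

# reconstruct(algo, data) then simply chains put! and take!.
```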
\ No newline at end of file +# Internally, the `reconstruct` function calls the `put!` and `take!` functions of the algorithm to pass the input and retrieve the output. Algorithms must implement these functions and are expected to have FIFO behavior. \ No newline at end of file diff --git a/docs/src/literate/example/example_include_data.jl b/docs/src/literate/example/example_include_data.jl index 9b380bd..5eacee0 100644 --- a/docs/src/literate/example/example_include_data.jl +++ b/docs/src/literate/example/example_include_data.jl @@ -6,24 +6,29 @@ function plot_image(figPos, img; title = "", width = 150, height = 150) hidedecorations!(ax) hm = heatmap!(ax, img) Colorbar(figPos[1, 2], hm) -end -angles = collect(range(0, π, 256)) -shape = (64, 64, 64) -params = map(collect, ellipsoid_parameters(; fovs = shape)) -toft_settings = [1.0, -0.8, -0.2, -0.2, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1] -for idx in eachindex(toft_settings) - params[idx][10] = toft_settings[idx] -end -ob = ellipsoid(map(Tuple, params)) -ig = ImageGeom(;dims = shape) -image = phantom(axes(ig)..., ob) -sinogram = Array(RadonKA.radon(image, angles)) -sinograms = similar(sinogram, size(sinogram)..., 5) -images = similar(image, size(image)..., 5) -for (i, intensity) in enumerate(range(params[3][end], 0.3, 5)) - params[3][end] = intensity - local ob = ellipsoid(map(Tuple, params)) - local ig = ImageGeom(;dims = shape) - images[:, :, :, i] = phantom(axes(ig)..., ob) - sinograms[:, :, :, i] = Array(RadonKA.radon(images[:, :, :, i], angles)) -end \ No newline at end of file +end + +isDataDefined = @isdefined sinograms +angles, shape, sinograms, images = isDataDefined ? (angles, shape, sinograms, images) : begin + angles = collect(range(0, π, 256)) + shape = (64, 64, 64) + params = map(collect, ellipsoid_parameters(; fovs = shape)) + toft_settings = [1.0, -0.8, -0.2, -0.2, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1] + for idx in eachindex(toft_settings) + params[idx][10] = toft_settings[idx] + end + ob = ellipsoid(map(Tuple, params)) + ig = ImageGeom(;dims = shape) + local image = phantom(axes(ig)..., ob) + sinogram = Array(RadonKA.radon(image, angles)) + sinograms = similar(sinogram, size(sinogram)..., 5) + images = similar(image, size(image)..., 5) + for (i, intensity) in enumerate(range(params[3][end], 0.3, 5)) + params[3][end] = intensity + local ob = ellipsoid(map(Tuple, params)) + local ig = ImageGeom(;dims = shape) + images[:, :, :, i] = phantom(axes(ig)..., ob) + sinograms[:, :, :, i] = Array(RadonKA.radon(images[:, :, :, i], angles)) + end + return angles, shape, sinograms, images +end \ No newline at end of file diff --git a/docs/src/literate/howto/observables.jl b/docs/src/literate/howto/observables.jl index 8fc99e3..4c518b8 100644 --- a/docs/src/literate/howto/observables.jl +++ b/docs/src/literate/howto/observables.jl @@ -4,7 +4,8 @@ include("../../literate/example/example_include_all.jl") #hide # Observables from [Observables.jl](https://github.com/JuliaGizmos/Observables.jl) are containers which can invoke callbacks whenever their stored value is changed. # Each property of a `RecoPlan` is an `Òbservable` to which functions can be attached. These function listen to changes of the Observables value. # This can be used to store "logic" about the parameter within a plan, such as a function to update and visualize the current state of a plan or to calculate default values whenever a parameter changes. -# In this documentation we will focus on the interaction between `RecoPlans` and `Observables`. 
For more details on the `Observables` API we refer to the [package](https://juliagizmos.github.io/Observables.jl/stable/) and [Makie](https://docs.makie.org/dev/explanations/Observables) documentation.
+
+# In this documentation we will focus on the interaction between `RecoPlans` and `Observables`. For more details on the `Observables` API we refer to the [package](https://juliagizmos.github.io/Observables.jl/stable/) and [Makie](https://docs.makie.org/stable/explanations/observables) documentation.
 using Observables
 plan = RecoPlan(DirectRadonAlgorithm; parameter = RecoPlan(DirectRadonParameters; pre = RecoPlan(RadonPreprocessingParameters; frames = collect(1:3)),
diff --git a/src/AbstractImageReconstruction.jl b/src/AbstractImageReconstruction.jl
index 06dbaa2..ad1ed3d 100644
--- a/src/AbstractImageReconstruction.jl
+++ b/src/AbstractImageReconstruction.jl
@@ -6,7 +6,7 @@ using Observables
 using Scratch
 using LRUCache
-import Base: put!, take!, fieldtypes, fieldtype, ismissing, propertynames, parent, hash
+import Base: put!, take!, fieldtypes, fieldtype, ismissing, propertynames, parent, hash, wait, isready
 include("AlgorithmInterface.jl")
 include("StructTransforms.jl")
diff --git a/src/AlgorithmInterface.jl b/src/AlgorithmInterface.jl
index bbd5d20..f8c64e0 100644
--- a/src/AlgorithmInterface.jl
+++ b/src/AlgorithmInterface.jl
@@ -1,14 +1,39 @@
 export AbstractImageReconstructionAlgorithm
+"""
+    AbstractImageReconstructionAlgorithm
+
+Abstract type for image reconstruction algorithms. Must implement the `put!` and `take!` functions. Serialization expects a constructor with a single `AbstractImageReconstructionParameters` argument.
+"""
 abstract type AbstractImageReconstructionAlgorithm end
 export AbstractImageReconstructionParameters
+"""
+    AbstractImageReconstructionParameters
+
+Abstract type for image reconstruction parameters. An algorithm consists of one or more `process` steps, each of which can have its own parameters. Parameters can be arbitrarily nested.
+"""
 abstract type AbstractImageReconstructionParameters end
 export put!, take!
+"""
+    put!(algo::AbstractImageReconstructionAlgorithm, inputs...)
+
+Perform image reconstruction with algorithm `algo` on the given `inputs`. Depending on the algorithm this might block. The result is stored and can be retrieved with `take!`.
+"""
 put!(algo::AbstractImageReconstructionAlgorithm, inputs...) = error("$(typeof(algo)) must implement put!")
+"""
+    take!(algo::AbstractImageReconstructionAlgorithm)
+
+Remove and return a stored result from the algorithm `algo`. Blocks until a result is available.
+"""
 take!(algo::AbstractImageReconstructionAlgorithm) = error("$(typeof(algo)) must implement take!")
 export reconstruct
+"""
+    reconstruct(algo::T, u) where {T<:AbstractImageReconstructionAlgorithm}
+
+Reconstruct an image from input `u` using algorithm `algo`.
+"""
 function reconstruct(algo::T, u) where {T<:AbstractImageReconstructionAlgorithm}
   put!(algo, u)
   return take!(algo)
@@ -19,6 +44,9 @@ export process
 # Overwrite process(algo, ...) to mutate struct based on helper function result
 """
    process(algo::Union{A, Type{A}}, param::AbstractImageReconstructionParameters, inputs...) where A <: AbstractImageReconstructionAlgorithm
+
+Process `inputs` with algorithm `algo` and parameters `param`. Returns the result of the processing step.
+If not implemented for an instance of `algo`, the default implementation is called with the type of `algo`.
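+
+For illustration, a `process` step is typically implemented against the algorithm type; the default implementation then also covers calls on instances. The types in this sketch are placeholders and not part of the package:
+
+```julia
+# hypothetical algorithm family and parameter type
+abstract type MyAlgo <: AbstractImageReconstructionAlgorithm end
+struct NormalizeParams <: AbstractImageReconstructionParameters
+  factor::Float64
+end
+
+# pure helper step, dispatching on the algorithm type
+function AbstractImageReconstruction.process(::Type{<:MyAlgo}, params::NormalizeParams, data::AbstractArray)
+  return data ./ params.factor
+end
+```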
""" function process end process(algo::AbstractImageReconstructionAlgorithm, param::AbstractImageReconstructionParameters, inputs...) = process(typeof(algo), param, inputs...) @@ -35,4 +63,9 @@ function process(algo::AbstractImageReconstructionAlgorithm, params::Vector{<:Ab end export parameter +""" + parameter(algo::AbstractImageReconstructionAlgorithm) + +Return the parameters of the algorithm `algo`. +""" parameter(algo::AbstractImageReconstructionAlgorithm) = error("$(typeof(algo)) must implement parameter") \ No newline at end of file diff --git a/src/RecoPlans/Cache.jl b/src/RecoPlans/Cache.jl index b78cb3d..bf7b5da 100644 --- a/src/RecoPlans/Cache.jl +++ b/src/RecoPlans/Cache.jl @@ -1,4 +1,10 @@ export ProcessResultCache +""" + ProcessResultCache(params::AbstractImageReconstructionParameters; maxsize = 1, kwargs...) + +Cache of size `maxsize` for the result of `process` methods. The cache is based on the `hash` of the inputs of the `process` function. Cache is shared between all algorithms constructed from the same plan. +The cache is transparent for properties of the underlying parameter. Cache can be invalidated by calling `empty!` on the cache. +""" Base.@kwdef mutable struct ProcessResultCache{P <: AbstractImageReconstructionParameters} <: AbstractImageReconstructionParameters param::P const maxsize::Int64 = 1 @@ -64,7 +70,7 @@ function validvalue(plan, union::UnionAll, value::RecoPlan{<:ProcessResultCache} end # Do not serialize cache and lock, only param -function addDictValue!(dict, cache::RecoPlan{<:ProcessResultCache}) +function toDictValue!(dict, cache::RecoPlan{<:ProcessResultCache}) size = cache.maxsize if !ismissing(size) dict["maxsize"] = size @@ -86,6 +92,11 @@ function loadPlan!(plan::RecoPlan{<:ProcessResultCache}, dict::Dict{String, Any} return plan end +""" + empty!(cache::ProcessResultCache) + +Empty the cache of the `ProcessResultCache` +""" Base.empty!(cache::ProcessResultCache) = empty!(cache.cache) """ diff --git a/src/RecoPlans/Listeners.jl b/src/RecoPlans/Listeners.jl index 3ab2323..b47cb1a 100644 --- a/src/RecoPlans/Listeners.jl +++ b/src/RecoPlans/Listeners.jl @@ -1,9 +1,25 @@ export AbstractPlanListener +""" + AbstractPlanListener + +Abstract type for listeners that can be attached to `RecoPlans` and are serialized together with the plan. +Structs implementing this type must be function-like-objects that take a single argument for the `Observable` callback. +""" abstract type AbstractPlanListener end const LISTENER_TAG = "_listener" -Observables.on(f, plan::RecoPlan, field::Symbol; kwargs...) = on(f, getfield(plan, :values)[field]; kwargs...) -Observables.off(plan::RecoPlan, field::Symbol, f) = off(f, getfield(plan, :values)[field]) +""" + on(f, plan::RecoPlan, property::Symbol; kwargs...) + +Adds function `f` as listener to `property` of `plan`. The function is called whenever the property is changed with `setproperty!`. +""" +Observables.on(f, plan::RecoPlan, property::Symbol; kwargs...) = on(f, plan[property]; kwargs...) +""" + off(plan::RecoPlan, property::Symbol, f) + +Remove `f` from the listeners of `property` of `plan`. 
+"""
+Observables.off(plan::RecoPlan, property::Symbol, f) = off(f, plan[property])
 include("LinkedPropertyListener.jl")
\ No newline at end of file
diff --git a/src/RecoPlans/RecoPlans.jl b/src/RecoPlans/RecoPlans.jl
index 0f47fac..8d8eedf 100644
--- a/src/RecoPlans/RecoPlans.jl
+++ b/src/RecoPlans/RecoPlans.jl
@@ -1,7 +1,22 @@
 export RecoPlan
+"""
+    RecoPlan{T <: Union{AbstractImageReconstructionParameters, AbstractImageReconstructionAlgorithm}}
+
+Configuration template for an image reconstruction algorithm or parameters of type `T`.
+A `RecoPlan{T}` has the same properties as `T`, with type checking, except that properties can be missing and nested algorithms and parameters can again be `RecoPlan`s.
+
+Plans can be nested and form a tree. A parent plan can be accessed with `parent` and set with `parent!`. Algorithms and parameters can be converted to a plan with `toPlan`.
+
+Plans feature serialization with `toTOML`, `toPlan` and `loadPlan` and the ability to attach callbacks to property changes with `Observables` and `on`.
+"""
 mutable struct RecoPlan{T<:Union{AbstractImageReconstructionParameters, AbstractImageReconstructionAlgorithm}}
   parent::Union{Nothing, RecoPlan}
   values::Dict{Symbol, Observable{Any}}
+  """
+      RecoPlan(::Type{T}; kwargs...) where {T<:AbstractImageReconstructionParameters}
+
+  Construct a RecoPlan of type `T` and set the properties with the given keyword arguments.
+  """
   function RecoPlan(::Type{T}; kwargs...) where {T<:AbstractImageReconstructionParameters}
     dict = Dict{Symbol, Observable{Any}}()
     for field in filter(f -> !startswith(string(f), "_"), fieldnames(T))
@@ -11,6 +26,11 @@ mutable struct RecoPlan{T<:Union{AbstractImageReconstructionParameters, Abstract
     setAll!(plan; kwargs...)
     return plan
   end
+  """
+      RecoPlan(::Type{T}; parameter = missing) where {T<:AbstractImageReconstructionAlgorithm}
+
+  Construct a RecoPlan of type `T` and set the main parameter of the algorithm.
+  """
   function RecoPlan(::Type{T}; parameter = missing) where {T<:AbstractImageReconstructionAlgorithm}
     dict = Dict{Symbol, Observable{Any}}()
     dict[:parameter] = Observable{Any}(missing)
@@ -20,9 +40,23 @@ mutable struct RecoPlan{T<:Union{AbstractImageReconstructionParameters, Abstract
   end
 end
+"""
+    propertynames(plan::RecoPlan{T}) where {T}
+Return a tuple of the configurable property names of `T`. Unlike `propertynames(T)` this does not include properties starting with `_`.
+"""
 Base.propertynames(plan::RecoPlan{T}) where {T} = Tuple(keys(getfield(plan, :values)))
+"""
+    getproperty(plan::RecoPlan{T}, name::Symbol) where {T}
+
+Get the property `name` of `plan`. Equivalent to `plan.name`.
+"""
 Base.getproperty(plan::RecoPlan{T}, name::Symbol) where {T} = getfield(plan, :values)[name][]
+"""
+    getindex(plan::RecoPlan{T}, name::Symbol) where {T}
+
+Return the `Observable` for the `name` property of `plan`. Equivalent to `plan[name]`.
+"""
 Base.getindex(plan::RecoPlan{T}, name::Symbol) where {T} = getfield(plan, :values)[name]
 export types, type
@@ -38,7 +72,11 @@ function type(plan::RecoPlan{T}, name::Symbol) where {T<:AbstractImageReconstruc
 end
 types(::RecoPlan{T}) where {T<:AbstractImageReconstructionAlgorithm} = [type(plan, name) for name in propertynames(plan)]
-
+"""
+    setproperty!(plan::RecoPlan{T}, name::Symbol, x::X) where {T, X}
+
+Set the property `name` of `plan` to `x`. Equivalent to `plan.name = x`. Triggers callbacks attached to the property.
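+
+A minimal sketch of setting properties and reacting to changes; the plan and its `frames` property are placeholders:
+
+```julia
+plan.frames = collect(1:3)   # value is checked against the declared property type
+plan.frames = missing        # missing is always allowed and marks the property as unset
+
+# listeners attached with `on` are triggered by setproperty!
+callback(val) = println("frames changed to ", val)
+on(callback, plan, :frames)
+plan.frames = collect(1:5)   # invokes the callback
+off(plan, :frames, callback) # detach the listener again
+```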
+""" function Base.setproperty!(plan::RecoPlan{T}, name::Symbol, x::X) where {T, X} if !haskey(getfield(plan, :values), name) error("type $T has no field $name") @@ -47,8 +85,6 @@ function Base.setproperty!(plan::RecoPlan{T}, name::Symbol, x::X) where {T, X} t = type(plan, name) if validvalue(plan, t, x) getfield(plan, :values)[name][] = x - - else getfield(plan, :values)[name][] = convert(t, x) end @@ -59,7 +95,6 @@ function Base.setproperty!(plan::RecoPlan{T}, name::Symbol, x::X) where {T, X} return Base.getproperty(plan, name) end -ispropertyset(plan::RecoPlan, name::Symbol) = getfield(plan, :setProperties)[name] validvalue(plan, t, value::Missing) = true validvalue(plan, ::Type{T}, value::X) where {T, X <: T} = true validvalue(plan, ::Type{T}, value::RecoPlan{<:T}) where T = true @@ -72,6 +107,11 @@ validvalue(plan, ::Type{arrT}, value::AbstractArray) where {T, arrT <: AbstractA #X <: t || X <: RecoPlan{<:t} || ismissing(x) export setAll! +""" + setAll!(plan::RecoPlan{T}, name::Symbol, x) where {T<:AbstractImageReconstructionParameters} + +Recursively set the property `name` of each nested `RecoPlan` of `plan` to `x`. +""" function setAll!(plan::RecoPlan{T}, name::Symbol, x) where {T<:AbstractImageReconstructionParameters} fields = getfield(plan, :values) @@ -107,6 +147,11 @@ setAll!(plan::RecoPlan, dict::Dict{Symbol, Any}) = setAll!(plan; dict...) setAll!(plan::RecoPlan, dict::Dict{String, Any}) = setAll!(plan, Dict{Symbol, Any}(Symbol(k) => v for (k,v) in dict)) export clear! +""" + clear!(plan::RecoPlan{T}, preserve::Bool = true) where {T<:AbstractImageReconstructionParameters} + +Clear all properties of `plan`. If `preserve` is `true`, nested `RecoPlan`s are preserved. +""" function clear!(plan::RecoPlan{T}, preserve::Bool = true) where {T<:AbstractImageReconstructionParameters} dict = getfield(plan, :values) for key in keys(dict) @@ -123,29 +168,66 @@ end clear!(plan::RecoPlan{T}, preserve::Bool = true) where {T<:AbstractImageReconstructionAlgorithm} = clear!(plan.parameter, preserve) -export parent, parent! +export parent, parent!, parentproperty, parentproperties +""" + parent(plan::RecoPlan) + +Return the parent of `plan`. +""" parent(plan::RecoPlan) = getfield(plan, :parent) +""" + parent!(plan::RecoPlan, parent::RecoPlan) + +Set the parent of `plan` to `parent`. +""" parent!(plan::RecoPlan, parent::RecoPlan) = setfield!(plan, :parent, parent) +""" + parentproperties(plan::RecoPlan) + +Return a vector of property names of `plan` in its parent, s.t. `getproperty(parent(plan), last(parentproperties(plan))) === plan`. Return an empty vector if `plan` has no parent. +""" function parentproperties(plan::RecoPlan) trace = Symbol[] return parentproperties!(trace, plan) end -function parentproperties!(trace::Vector{Symbol}, plan::RecoPlan) +""" + parentproperty(plan::RecoPlan) + +Return the property name of `plan` in its parent, s.t. `getproperty(parent(plan), parentproperty(plan)) === plan`. Return `nothing` if `plan` has no parent. 
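+
+For example, for a nested plan whose parent has been set (e.g. by `toPlan` or `parent!`), a sketch would be:
+
+```julia
+sub = plan.parameter   # a nested RecoPlan
+parentproperty(sub)    # returns :parameter
+```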
+""" +function parentproperty(plan::RecoPlan) p = parent(plan) if !isnothing(p) for property in propertynames(p) if getproperty(p, property) === plan - pushfirst!(trace, property) - return parentproperties!(trace, p) + return property end end end + return nothing +end +function parentproperties!(trace::Vector{Symbol}, plan::RecoPlan) + p = parent(plan) + parentprop = parentproperty(plan) + if !isnothing(p) && !isnothing(parentprop) + pushfirst!(trace, parentprop) + return parentproperties!(trace, p) + end return trace end +""" + ismissing(plan::RecoPlan, name::Symbol) +Indicate if the property `name` of `plan` is missing. +""" Base.ismissing(plan::RecoPlan, name::Symbol) = ismissing(getfield(plan, :values)[name]) export build +""" + build(plan::RecoPlan{T}) where {T} + +Recursively build a plan from a `RecoPlan` by converting all properties to their actual values using keyword argument constructors. +""" function build(plan::RecoPlan{T}) where {T<:AbstractImageReconstructionParameters} fields = Dict{Symbol, Any}() # Retrieve key-value (property-value) pairs of the plan @@ -171,6 +253,11 @@ function build(plan::RecoPlan{T}) where {T<:AbstractImageReconstructionAlgorithm end export toPlan +""" + toPlan(param::Union{AbstractImageReconstructionParameters, AbstractImageReconstructionAlgorithm}) + +Convert an `AbstractImageReconstructionParameters` or `AbstractImageReconstructionAlgorithm` to a (nested) `RecoPlan`. +""" function toPlan(param::AbstractImageReconstructionParameters) args = Dict{Symbol, Any}() plan = RecoPlan(typeof(param)) diff --git a/src/RecoPlans/Serialization.jl b/src/RecoPlans/Serialization.jl index 0389f34..923f038 100644 --- a/src/RecoPlans/Serialization.jl +++ b/src/RecoPlans/Serialization.jl @@ -15,11 +15,22 @@ function planpath(m::Module, name::AbstractString) end export savePlan -savePlan(filename::AbstractString, plan::RecoPlan) = toTOML(filename, plan) +""" + savePlan(file::Union{AbstractString, IO}, plan::RecoPlan) + +Save the `plan` to the `file` in TOML format. +See also `loadPlan`, `toTOML`, `toDict`. +""" +savePlan(file::Union{AbstractString, IO}, plan::RecoPlan) = toTOML(file, plan) savePlan(m::Module, planname::AbstractString, plan::RecoPlan) = savePlan(planpath(m, planname), plan) toDictModule(plan::RecoPlan{T}) where {T} = parentmodule(T) toDictType(plan::RecoPlan{T}) where {T} = RecoPlan{getfield(parentmodule(T), nameof(T))} +""" + toDictValue!(dict, plan::RecoPlan) + +Adds the properties of `plan` to `dict` using `toDictValue` for each not missing field. Additionally, adds each listener::AbstractPlanListener to the dict. +""" function toDictValue!(dict, value::RecoPlan) listenerDict = Dict{String, Any}() for field in propertynames(value) @@ -42,6 +53,12 @@ end export loadPlan loadPlan(m::Module, name::AbstractString, modules::Vector{Module}) = loadPlan(planpath(m, name), modules) +""" + loadPlan(filename::Union{AbstractString, IO}, modules::Vector{Module}) + +Load a `RecoPlan` from a TOML file. The `modules` argument is a vector of modules that contain the types used in the plan. +After loading the plan, the listeners are attached to the properties using `loadListener!`. 
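+
+A sketch of a save/load round trip; `MyRecoPackage` and the file name are placeholders:
+
+```julia
+savePlan("direct_radon.toml", plan)
+plan = loadPlan("direct_radon.toml", [AbstractImageReconstruction, MyRecoPackage])
+algo = build(plan)
+```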
+""" function loadPlan(filename::Union{AbstractString, IO}, modules::Vector{Module}) dict = TOML.parse(filename) modDict = createModuleDataTypeDict(modules) @@ -179,6 +196,11 @@ function loadListeners!(root::RecoPlan, plan::RecoPlan{T}, dict, modDict) where end end export loadListener +""" + loadListener!(plan, name::Symbol, dict, modDict) + +Load a listener from `dict` and attach it to property `name` of `plan` +""" function loadListener!(plan, name::Symbol, dict, modDict) type = tomlType(dict, modDict) return loadListener!(type, plan, name, dict, modDict) diff --git a/src/StructTransforms.jl b/src/StructTransforms.jl index f10c090..fba9990 100644 --- a/src/StructTransforms.jl +++ b/src/StructTransforms.jl @@ -30,6 +30,17 @@ function toKwargs!(dict, value; flatten::Vector{DataType} = DataType[], ignore:: return dict end +""" + toKwargs(value::AbstractImageReconstructionParameters; kwargs...) + +Convert a `AbstractImageReconstructionParameters` to a `Dict{Symbol, Any}` of each property. + +Optional keyword arguments: +* flatten::Vector{DataType}: Types to flatten, per default only `AbstractImageReconstructionParameters` are flattened. +* ignore::Vector{Symbol}: Properties to ignore. +* default::Dict{Symbol, Any}: Default values for properties that are missing. +* overwrite::Dict{Symbol, Any}: Overwrite values for properties. +""" function toKwargs(v::AbstractImageReconstructionParameters; flatten::Union{Vector{DataType}, Nothing} = nothing, kwargs...) dict = Dict{Symbol, Any}() return toKwargs!(dict, v; flatten = isnothing(flatten) ? [AbstractImageReconstructionParameters] : flatten, kwargs...) @@ -41,7 +52,11 @@ function toKwargs(v::Vector{<:AbstractImageReconstructionParameters}; flatten::U return dict end +""" + fromKwargs(type::Type{T}; kwargs...) where {T} +Create a new instance of `type` from the keyword arguments. Only properties that are part of `type` are considered. +""" function fromKwargs(type::Type{T}; kwargs...) where {T} args = Dict{Symbol, Any}() dict = values(kwargs) @@ -82,11 +97,21 @@ toTOML(x::Array) = toTOML.(x) toTOML(x::Type{T}) where T = string(x) toTOML(x::Nothing) = Dict() +""" + toDict(value) + +Recursively convert `value` to a `Dict{String, Any}` using `toDict!`. +""" function toDict(value) dict = Dict{String, Any}() return toDict!(dict, value) end +""" + toDict!(dict, value) + +Extracts metadata such as the module and type name from `value` and adds it to `dict`. The value-representation of `value` is added using `toDictValue!`. +""" function toDict!(dict, value) dict[MODULE_TAG] = toDictModule(value) dict[TYPE_TAG] = toDictType(value) @@ -95,6 +120,11 @@ function toDict!(dict, value) end toDictModule(value) = parentmodule(typeof(value)) toDictType(value) = nameof(typeof(value)) +""" + toDictValue!(dict, value) + +Extracts the value-representation of `value` and adds it to `dict`. The default implementation for structs with fields adds each field of the argument as a key-value pair with the value being provided by the `toDictValue` function. +""" function toDictValue!(dict, value) for field in fieldnames(typeof(value)) dict[string(field)] = toDictValue(fieldtype(typeof(value), field), getfield(value, field)) @@ -106,6 +136,11 @@ function toDictValue!(dict, value::Function) # NOP end +""" + toDictValue(value) + +Transform `value` to a value-representation in a dict that can later be serialized as a TOML file. +""" toDictValue(type, value) = toDictValue(value) function toDictValue(x) if fieldcount(typeof(x)) > 0