diff --git a/dev/.documenter-siteinfo.json b/dev/.documenter-siteinfo.json index 7c38fa6..3dc118f 100644 --- a/dev/.documenter-siteinfo.json +++ b/dev/.documenter-siteinfo.json @@ -1 +1 @@ -{"documenter":{"julia_version":"1.11.1","generation_timestamp":"2024-11-07T10:16:13","documenter_version":"1.7.0"}} \ No newline at end of file +{"documenter":{"julia_version":"1.11.1","generation_timestamp":"2024-11-14T14:00:34","documenter_version":"1.8.0"}} \ No newline at end of file diff --git a/dev/assets/documenter.js b/dev/assets/documenter.js index 82252a1..7d68cd8 100644 --- a/dev/assets/documenter.js +++ b/dev/assets/documenter.js @@ -612,176 +612,194 @@ function worker_function(documenterSearchIndex, documenterBaseURL, filters) { }; } -// `worker = Threads.@spawn worker_function(documenterSearchIndex)`, but in JavaScript! -const filters = [ - ...new Set(documenterSearchIndex["docs"].map((x) => x.category)), -]; -const worker_str = - "(" + - worker_function.toString() + - ")(" + - JSON.stringify(documenterSearchIndex["docs"]) + - "," + - JSON.stringify(documenterBaseURL) + - "," + - JSON.stringify(filters) + - ")"; -const worker_blob = new Blob([worker_str], { type: "text/javascript" }); -const worker = new Worker(URL.createObjectURL(worker_blob)); - /////// SEARCH MAIN /////// -// Whether the worker is currently handling a search. This is a boolean -// as the worker only ever handles 1 or 0 searches at a time. -var worker_is_running = false; - -// The last search text that was sent to the worker. This is used to determine -// if the worker should be launched again when it reports back results. -var last_search_text = ""; - -// The results of the last search. This, in combination with the state of the filters -// in the DOM, is used compute the results to display on calls to update_search. -var unfiltered_results = []; - -// Which filter is currently selected -var selected_filter = ""; - -$(document).on("input", ".documenter-search-input", function (event) { - if (!worker_is_running) { - launch_search(); - } -}); - -function launch_search() { - worker_is_running = true; - last_search_text = $(".documenter-search-input").val(); - worker.postMessage(last_search_text); -} - -worker.onmessage = function (e) { - if (last_search_text !== $(".documenter-search-input").val()) { - launch_search(); - } else { - worker_is_running = false; - } - - unfiltered_results = e.data; - update_search(); -}; +function runSearchMainCode() { + // `worker = Threads.@spawn worker_function(documenterSearchIndex)`, but in JavaScript! + const filters = [ + ...new Set(documenterSearchIndex["docs"].map((x) => x.category)), + ]; + const worker_str = + "(" + + worker_function.toString() + + ")(" + + JSON.stringify(documenterSearchIndex["docs"]) + + "," + + JSON.stringify(documenterBaseURL) + + "," + + JSON.stringify(filters) + + ")"; + const worker_blob = new Blob([worker_str], { type: "text/javascript" }); + const worker = new Worker(URL.createObjectURL(worker_blob)); + + // Whether the worker is currently handling a search. This is a boolean + // as the worker only ever handles 1 or 0 searches at a time. + var worker_is_running = false; + + // The last search text that was sent to the worker. This is used to determine + // if the worker should be launched again when it reports back results. + var last_search_text = ""; + + // The results of the last search. This, in combination with the state of the filters + // in the DOM, is used compute the results to display on calls to update_search. 
+ var unfiltered_results = []; + + // Which filter is currently selected + var selected_filter = ""; + + $(document).on("input", ".documenter-search-input", function (event) { + if (!worker_is_running) { + launch_search(); + } + }); -$(document).on("click", ".search-filter", function () { - if ($(this).hasClass("search-filter-selected")) { - selected_filter = ""; - } else { - selected_filter = $(this).text().toLowerCase(); + function launch_search() { + worker_is_running = true; + last_search_text = $(".documenter-search-input").val(); + worker.postMessage(last_search_text); } - // This updates search results and toggles classes for UI: - update_search(); -}); + worker.onmessage = function (e) { + if (last_search_text !== $(".documenter-search-input").val()) { + launch_search(); + } else { + worker_is_running = false; + } -/** - * Make/Update the search component - */ -function update_search() { - let querystring = $(".documenter-search-input").val(); + unfiltered_results = e.data; + update_search(); + }; - if (querystring.trim()) { - if (selected_filter == "") { - results = unfiltered_results; + $(document).on("click", ".search-filter", function () { + if ($(this).hasClass("search-filter-selected")) { + selected_filter = ""; } else { - results = unfiltered_results.filter((result) => { - return selected_filter == result.category.toLowerCase(); - }); + selected_filter = $(this).text().toLowerCase(); } - let search_result_container = ``; - let modal_filters = make_modal_body_filters(); - let search_divider = `
`; + // This updates search results and toggles classes for UI: + update_search(); + }); - if (results.length) { - let links = []; - let count = 0; - let search_results = ""; - - for (var i = 0, n = results.length; i < n && count < 200; ++i) { - let result = results[i]; - if (result.location && !links.includes(result.location)) { - search_results += result.div; - count++; - links.push(result.location); - } - } + /** + * Make/Update the search component + */ + function update_search() { + let querystring = $(".documenter-search-input").val(); - if (count == 1) { - count_str = "1 result"; - } else if (count == 200) { - count_str = "200+ results"; + if (querystring.trim()) { + if (selected_filter == "") { + results = unfiltered_results; } else { - count_str = count + " results"; + results = unfiltered_results.filter((result) => { + return selected_filter == result.category.toLowerCase(); + }); } - let result_count = `
${count_str}
`; - search_result_container = ` + let search_result_container = ``; + let modal_filters = make_modal_body_filters(); + let search_divider = `
`; + + if (results.length) { + let links = []; + let count = 0; + let search_results = ""; + + for (var i = 0, n = results.length; i < n && count < 200; ++i) { + let result = results[i]; + if (result.location && !links.includes(result.location)) { + search_results += result.div; + count++; + links.push(result.location); + } + } + + if (count == 1) { + count_str = "1 result"; + } else if (count == 200) { + count_str = "200+ results"; + } else { + count_str = count + " results"; + } + let result_count = `
${count_str}
`; + + search_result_container = ` +
+ ${modal_filters} + ${search_divider} + ${result_count} +
+ ${search_results} +
+
+ `; + } else { + search_result_container = `
${modal_filters} ${search_divider} - ${result_count} -
- ${search_results} -
-
+
0 result(s)
+ +
No result found!
`; - } else { - search_result_container = ` -
- ${modal_filters} - ${search_divider} -
0 result(s)
-
-
No result found!
- `; - } + } - if ($(".search-modal-card-body").hasClass("is-justify-content-center")) { - $(".search-modal-card-body").removeClass("is-justify-content-center"); - } + if ($(".search-modal-card-body").hasClass("is-justify-content-center")) { + $(".search-modal-card-body").removeClass("is-justify-content-center"); + } - $(".search-modal-card-body").html(search_result_container); - } else { - if (!$(".search-modal-card-body").hasClass("is-justify-content-center")) { - $(".search-modal-card-body").addClass("is-justify-content-center"); + $(".search-modal-card-body").html(search_result_container); + } else { + if (!$(".search-modal-card-body").hasClass("is-justify-content-center")) { + $(".search-modal-card-body").addClass("is-justify-content-center"); + } + + $(".search-modal-card-body").html(` +
Type something to get started!
+ `); } + } - $(".search-modal-card-body").html(` -
Type something to get started!
- `); + /** + * Make the modal filter html + * + * @returns string + */ + function make_modal_body_filters() { + let str = filters + .map((val) => { + if (selected_filter == val.toLowerCase()) { + return `${val}`; + } else { + return `${val}`; + } + }) + .join(""); + + return ` +
+ Filters: + ${str} +
`; } } -/** - * Make the modal filter html - * - * @returns string - */ -function make_modal_body_filters() { - let str = filters - .map((val) => { - if (selected_filter == val.toLowerCase()) { - return `${val}`; - } else { - return `${val}`; - } - }) - .join(""); - - return ` -
- Filters: - ${str} -
`; +function waitUntilSearchIndexAvailable() { + // It is possible that the documenter.js script runs before the page + // has finished loading and documenterSearchIndex gets defined. + // So we need to wait until the search index actually loads before setting + // up all the search-related stuff. + if (typeof documenterSearchIndex !== "undefined") { + runSearchMainCode(); + } else { + console.warn("Search Index not available, waiting"); + setTimeout(waitUntilSearchIndexAvailable, 1000); + } } +// The actual entry point to the search code +waitUntilSearchIndexAvailable(); + }) //////////////////////////////////////////////////////////////////////////////// require(['jquery'], function($) { diff --git a/dev/basicReconstruction.html b/dev/basicReconstruction.html index 4150210..4b67f29 100644 --- a/dev/basicReconstruction.html +++ b/dev/basicReconstruction.html @@ -16,4 +16,4 @@ spectralCleaning=true, fgFrames=1:10, bgCorrectionInternal=false, noiseFreqThresh=0.0, kargs...)

This function is responsible for loading the measurement data as well as any background data that is subtracted from the measurements. For each frame to be reconstructed, the low-level reconstruction routine is called.
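
Since unknown keyword arguments are forwarded down the layers via kargs..., the options from the signature above can also be supplied through the higher-level call shown in the overview. A minimal sketch (bSF and b are the system-matrix and measurement handles from the overview; the specific values are arbitrary):

c = reconstruction(bSF, b; SNRThresh=5, frames=1,
                   spectralCleaning=true,      # enable spectral cleaning
                   fgFrames=1:10,              # frames treated as foreground
                   bgCorrectionInternal=false) # no internal background correction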

Low Level Reconstruction

Finally, we have arrived at the low-level reconstruction routine, which has the signature

function reconstruction(S, u::Array; sparseTrafo = nothing,
                         lambd=0, progress=nothing, solver = "Kaczmarz",
                        weights=nothing, kargs...)

One can see that it requires the system matrix S and the measurements u to already be loaded.

We note that S is deliberately left untyped here. For a regular reconstruction one will essentially pass an Array{ComplexF32,2}, although more precisely it will be a transposed version of that type when the Kaczmarz algorithm is used, for efficiency reasons.

However, if matrix compression is applied, S will be of type SparseMatrixCSC, and for multi-patch reconstruction S will be of type MultiPatchOperator. Hence, the solvers are implemented in a generic way and require only certain functions to be implemented. The low-level reconstruction method calls one of the solvers from RegularizedLeastSquares.jl.
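
Assuming S and u are already in memory, a direct low-level call could look like the following minimal sketch (the regularization strength and iteration count are arbitrary placeholders; iterations is forwarded via kargs...):

c = reconstruction(S, u; solver="Kaczmarz", lambd=0.001, iterations=3)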

diff --git a/dev/datasetStore.html b/dev/datasetStore.html index 32175a3..aa20470 100644 --- a/dev/datasetStore.html +++ b/dev/datasetStore.html @@ -1,2 +1,2 @@ -- · MPI Reconstruction

Layers

The reconstruction function has several layers, ranging from high-level functions over several mid-level functions down to low-level functions. The most high-level method has the following signature

function reconstruction(d::MDFDatasetStore, study::Study, exp::Experiment, recoParams::Dict{String,Any})
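
To make the layering concrete, a call at this level might look like the following sketch. The store path is a placeholder, and the getStudies/getExperiments accessors are assumptions borrowed from MPIFiles.jl rather than part of this page:

d = MDFDatasetStore("/path/to/datasetStore")
study = getStudies(d)[1]               # assumed MPIFiles.jl accessor
exp = getExperiments(study)[1]         # assumed MPIFiles.jl accessor
recoParams = Dict{String,Any}("SNRThresh" => 5, "frames" => 1,
                              "minFreq" => 80e3, "recChannels" => 1:2)
c = reconstruction(d, study, exp, recoParams)

Collecting all parameters in a single recoParams dictionary allows a complete reconstruction configuration to be stored alongside the result and replayed later.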

diff --git a/dev/index.html b/dev/index.html index 48f12b4..a5745af 100644 --- a/dev/index.html +++ b/dev/index.html @@ -1,2 +1,2 @@ -Home · MPI Reconstruction

MPIReco.jl

Julia package for the reconstruction of magnetic particle imaging (MPI) data

Introduction

This project provides functions for the reconstruction of MPI data. The project is implemented in the programming language Julia and contains algorithms for regular single-patch reconstruction as well as the multi-contrast, multi-patch, and matrix-compression techniques covered in the following sections.

Key features are

  • Frequency filtering for memory-efficient reconstruction. Only frequencies used during reconstruction are loaded into memory.
  • Different solvers provided by the package RegularizedLeastSquares.jl
  • High-level to low-level reconstruction routines providing maximum flexibility for the user
  • Spectral leakage correction (implemented in MPIFiles.jl)

Installation

Start Julia and open the package mode by entering ]. Then enter

add MPIReco

This will install MPIReco.jl and all its dependencies. In particular, this will install the core dependencies MPIFiles.jl and RegularizedLeastSquares.jl.
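
After installation, the package is loaded in a Julia session as usual:

using MPIReco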

License / Terms of Usage

The source code of this project is licensed under the MIT license. This implies that you are free to use, share, and adapt it. However, please give appropriate credit by citing the project.

Contact

If you have problems using the software, find mistakes, or have general questions, please use the issue tracker to contact us.

Contributors

diff --git a/dev/matrixCompression.html b/dev/matrixCompression.html index 1be92de..9108b3b 100644 --- a/dev/matrixCompression.html +++ b/dev/matrixCompression.html @@ -1,2 +1,2 @@ -Compression · MPI Reconstruction

Matrix-Compression Techniques

The reconstruction can be accelerated by applying matrix compression. To this end, the system matrix S is transformed into a different domain by applying a basis transformation to the rows of the system matrix. In MPIReco.jl, matrix compression can be enabled by specifying sparseTrafo, which can be "DCT-IV" or "FFT".

The transformations can be restricted to the drive-field field of view by setting useDFFoV = true. How many coefficients are dropped after application of the transformation is controlled by the parameter redFactor. For instance, a reduction factor of redFactor = 0.01 will drop 99 % of the data.
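
Putting these parameters together, a compressed reconstruction might be invoked as in this sketch (bSF and b are the system-matrix and measurement handles from the overview; all values are arbitrary examples):

c = reconstruction(bSF, b; SNRThresh=5, frames=1, minFreq=80e3, recChannels=1:2,
                   sparseTrafo="DCT-IV",  # basis transformation ("DCT-IV" or "FFT")
                   redFactor=0.01,        # keep 1 % of the coefficients
                   useDFFoV=true)         # restrict to the drive-field field of view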

diff --git a/dev/multiContrast.html b/dev/multiContrast.html index 4914d68..23110fc 100644 --- a/dev/multiContrast.html +++ b/dev/multiContrast.html @@ -2,4 +2,4 @@ Multi-Contrast · MPI Reconstruction

Multi-Contrast Reconstruction

Until now we have discussed single-contrast reconstruction, in which case the reconstructed image c has a singleton first dimension. To perform multi-contrast reconstruction, one has to specify multiple system matrices

bSFa = MPIFile(filenameA)
bSFb = MPIFile(filenameB)

and can then invoke

c = reconstruction([bSFa, bSFb], b;
                     SNRThresh=5, frames=1, minFreq=80e3,
                    recChannels=1:2, iterations=1)

Now one can access the first and second channel by c[1,:,:,:] and c[2,:,:,:].

diff --git a/dev/multiPatch.html b/dev/multiPatch.html index dc4bd8a..07e539c 100644 --- a/dev/multiPatch.html +++ b/dev/multiPatch.html @@ -18,4 +18,4 @@ c4 = reconstruction(bSFs, b; SNRThresh=5, frames=1, minFreq=80e3, recChannels=1:2,iterations=1, spectralLeakageCorrection=false, mapping=mapping, systemMatrices = S, SFGridCenter=SFGridCenter, - FFPos=FFPos, FFPosSF=FFPos) + FFPos=FFPos, FFPosSF=FFPos) diff --git a/dev/overview.html b/dev/overview.html index 1dbce17..d83dad1 100644 --- a/dev/overview.html +++ b/dev/overview.html @@ -12,4 +12,4 @@ recChannels=1:2, iterations=1, spectralLeakageCorrection=true) -

Let's go through that script step by step. First, we create handles for the system matrix and the measurement data. Both are of type MPIFile, which is an abstract type that can, for instance, be an MDFFile or a BrukerFile.

Using the handles to the MPI datasets, we can call the reconstruction function, which has various variants depending on the types that are passed to it. Here, we exploit the multiple dispatch mechanism of Julia. In addition to the file handles, we also pass several reconstruction parameters as keyword arguments. In this case, we set the SNR threshold to 5, implying that only matrix rows with an SNR above 5 are used during reconstruction. The parameter frames decides which frames of the measured data should be reconstructed.

The object c is of type ImageMeta and contains not only the reconstructed data but also metadata such as the reconstruction parameters being used. More details on the return type are discussed in Reconstruction Results.

Data Storage

One can store the reconstruction result in an MDF file by calling

saveRecoData("filename.mdf", c)

In order to load the data, one calls

c = loadRecoData("filename.mdf")

We will next take a closer look at different forms of the reconstruction routine.

diff --git a/dev/parameters.html b/dev/parameters.html index 4e79a6f..bf05143 100644 --- a/dev/parameters.html +++ b/dev/parameters.html @@ -1,2 +1,2 @@ -Parameters · MPI Reconstruction
diff --git a/dev/recoResults.html b/dev/recoResults.html index 8f8b473..6931bdf 100644 --- a/dev/recoResults.html +++ b/dev/recoResults.html @@ -1,2 +1,2 @@ -Results · MPI Reconstruction

Reconstruction Results

The object c is of type ImageMeta and contains not only the reconstructed data but also metadata such as the reconstruction parameters being used. In total, c has five dimensions. The first dimension encodes the multi-spectral channels, dimensions 2-4 encode the three spatial dimensions, and the last dimension contains the frames stored in c.
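
As a concrete illustration (the grid size here is an arbitrary assumption), for a single-contrast 3D reconstruction on a 40×40×40 grid with one stored frame:

size(c)          # (1, 40, 40, 40, 1): channel × x × y × z × frame
c[1, :, :, :, 1] # extract the 3D volume of the first channel and frame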
