diff --git a/.gitignore b/.gitignore index fe3bee4..4576d7d 100755 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,4 @@ -test.ipynb +test*.ipynb *dask-worker* *.idea *DS_Store diff --git a/README.md b/README.md index 34c7a7a..3af3a51 100755 --- a/README.md +++ b/README.md @@ -8,7 +8,10 @@ Currently, **launchcontainers** works along with [anatROIs](https://github.com/garikoitz/anatROIs), [RTP-preproc](https://github.com/garikoitz/rtp-preproc), and [RTP2-pipeline](https://github.com/garikoitz/rtp-pipeline). ## NEW FEATURES 0.3.x -Now you can `pip install launchcontainers==0.3.16 ` and use it in the command line! +Now you can `pip install launchcontainers==0.3.18 ` and use it in the command line! +* Changed rtp/rtp2-preproc multishell option to separated_shell_files +* Edited lc_config.yaml comment about dask_worker options +* Fixed error message by dask progress (0.3.18) * launchcontainers --copy_configs "~/path/to/working_directory" will copy the corresponding config files to your specified directory! * We updated the lc_config.yaml for RTP2-pipelines, please have a look! 
diff --git a/example_configs/0.3.0/example_lc_config.yaml b/example_configs/0.3.0/lc_config_030.yaml similarity index 94% rename from example_configs/0.3.0/example_lc_config.yaml rename to example_configs/0.3.0/lc_config_030.yaml index 9190558..59b007d 100644 --- a/example_configs/0.3.0/example_lc_config.yaml +++ b/example_configs/0.3.0/lc_config_030.yaml @@ -27,6 +27,7 @@ general: print_command_only: False # Log directory to store launchcontainers logging # VALID OPTIONS: analysis_dir or full path you want + # This log dir will be used for dask_log and launchcontainer log log_dir: analysis_dir # Name of launchcontainers log file log_filename: lc_log @@ -57,8 +58,8 @@ container_specific: # Analysis name anat_analysis_name: fMRIprep_brainmask # optional - # if your encoding direction DWI is multishell sequence - multishell: True + # if your encoding direction DWI is multishell sequence and they are in separate files + separated_shell_files: False # If reverse phase encoding is applied in the sequence # It checks if there is a reverse phase encoding acquisition # Old dcm2nixx will not create empty bvec and bval files if there was an acquisition with just b0-s @@ -106,8 +107,8 @@ container_specific: # Analysis name anat_analysis_name: control_points_02 # optional - # if your encoding direction DWI is multishell sequence - multishell: True + # if your encoding direction DWI is multishell sequence and they are in separate files + separated_shell_files: False # If reverse phase encoding is applied in the sequence # It checks if there is a reverse phase encoding acquisition # if not, launchcontainers will create mock files @@ -188,7 +189,7 @@ host_options: # Copy the example list: for BCBL we need ['/bcbl', '/tmp', '/export']; for okazaki we need ['/fileserver', '/tmp'] bind_options: ['/bcbl', '/tmp', '/scratch', '/export'] manager: 'local' - # This can only be serial or parallel, any other options will make it fail. 
+ # This can only be serial, parallel, or dask_worker any other options will make it fail. launch_mode: 'serial' # Arguments below only affect to parallel launch mode njobs: 5 diff --git a/example_configs/0.3.0/new_lc_config.yaml b/example_configs/0.3.0/new_lc_config.yaml deleted file mode 100755 index a8523a4..0000000 --- a/example_configs/0.3.0/new_lc_config.yaml +++ /dev/null @@ -1,132 +0,0 @@ -# Input guide: -# replace the part of this template config.yaml with your own dataset -# Input value type: -# str: string, python will read if as string, type a space after : and input the string directly -# bool: boolean value, True of False, needs to be capitalize first letter -# int: integer, similar to string, type a space after : -# None: python None type, Usually used for optional arguement, don't put anything after column, not even "" -general: - # general setting of launchcontainer soft - basedir: /home/tlei/tlei/multishell_dwi # Base directory of project - bidsdir_name: nifti # Name of bids directory, 1 level under basedir, must contain dataset_description - containerdir: /home/tlei/tlei/LMC_DWI_course/containers # Directory contains singularity images - container: rtp2-preproc # Name of the container - # VALID OPTIONS: freesurferator, rtp2-preproc, rtp2-pipeline, anatrois, rtppreproc, rtp-pipeline, l1_glm, fmriprep - analysis_name: test_rewrite_bval # Name of analysis folder - host: local # Place the computing will be held. - # VALID OPTIONS: local, BCBL, DIPC. - force: True # Whether force to overwrite - print_command_only: False # Verbosity of command-line console. 
If true, only print information at level: CRITICAL (based on python logging package) - log_dir: analysis_dir # Log directory to store launchcontainers logging - # VALID OPTIONS: analysis_dir or full path you want - log_filename: lc_log # Name of launchcontainers log file - -container_specific: - anatrois: - version: 4.6.1-7.3.2 # Version identifier for container - pre_fs: True # Pre-run freesurfer or not? - prefs_dir_name: anatrois_4.6.1-7.3.2 # Directory name of your pre-run freesurfer, this directory should be under /basedir/bidsdir/derivatives - prefs_analysis_name: "6prefs_from_fmriprep" # Analysis name of pre-run freesurfer - prefs_zipname: '^anatrois_S.*\.zip$' # A super identifier to find the pattern no need to change - # optional - annotfile: # Freesurfer annotefiles - mniroizip: # MNI roi zip file - - rtppreproc: - version: 1.2.0-3.0.3 # Version identifier for container - precontainer_anat: anatrois_4.6.1-7.3.2 # anatrois or freesurferator dir, used to find the brainmask - anat_analysis_name: fMRIprep_brainmask # Analysis name - # optional - multishell: True # if your encoding direction DWI is multishell sequence - rpe: True # If reverse phase encoding is applied in the sequence - # It checks if there is a reverse phase encoding acquisition - # Old dcm2nixx will not create empty bvec and bval files if there was an acquisition with just b0-s - - - rtp-pipeline: - version: 4.5.2-3.0.3 # Version identifier for container - precontainer_anat: anatrois_4.6.1-7.3.2 # anatrois or freesurferator dir, used to find the brainmask and fs/ROIs - anat_analysis_name: fulltract_anatrerun # Analysis name - precontainer_preproc: rtppreproc_1.2.0-3.0.3 # rtppreproc or rtp2-preproc dir, used to find the dwi.nii.gz, bvec and bval - preproc_analysis_name: 6sub_wrongbvec # Analysis name - - freesurferator: - version: 0.2.0-7.4.1rc19 # Version identifier for container - pre_fs: True # Pre-run freesurfer or not? 
- prefs_dir_name: freesurferator_0.2.0-7.4.1rc19 # Directory name of your pre-run freesurfer, this directory should be under /basedir/bidsdir/derivatives - prefs_analysis_name: control_points_02 # Analysis name of pre-run freesurfer - prefs_zipname: '^freesurferator_S.*\.zip$' # A super identifier to find the pattern - control_points: False # If you want to use the control points created in the previous analysis (control.dat), set this True: - prefs_unzipname: 'S.*$' # If you created control points, you'll have an unzipped folder in the output analysis. Fill prefs_unzipname - # with the name of the unzipped folder to let launchcontainers create a symbolic link to the control.dat - # optional - annotfile: /home/tlei/Desktop/annnnooott.zip # Freesurfer annotefiles - mniroizip: /home/tlei/Desktop/FG.nii.gz # MNI roi zip file - - rtp2-preproc: - version: 0.1.0_3.0.4rc31 # Version identifier for container - precontainer_anat: freesurferator_0.2.0-7.4.1rc19 # anatrois or freesurferator dir, used to find the brainmask - anat_analysis_name: control_points_02 # Analysis name - # optional - multishell: True # if your encoding direction DWI is multishell sequence - rpe: True # If reverse phase encoding is applied in the sequence - # It checks if there is a reverse phase encoding acquisition - # if not, launchcontainers will create mock files - qmap_nifti: /home/tlei/Desktop/FG.nii.gz # Full Path to qunatitative MRI maps, must be nifti format - rtp2-pipeline: - version: 0.1.0_3.0.4rc20 # Version identifier for container - precontainer_anat: freesurferator_0.2.0-7.4.1rc19 # anatrois or freesurferator dir, used to find the brainmask and fs/ROIs - anat_analysis_name: control_points_02 # Analysis name - precontainer_preproc: rtp2-preproc_0.1.0_3.0.4rc31 # rtppreproc or rtp2-preproc dir, used to find the dwi.nii.gz, bvec and bval - preproc_analysis_name: control_points_02 # Analysis name - #optional - tractparams: /home/tlei/tlei/LMC_DWI_course/scripts/tractparams_short_course.csv 
# Path to tractparams files, needs to be a .csv - fsmask: /home/tlei/Desktop/FG.nii.gz # Path to brain.nii.gz of freesurfer If use fsmask or define manually, this option is set in case you need - qmap_zip: /home/tlei/Desktop/annnnooott.zip # zip file for rtp2-pipeline -host_options: - # SGE manager - BCBL: - use_module: False - apptainer: apptainer/latest - maxwall: 10 - manager: sge - name: "anatrois" - # Dask worker options - cores: 6 # Total number of cores per job (it was core for BCBL) - memory: 32G # Total amount of memory per job (it was mem for BCBL) - processes: 1 # Number of Python processes per job - interface: lo # Network interface to use like eth0 or ib0 - death-timeout: 100 # Number of seconds to wait if a worker can not find a scheduler - local-directory: null # Location of fast local storage like /scratch or $TMPDIR - queue: long.q # It was que in BCBL - project: null - walltime: 25:30:00' - extra: [] - env-extra: [] - job-extra: [] - resource-spec: null - bind_options: ['/bcbl', '/tmp','/scratch'] - # SLURM manager - DIPC: - memory: 32G - queue: regular - cores: 24 - walltime: '22:00:00' - use_module: False # for SLURM, it is always False - apptainer: Singularity/3.5.3-GCC-8.3.0 - manager: slurm - system: scratch - name: "anatrois" - tmpdir: /scratch/llecca/tmp - bind_options: ['/scratch'] - # Local machine, ubuntu, MacOS - local: - use_module: False # if the local machine use module load this option will give you different version of job-queue cmd - apptainer: apptainer/latest - bind_options: ['/bcbl', '/tmp', '/scratch', '/export'] # Copy the example list: for BCBL we need ['/bcbl', '/tmp', '/export']; for okazaki we need ['/fileserver', '/tmp'] - manager: 'local' - launch_mode: 'serial' # This can only be serial or parallel, any other options will make it fail. 
- # Arguments below only affect to parallel launch mode - njobs: 5 # - memory_limit: '32GiB' # - threads_per_worker: 6 # diff --git a/example_configs/0.4.0/l1_glm.yaml b/example_configs/0.4.0/l1_glm.yaml new file mode 100644 index 0000000..13ad0b0 --- /dev/null +++ b/example_configs/0.4.0/l1_glm.yaml @@ -0,0 +1,166 @@ + +experiment: + task: fLoc + # Runs to process + run_nums: ["01", "02", "03", "04", "05", "06"] + # dummy scan of your experiment + dummy_scans: 6 +model: + # fMRI time-series space: volumetric or surface based + # valid options: EPI, MNI, fsnative, fsaverage. Currently only valid for fsnative and fsaverage + space: fsnative + + # if is surface based, choose which hemisphere, input is a list, by default it would be both hemisphere because it is fast + hemis: ['lh','rh'] + + # TODO: whether mask the image to conduct the GLM only on some part to increase power + mask_EPI: False + + # Valid option: fslabel, bimap.nii + # if mask EPI is true, then you need to choose mask method: either binary mask xx.nii or freesurfer label + mask_method: fslabel + + # The things between ?h.xx.label + # if space is freesurfer space, give the label name, TODO: if space is individual T1, + fslabel_name: votc + + # full path or simply the name of this file, + # if it is not full path, then you need to put it under the default place /basedir/bidsdir/derivatives/l1_glm/analysis-xxx/ + # if mask_methods is bimap.nii this option will be used + # please, put this nii file under the analysis folder of the l1_glm, + maskfile_nii_path: /path/to/nii + + # From the fMRIPrep command, align slice time correction to start of TR + slice_time_ref: 0.5 #0.5 or 0 + + # HRF model to use + hrf_model: spm + + # Do not high_pass since we use fMRIPrep's cosine regressors + drift_model: None # Do not high_pass since we use fMRIPrep's cosine regressors + + # Do not high_pass since we use fMRIPrep's cosine regressors + drift_order: 0 # Do not high_pass since we use fMRIPrep's cosine regressors + + 
# Do not high_pass since we use fMRIPrep's cosine regressors + high_pass: None # Do not high_pass since we use fMRIPrep's cosine regressors + + # Motion regressors to use + motion_regressors: [ + "framewise_displacement", + "rot_x", + "rot_y", + "rot_z", + "trans_x", + "trans_y", + "trans_z", + ] + + # currently don't know how to use the following three + use_acompcor: True + use_non_steady_state: True + use_consine_regressors: True + +contrast_groups: + All: + - adult + - child + - body + - limb + - ES_word + - ES_FF + - ES_CB + - ES_CS + - ES_SC + AllnoLimbs: + - adult + - child + - ES_word + - ES_FF + - ES_CB + - ES_CS + - ES_SC + AllnoWord: + - adult + - child + - body + - limb + - ES_FF + - ES_CB + - ES_CS + - ES_SC + AllnoWordnoLEX: + - adult + - child + - body + - limb + - ES_CB + - ES_SC + AllnoFace: + - body + - limb + - ES_word + - ES_FF + - ES_CB + - ES_CS + - ES_SC + LEXPER: + - ES_FF + - ES_CB + - ES_CS + - ES_SC + PER: + - ES_CB + - ES_SC + LEX: + - ES_CS + - ES_FF + Faces: + - adult + - child + Word: + - ES_word + Limbs: + - body + - limb + CB: + - ES_CB + SC: + - ES_SC + CS: + - ES_CS + FF: + - ES_FF + Adult: + - adult + Child: + - child + Limb: + - limb + Body: + - body +contrasts: + - AllvsNull + - PERvsNull + - LEXvsNull + - PERvsLEX + - WordvsLEX + - WordvsPER + - WordvsLEXPER + - WordvsAllnoWordnoLEX + - WordvsAllnoWord + - LEXvsAllnoWordnoLEX + - SCvsCB + - CSvsFF + - FacesvsNull + - FacesvsLEX + - FacesvsPER + - FacesvsLEXPER + - FacesvsAllnoFace + - AdultvsChild + - LimbsvsNull + - LimbsvsLEX + - LimbsvsPER + - LimbsvsLEXPER + - LimbsvsAllnoLimbs + - BodyvsLimb \ No newline at end of file diff --git a/example_configs/0.4.0/lc_config_0.4.yaml b/example_configs/0.4.0/lc_config_0.4.yaml new file mode 100755 index 0000000..264d95d --- /dev/null +++ b/example_configs/0.4.0/lc_config_0.4.yaml @@ -0,0 +1,241 @@ +# Input guide: +# replace the part of this template config.yaml with your own dataset +# Input value type: +# str: string, python will 
read if as string, type a space after : and input the string directly +# bool: boolean value, True of False, needs to be capitalize first letter +# int: integer, similar to string, type a space after : +# None: python None type, Usually used for optional arguement, don't put anything after column, not even "" + +general: + # general setting of launchcontainer soft + # Base directory of project + basedir: /Users/tiger/Desktop/VOTCLOC + # Name of bids directory, 1 level under basedir, must contain dataset_description + bidsdir_name: BIDS + # Directory contains singularity images + # if it is a python project, no need + containerdir: /bcbl/home/public/Gari/singularity_images + # Name of the container + # VALID OPTIONS: freesurferator, rtp2-preproc, rtp2-pipeline, anatrois, rtppreproc, rtp-pipeline, l1_glm, fmriprep + container: l1_glm + # Name of analysis folder + analysis_name: test_prepare_glm + # Place the computing will be held. + # VALID OPTIONS: local, BCBL, DIPC. + host: local + # Whether force to overwrite + force: True + # Verbosity of command-line console. If true, only print information at level: CRITICAL (based on python logging package) + print_command_only: False + # Log directory to store launchcontainers logging + # VALID OPTIONS: analysis_dir or full path you want + log_dir: analysis_dir + # Name of launchcontainers log file + log_filename: m1_test_0.4.10_l1_glm + +container_specific: + anatrois: + # Version identifier for container + version: 4.6.1-7.3.2 + # Pre-run freesurfer or not? 
+ pre_fs: True + # Directory name of your pre-run freesurfer, this directory should be under /basedir/bidsdir/derivatives + prefs_dir_name: anatrois_4.6.1-7.3.2 + # Analysis name of pre-run freesurfer + prefs_analysis_name: "6prefs_from_fmriprep" + # A super identifier to find the pattern no need to change + prefs_zipname: '^anatrois_S.*\.zip$' + # optional + # Freesurfer annotefiles + annotfile: + # MNI roi zip file + mniroizip: + + rtppreproc: + # Version identifier for container + version: 1.2.0-3.0.3 + # anatrois or freesurferator dir, used to find the brainmask + precontainer_anat: anatrois_4.6.1-7.3.2 + # Analysis name + anat_analysis_name: fMRIprep_brainmask + # optional + # if your encoding direction DWI is multishell sequence + multishell: True + # If reverse phase encoding is applied in the sequence + # It checks if there is a reverse phase encoding acquisition + # Old dcm2nixx will not create empty bvec and bval files if there was an acquisition with just b0-s + rpe: True + + rtp-pipeline: + # Version identifier for container + version: 4.5.2-3.0.3 + # anatrois or freesurferator dir, used to find the brainmask and fs/ROIs + precontainer_anat: anatrois_4.6.1-7.3.2 + # Analysis name + anat_analysis_name: fulltract_anatrerun + # rtppreproc or rtp2-preproc dir, used to find the dwi.nii.gz, bvec and bval + precontainer_preproc: rtppreproc_1.2.0-3.0.3 + # Analysis name + preproc_analysis_name: 6sub_wrongbvec + + freesurferator: + # Version identifier for container + version: 0.2.0-7.4.1rc19 + # Pre-run freesurfer or not? 
+ pre_fs: True + # Directory name of your pre-run freesurfer, this directory should be under /basedir/bidsdir/derivatives + prefs_dir_name: freesurferator_0.2.0-7.4.1rc19 + # Analysis name of pre-run freesurfer + prefs_analysis_name: control_points_02 + # A super identifier to find the pattern + prefs_zipname: '^freesurferator_S.*\.zip$' + # If you want to use the control points created in the previous analysis (control.dat), set this True: + control_points: False + # If you created control points, you'll have an unzipped folder in the output analysis. Fill prefs_unzipname + # with the name of the unzipped folder to let launchcontainers create a symbolic link to the control.dat + prefs_unzipname: 'S.*$' + # optional + # Freesurfer annotefiles + annotfile: + # MNI roi zip file + mniroizip: + + rtp2-preproc: + # Version identifier for container + version: 0.1.0_3.0.4rc31 + # anatrois or freesurferator dir, used to find the brainmask + precontainer_anat: freesurferator_0.2.0-7.4.1rc19 + # Analysis name + anat_analysis_name: control_points_02 + # optional + # if your encoding direction DWI is multishell sequence + multishell: True + # If reverse phase encoding is applied in the sequence + # It checks if there is a reverse phase encoding acquisition + # if not, launchcontainers will create mock files + rpe: True + # Full Path to qunatitative MRI maps, must be nifti format + qmap_nifti: /home/tlei/Desktop/FG.nii.gz + + rtp2-pipeline: + # Version identifier for container + version: 0.1.0_3.0.4rc20 + # anatrois or freesurferator dir, used to find the brainmask and fs/ROIs + precontainer_anat: freesurferator_0.2.0-7.4.1rc19 + # Analysis name + anat_analysis_name: control_points_02 + # rtppreproc or rtp2-preproc dir, used to find the dwi.nii.gz, bvec and bval + precontainer_preproc: rtp2-preproc_0.1.0_3.0.4rc31 + # Analysis name + preproc_analysis_name: control_points_02 + # optional + # Path to tractparams files, needs to be a .csv + tractparams: 
/home/tlei/tlei/LMC_DWI_course/scripts/tractparams_short_course.csv + # Path to brain.nii.gz of freesurfer If use fsmask or define manually, this option is set in case you need + fsmask: /home/tlei/Desktop/FG.nii.gz + # zip file for rtp2-pipeline + qmap_zip: /home/tlei/Desktop/annnnooott.zip + + l1_glm: + # Version identifier for container + version: default + # fmriprep folder name under derivatives + fmriprep_dir_name: fmriprep + # fmriprep analysis name (The input), used to get the preprocessed fMRI time-series + # output name is in section general:analysis_name + fmriprep_ana_name: rerun_nordic_fmap + # you want to use another freesurfer or not, usually we use the things under fmriprep sourcedata/freesurfer + pre_fs: True + # The dir name of freesurfer folder you want to use + pre_fs_full_path: freesurfer + # The directory of onset folders + onsetdir: /Users/tiger/Desktop/onset + # The folder structure of onset times, Kepa SPM or fLoc or BIDS, if BIDS, do nothing. Else, needs to do something in the prepare mode + onset_format: fLoc + # this one will work in prepare mode, if false, prepare mode will do nothing but keeping the data provenance + # if True, it will do the time-series smoothing using freesurfer/ or nilearn + smooth_time_series: False + # a list of FWHM kernel for doing the smoothing, this will only be used for the prepare mode + # usually I will do 2 and 4 + smooth_kernel: [2,4] + # if use the smoothed time-series. 
If True, will call the time_series_smooth_kernel + use_smoothed: False + # specify which smoothed time series you will use + time_series_smooth_kernel: 2 + +host_options: + # Default BCBL + BCBL: + # for SGE, it is always false + use_module: False + apptainer: apptainer/latest + maxwall: 10 + manager: sge + name: "anatrois" + # Dask worker options + # Total number of cores per job (it was core for BCBL) + cores: 6 + # Total amount of memory per job (it was mem for BCBL) + memory: 32G + # Number of Python processes per job + processes: 1 + # Network interface to use like eth0 or ib0 + interface: lo + # Number of seconds to wait if a worker can not find a scheduler + death-timeout: 100 + # Location of fast local storage like /scratch or $TMPDIR + local-directory: null + # SGE resource manager options + # It was que in BCBL + queue: long.q + project: null + walltime: 25:30:00' + extra: [] + env-extra: [] + job-extra: [] + resource-spec: null + bind_options: ['/bcbl', '/tmp','/scratch'] + + # Default DIPC + DIPC: + # Total amount of memory per job + memory: 32G + # SLURM queue + queue: regular + # Total number of cores per job + cores: 24 + # Walltime for the job + walltime: '22:00:00' + # for SLURM, it is always false + use_module: False + apptainer: Singularity/3.5.3-GCC-8.3.0 + manager: slurm + system: scratch + name: "anatrois" + tmpdir: /scratch/llecca/tmp + bind_options: ['/scratch'] + + # Local host options + local: + # Dask manager type + manager: local + # if the local machine use module load this option will give you different version of job-queue cmd + use_module: False + apptainer: apptainer/latest + # Copy the example list: for BCBL we need ['/bcbl', '/tmp', '/export']; for okazaki we need ['/fileserver', '/tmp'] + bind_options: ['/bcbl', '/tmp', '/export'] + # This can only be serial or parallel, any other options will make it fail. 
+ launch_mode: parallel + # total cores you ask from your PC + n_cores: 7 + processes: False # default True + # Use Processes (processes=True): + # If your tasks are CPU-bound and you need to avoid the GIL. + # If tasks are memory-intensive and you want memory isolation to prevent memory leaks from affecting other tasks. + # Use Threads (processes=False): + # If your tasks are I/O-bound and can benefit from sharing memory between threads. + # If you have tasks that involve a lot of shared state or require low overhead in terms of process management. + # Memory limit per worker + memory_limit: 8GiB + # If you used dask to launch pipelines, set it to 2, if you used dask to launch l1_glm, set it to an appropriate number, 4 or 6 + threads_per_worker: 2 diff --git a/example_configs/0.4.0/sub_ses_list.txt b/example_configs/0.4.0/sub_ses_list.txt new file mode 100644 index 0000000..17b1acd --- /dev/null +++ b/example_configs/0.4.0/sub_ses_list.txt @@ -0,0 +1,8 @@ +sub,ses,RUN,anat,dwi,func +05,day1VA,False,True,True,True +05,day1VB,False,True,True,True +05,day2VA,False,True,True,True +05,day2VB,False,True,True,True +05,day3PF,True,True,True,True +05,day5BCBL,True,True,True,True +05,day6BCBL,True,True,True,True diff --git a/example_configs/container_specific_example_configs/freesurferator/0.2.0_7.4.1rc19/MNI_ROI_ATR_roi1_R.nii b/example_configs/container_specific_example_configs/freesurferator/0.2.0_7.4.1rc19/MNI_ROI_ATR_roi1_R.nii new file mode 100644 index 0000000..02110d9 Binary files /dev/null and b/example_configs/container_specific_example_configs/freesurferator/0.2.0_7.4.1rc19/MNI_ROI_ATR_roi1_R.nii differ diff --git a/example_configs/container_specific_example_configs/l1_glm/l1_glm.yaml b/example_configs/container_specific_example_configs/l1_glm/l1_glm.yaml new file mode 100644 index 0000000..13ad0b0 --- /dev/null +++ b/example_configs/container_specific_example_configs/l1_glm/l1_glm.yaml @@ -0,0 +1,166 @@ + +experiment: + task: fLoc + # Runs to process + run_nums: 
["01", "02", "03", "04", "05", "06"] + # dummy scan of your experiment + dummy_scans: 6 +model: + # fMRI time-series space: volumetric or surface based + # valid options: EPI, MNI, fsnative, fsaverage. Currently only valid for fsnative and fsaverage + space: fsnative + + # if is surface based, choose which hemisphere, input is a list, by default it would be both hemisphere because it is fast + hemis: ['lh','rh'] + + # TODO: whether mask the image to conduct the GLM only on some part to increase power + mask_EPI: False + + # Valid option: fslabel, bimap.nii + # if mask EPI is true, then you need to choose mask method: either binary mask xx.nii or freesurfer label + mask_method: fslabel + + # The things between ?h.xx.label + # if space is freesurfer space, give the label name, TODO: if space is individual T1, + fslabel_name: votc + + # full path or simply the name of this file, + # if it is not full path, then you need to put it under the default place /basedir/bidsdir/derivatives/l1_glm/analysis-xxx/ + # if mask_methods is bimap.nii this option will be used + # please, put this nii file under the analysis folder of the l1_glm, + maskfile_nii_path: /path/to/nii + + # From the fMRIPrep command, align slice time correction to start of TR + slice_time_ref: 0.5 #0.5 or 0 + + # HRF model to use + hrf_model: spm + + # Do not high_pass since we use fMRIPrep's cosine regressors + drift_model: None # Do not high_pass since we use fMRIPrep's cosine regressors + + # Do not high_pass since we use fMRIPrep's cosine regressors + drift_order: 0 # Do not high_pass since we use fMRIPrep's cosine regressors + + # Do not high_pass since we use fMRIPrep's cosine regressors + high_pass: None # Do not high_pass since we use fMRIPrep's cosine regressors + + # Motion regressors to use + motion_regressors: [ + "framewise_displacement", + "rot_x", + "rot_y", + "rot_z", + "trans_x", + "trans_y", + "trans_z", + ] + + # currently don't know how to use the following three + use_acompcor: True + 
use_non_steady_state: True + use_consine_regressors: True + +contrast_groups: + All: + - adult + - child + - body + - limb + - ES_word + - ES_FF + - ES_CB + - ES_CS + - ES_SC + AllnoLimbs: + - adult + - child + - ES_word + - ES_FF + - ES_CB + - ES_CS + - ES_SC + AllnoWord: + - adult + - child + - body + - limb + - ES_FF + - ES_CB + - ES_CS + - ES_SC + AllnoWordnoLEX: + - adult + - child + - body + - limb + - ES_CB + - ES_SC + AllnoFace: + - body + - limb + - ES_word + - ES_FF + - ES_CB + - ES_CS + - ES_SC + LEXPER: + - ES_FF + - ES_CB + - ES_CS + - ES_SC + PER: + - ES_CB + - ES_SC + LEX: + - ES_CS + - ES_FF + Faces: + - adult + - child + Word: + - ES_word + Limbs: + - body + - limb + CB: + - ES_CB + SC: + - ES_SC + CS: + - ES_CS + FF: + - ES_FF + Adult: + - adult + Child: + - child + Limb: + - limb + Body: + - body +contrasts: + - AllvsNull + - PERvsNull + - LEXvsNull + - PERvsLEX + - WordvsLEX + - WordvsPER + - WordvsLEXPER + - WordvsAllnoWordnoLEX + - WordvsAllnoWord + - LEXvsAllnoWordnoLEX + - SCvsCB + - CSvsFF + - FacesvsNull + - FacesvsLEX + - FacesvsPER + - FacesvsLEXPER + - FacesvsAllnoFace + - AdultvsChild + - LimbsvsNull + - LimbsvsLEX + - LimbsvsPER + - LimbsvsLEXPER + - LimbsvsAllnoLimbs + - BodyvsLimb \ No newline at end of file diff --git a/example_configs/container_specific_example_configs/rtp2-pipeline/0.1.0_3.0.4rc21/example_config.json b/example_configs/container_specific_example_configs/rtp2-pipeline/0.1.0_3.0.4rc21/example_config.json deleted file mode 100644 index 88e2277..0000000 --- a/example_configs/container_specific_example_configs/rtp2-pipeline/0.1.0_3.0.4rc21/example_config.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "config": { - "subject_id": "Sxxx", - "reconall_options": "-autorecon2-cp -autorecon3", - "hippocampal_subfields": true, - "brainstem_structures": true, - "thalamic_nuclei": true, - "cerebellum": true, - "hcp": true, - "mni_rois": true, - "aparc2009": true, - "rois_in_output": true, - "neuropythy_analysis": true, - "run_gtmseg": 
true, - "force_ants": true, - "freesurfer_license_key": "lean.lecca.96@icloud.com 44700 *Cwx36m5MgglI FSz14PqJ1H/7Y" - } -} diff --git a/example_configs/container_specific_example_configs/rtp2-pipeline/0.1.0_3.0.4rc21/rtp2-pipeline_config.json b/example_configs/container_specific_example_configs/rtp2-pipeline/0.1.0_3.0.4rc21/rtp2-pipeline_config.json new file mode 100644 index 0000000..ed1fa1f --- /dev/null +++ b/example_configs/container_specific_example_configs/rtp2-pipeline/0.1.0_3.0.4rc21/rtp2-pipeline_config.json @@ -0,0 +1,28 @@ +{ + "config": { + "fiberWeighting": 1, + "numberOfNodes": 100, + "bval_for_fa": 1000, + "ET_angleValues": " 45, 45, 45, 25, 25, 25, 10, 10", + "ET_maxlength": "100,150,200,100,150,200,150,200", + "ET_minlength": 20, + "ET_numberFibers": 2000000, + "ET_track_stepSizeMm": 999, + "track_faMaskThresh": 0.2, + "track_faFodThresh": 0.05, + "get_vofparc": true, + "sift_runSift": true, + "sift_nFibers": 500000, + "life_runLife": false, + "life_discretization": 360, + "life_num_iterations": 10, + "life_saveOutput": false, + "life_test": false, + "life_writePDB": false, + "mrtrix_mrTrixAlgo": "iFOD2", + "mrtrix_autolmax": true, + "mrtrix_lmax": 6, + "mrtrix_useACT": false, + "save_output":true + } +} diff --git a/example_configs/container_specific_example_configs/rtp2-preproc/0.1.0_3.0.4rc31/example_config.json b/example_configs/container_specific_example_configs/rtp2-preproc/0.1.0_3.0.4rc31/example_config.json deleted file mode 100644 index 88e2277..0000000 --- a/example_configs/container_specific_example_configs/rtp2-preproc/0.1.0_3.0.4rc31/example_config.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "config": { - "subject_id": "Sxxx", - "reconall_options": "-autorecon2-cp -autorecon3", - "hippocampal_subfields": true, - "brainstem_structures": true, - "thalamic_nuclei": true, - "cerebellum": true, - "hcp": true, - "mni_rois": true, - "aparc2009": true, - "rois_in_output": true, - "neuropythy_analysis": true, - "run_gtmseg": true, - "force_ants": 
true, - "freesurfer_license_key": "lean.lecca.96@icloud.com 44700 *Cwx36m5MgglI FSz14PqJ1H/7Y" - } -} diff --git a/example_configs/container_specific_example_configs/rtp2-preproc/0.1.0_3.0.4rc31/rtp2-preproc_config.json b/example_configs/container_specific_example_configs/rtp2-preproc/0.1.0_3.0.4rc31/rtp2-preproc_config.json new file mode 100644 index 0000000..45d8bb6 --- /dev/null +++ b/example_configs/container_specific_example_configs/rtp2-preproc/0.1.0_3.0.4rc31/rtp2-preproc_config.json @@ -0,0 +1,26 @@ +{ + "config":{ + "denoise": true, + "degibbs": false, + "eddy": true, + "pe_dir": "PA", + "bias": false, + "bias_method": "ants", + "antsb": "[150,3]", + "antsc": "[200x200,1e-6]", + "antss": "2", + "ricn": false, + "norm": false, + "nval": 1000, + "doreslice": false, + "save_extra_output": false, + "anatalign": true, + "ants_dwi2anat_options": "-d 3 -t r", + "ants_qmap2anat_options": "-d 3 -t r", + "eddy_data_is_shelled": false, + "eddy_slm": "linear", + "eddy_niter": 5, + "eddy_repol": true, + "topup_lambda": "0.005,0.001,0.0001,0.000015,0.000005,0.0000005,0.00000005,0.0000000005,0.00000000001" + } +} diff --git a/example_configs/container_specific_example_configs/rtppreproc/1.1.3/example_config.json b/example_configs/container_specific_example_configs/rtppreproc/1.1.3/pedir_config.json similarity index 100% rename from example_configs/container_specific_example_configs/rtppreproc/1.1.3/example_config.json rename to example_configs/container_specific_example_configs/rtppreproc/1.1.3/pedir_config.json diff --git a/example_configs/container_specific_example_configs/rtppreproc/1.2.0-3.0.3/config.json b/example_configs/container_specific_example_configs/rtppreproc/1.2.0-3.0.3/acqd_config.json similarity index 100% rename from example_configs/container_specific_example_configs/rtppreproc/1.2.0-3.0.3/config.json rename to example_configs/container_specific_example_configs/rtppreproc/1.2.0-3.0.3/acqd_config.json diff --git 
a/example_configs/container_specific_example_configs/rtppreproc/1.2.0-3.0.3/example_config.json b/example_configs/container_specific_example_configs/rtppreproc/1.2.0-3.0.3/pedir_config.json similarity index 100% rename from example_configs/container_specific_example_configs/rtppreproc/1.2.0-3.0.3/example_config.json rename to example_configs/container_specific_example_configs/rtppreproc/1.2.0-3.0.3/pedir_config.json diff --git a/poetry.lock b/poetry.lock old mode 100755 new mode 100644 index 429372e..7de234f --- a/poetry.lock +++ b/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "argcomplete" -version = "3.3.0" +version = "3.4.0" description = "Bash tab completion for argparse" optional = false python-versions = ">=3.8" files = [ - {file = "argcomplete-3.3.0-py3-none-any.whl", hash = "sha256:c168c3723482c031df3c207d4ba8fa702717ccb9fc0bfe4117166c1f537b4a54"}, - {file = "argcomplete-3.3.0.tar.gz", hash = "sha256:fd03ff4a5b9e6580569d34b273f741e85cd9e072f3feeeee3eba4891c70eda62"}, + {file = "argcomplete-3.4.0-py3-none-any.whl", hash = "sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5"}, + {file = "argcomplete-3.4.0.tar.gz", hash = "sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f"}, ] [package.extras] @@ -25,6 +25,24 @@ files = [ {file = "astor-0.8.1.tar.gz", hash = "sha256:6a6effda93f4e1ce9f618779b2dd1d9d84f1e32812c23a29b3fff6fd7f63fa5e"}, ] +[[package]] +name = "asttokens" +version = "2.4.1" +description = "Annotate AST trees with source code positions" +optional = false +python-versions = "*" +files = [ + {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, + {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, +] + +[package.dependencies] +six = ">=1.12.0" + +[package.extras] +astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] +test = ["astroid (>=1,<2)", "astroid (>=2,<4)", 
"pytest"] + [[package]] name = "attrs" version = "23.2.0" @@ -115,6 +133,116 @@ d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] +[[package]] +name = "certifi" +version = "2024.7.4" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = 
"sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash 
= "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + 
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + [[package]] name = "click" version = "8.1.7" @@ 
-168,6 +296,69 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} [package.extras] development = ["black", "flake8", "mypy", "pytest", "types-colorama"] +[[package]] +name = "contourpy" +version = "1.2.1" +description = "Python library for calculating contours of 2D quadrilateral grids" +optional = false +python-versions = ">=3.9" +files = [ + {file = "contourpy-1.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bd7c23df857d488f418439686d3b10ae2fbf9bc256cd045b37a8c16575ea1040"}, + {file = "contourpy-1.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5b9eb0ca724a241683c9685a484da9d35c872fd42756574a7cfbf58af26677fd"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c75507d0a55378240f781599c30e7776674dbaf883a46d1c90f37e563453480"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11959f0ce4a6f7b76ec578576a0b61a28bdc0696194b6347ba3f1c53827178b9"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb3315a8a236ee19b6df481fc5f997436e8ade24a9f03dfdc6bd490fea20c6da"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39f3ecaf76cd98e802f094e0d4fbc6dc9c45a8d0c4d185f0f6c2234e14e5f75b"}, + {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:94b34f32646ca0414237168d68a9157cb3889f06b096612afdd296003fdd32fd"}, + {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:457499c79fa84593f22454bbd27670227874cd2ff5d6c84e60575c8b50a69619"}, + {file = "contourpy-1.2.1-cp310-cp310-win32.whl", hash = "sha256:ac58bdee53cbeba2ecad824fa8159493f0bf3b8ea4e93feb06c9a465d6c87da8"}, + {file = "contourpy-1.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:9cffe0f850e89d7c0012a1fb8730f75edd4320a0a731ed0c183904fe6ecfc3a9"}, + {file = "contourpy-1.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:6022cecf8f44e36af10bd9118ca71f371078b4c168b6e0fab43d4a889985dbb5"}, + {file = "contourpy-1.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ef5adb9a3b1d0c645ff694f9bca7702ec2c70f4d734f9922ea34de02294fdf72"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6150ffa5c767bc6332df27157d95442c379b7dce3a38dff89c0f39b63275696f"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c863140fafc615c14a4bf4efd0f4425c02230eb8ef02784c9a156461e62c965"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:00e5388f71c1a0610e6fe56b5c44ab7ba14165cdd6d695429c5cd94021e390b2"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4492d82b3bc7fbb7e3610747b159869468079fe149ec5c4d771fa1f614a14df"}, + {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:49e70d111fee47284d9dd867c9bb9a7058a3c617274900780c43e38d90fe1205"}, + {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b59c0ffceff8d4d3996a45f2bb6f4c207f94684a96bf3d9728dbb77428dd8cb8"}, + {file = "contourpy-1.2.1-cp311-cp311-win32.whl", hash = "sha256:7b4182299f251060996af5249c286bae9361fa8c6a9cda5efc29fe8bfd6062ec"}, + {file = "contourpy-1.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2855c8b0b55958265e8b5888d6a615ba02883b225f2227461aa9127c578a4922"}, + {file = "contourpy-1.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:62828cada4a2b850dbef89c81f5a33741898b305db244904de418cc957ff05dc"}, + {file = "contourpy-1.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:309be79c0a354afff9ff7da4aaed7c3257e77edf6c1b448a779329431ee79d7e"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e785e0f2ef0d567099b9ff92cbfb958d71c2d5b9259981cd9bee81bd194c9a4"}, + {file = 
"contourpy-1.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cac0a8f71a041aa587410424ad46dfa6a11f6149ceb219ce7dd48f6b02b87a7"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af3f4485884750dddd9c25cb7e3915d83c2db92488b38ccb77dd594eac84c4a0"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ce6889abac9a42afd07a562c2d6d4b2b7134f83f18571d859b25624a331c90b"}, + {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a1eea9aecf761c661d096d39ed9026574de8adb2ae1c5bd7b33558af884fb2ce"}, + {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:187fa1d4c6acc06adb0fae5544c59898ad781409e61a926ac7e84b8f276dcef4"}, + {file = "contourpy-1.2.1-cp312-cp312-win32.whl", hash = "sha256:c2528d60e398c7c4c799d56f907664673a807635b857df18f7ae64d3e6ce2d9f"}, + {file = "contourpy-1.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:1a07fc092a4088ee952ddae19a2b2a85757b923217b7eed584fdf25f53a6e7ce"}, + {file = "contourpy-1.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bb6834cbd983b19f06908b45bfc2dad6ac9479ae04abe923a275b5f48f1a186b"}, + {file = "contourpy-1.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1d59e739ab0e3520e62a26c60707cc3ab0365d2f8fecea74bfe4de72dc56388f"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd3db01f59fdcbce5b22afad19e390260d6d0222f35a1023d9adc5690a889364"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a12a813949e5066148712a0626895c26b2578874e4cc63160bb007e6df3436fe"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe0ccca550bb8e5abc22f530ec0466136379c01321fd94f30a22231e8a48d985"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e1d59258c3c67c865435d8fbeb35f8c59b8bef3d6f46c1f29f6123556af28445"}, + {file = "contourpy-1.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f32c38afb74bd98ce26de7cc74a67b40afb7b05aae7b42924ea990d51e4dac02"}, + {file = "contourpy-1.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d31a63bc6e6d87f77d71e1abbd7387ab817a66733734883d1fc0021ed9bfa083"}, + {file = "contourpy-1.2.1-cp39-cp39-win32.whl", hash = "sha256:ddcb8581510311e13421b1f544403c16e901c4e8f09083c881fab2be80ee31ba"}, + {file = "contourpy-1.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:10a37ae557aabf2509c79715cd20b62e4c7c28b8cd62dd7d99e5ed3ce28c3fd9"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a31f94983fecbac95e58388210427d68cd30fe8a36927980fab9c20062645609"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef2b055471c0eb466033760a521efb9d8a32b99ab907fc8358481a1dd29e3bd3"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b33d2bc4f69caedcd0a275329eb2198f560b325605810895627be5d4b876bf7f"}, + {file = "contourpy-1.2.1.tar.gz", hash = "sha256:4d8908b3bee1c889e547867ca4cdc54e5ab6be6d3e078556814a22457f49423c"}, +] + +[package.dependencies] +numpy = ">=1.20" + +[package.extras] +bokeh = ["bokeh", "selenium"] +docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] +mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.8.0)", "types-Pillow"] +test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] +test-no-images = ["pytest", "pytest-cov", "pytest-xdist", "wurlitzer"] + [[package]] name = "coverage" version = "6.5.0" @@ -248,42 +439,126 @@ files = [ coverage = ">=5,<7" packaging = ">=20.4" +[[package]] +name = "cycler" +version = "0.12.1" +description = "Composable style cycles" +optional = false +python-versions = ">=3.8" +files = [ + {file = "cycler-0.12.1-py3-none-any.whl", hash = 
"sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, + {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, +] + +[package.extras] +docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] +tests = ["pytest", "pytest-cov", "pytest-xdist"] + +[[package]] +name = "cython" +version = "3.0.10" +description = "The Cython compiler for writing C extensions in the Python language." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +files = [ + {file = "Cython-3.0.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e876272548d73583e90babda94c1299537006cad7a34e515a06c51b41f8657aa"}, + {file = "Cython-3.0.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:adc377aa33c3309191e617bf675fdbb51ca727acb9dc1aa23fc698d8121f7e23"}, + {file = "Cython-3.0.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:401aba1869a57aba2922ccb656a6320447e55ace42709b504c2f8e8b166f46e1"}, + {file = "Cython-3.0.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:541fbe725d6534a90b93f8c577eb70924d664b227a4631b90a6e0506d1469591"}, + {file = "Cython-3.0.10-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:86998b01f6a6d48398df8467292c7637e57f7e3a2ca68655367f13f66fed7734"}, + {file = "Cython-3.0.10-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d092c0ddba7e9e530a5c5be4ac06db8360258acc27675d1fc86294a5dc8994c5"}, + {file = "Cython-3.0.10-cp310-cp310-win32.whl", hash = "sha256:3cffb666e649dba23810732497442fb339ee67ba4e0be1f0579991e83fcc2436"}, + {file = "Cython-3.0.10-cp310-cp310-win_amd64.whl", hash = "sha256:9ea31184c7b3a728ef1f81fccb161d8948c05aa86c79f63b74fb6f3ddec860ec"}, + {file = "Cython-3.0.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:051069638abfb076900b0c2bcb6facf545655b3f429e80dd14365192074af5a4"}, + {file = 
"Cython-3.0.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:712760879600907189c7d0d346851525545484e13cd8b787e94bfd293da8ccf0"}, + {file = "Cython-3.0.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38d40fa1324ac47c04483d151f5e092406a147eac88a18aec789cf01c089c3f2"}, + {file = "Cython-3.0.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bd49a3a9fdff65446a3e1c2bfc0ec85c6ce4c3cad27cd4ad7ba150a62b7fb59"}, + {file = "Cython-3.0.10-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e8df79b596633b8295eaa48b1157d796775c2bb078f32267d32f3001b687f2fd"}, + {file = "Cython-3.0.10-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bcc9795990e525c192bc5c0775e441d7d56d7a7d02210451e9e13c0448dba51b"}, + {file = "Cython-3.0.10-cp311-cp311-win32.whl", hash = "sha256:09f2000041db482cad3bfce94e1fa3a4c82b0e57390a164c02566cbbda8c4f12"}, + {file = "Cython-3.0.10-cp311-cp311-win_amd64.whl", hash = "sha256:3919a55ec9b6c7db6f68a004c21c05ed540c40dbe459ced5d801d5a1f326a053"}, + {file = "Cython-3.0.10-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8f2864ab5fcd27a346f0b50f901ebeb8f60b25a60a575ccfd982e7f3e9674914"}, + {file = "Cython-3.0.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:407840c56385b9c085826fe300213e0e76ba15d1d47daf4b58569078ecb94446"}, + {file = "Cython-3.0.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a036d00caa73550a3a976432ef21c1e3fa12637e1616aab32caded35331ae96"}, + {file = "Cython-3.0.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9cc6a0e7e23a96dec3f3c9d39690d4281beabd5297855140d0d30855f950275e"}, + {file = "Cython-3.0.10-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a5e14a8c6a8157d2b0cdc2e8e3444905d20a0e78e19d2a097e89fb8b04b51f6b"}, + {file = "Cython-3.0.10-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:f8a2b8fa0fd8358bccb5f3304be563c4750aae175100463d212d5ea0ec74cbe0"}, + {file = "Cython-3.0.10-cp312-cp312-win32.whl", hash = "sha256:2d29e617fd23cf4b83afe8f93f2966566c9f565918ad1e86a4502fe825cc0a79"}, + {file = "Cython-3.0.10-cp312-cp312-win_amd64.whl", hash = "sha256:6c5af936940a38c300977b81598d9c0901158f220a58c177820e17e1774f1cf1"}, + {file = "Cython-3.0.10-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:5f465443917d5c0f69825fca3b52b64c74ac3de0143b1fff6db8ba5b48c9fb4a"}, + {file = "Cython-3.0.10-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fadb84193c25641973666e583df8df4e27c52cdc05ddce7c6f6510d690ba34a"}, + {file = "Cython-3.0.10-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fa9e7786083b6aa61594c16979d621b62e61fcd9c2edd4761641b95c7fb34b2"}, + {file = "Cython-3.0.10-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4780d0f98ce28191c4d841c4358b5d5e79d96520650910cd59904123821c52d"}, + {file = "Cython-3.0.10-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:32fbad02d1189be75eb96456d9c73f5548078e5338d8fa153ecb0115b6ee279f"}, + {file = "Cython-3.0.10-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:90e2f514fc753b55245351305a399463103ec18666150bb1c36779b9862388e9"}, + {file = "Cython-3.0.10-cp36-cp36m-win32.whl", hash = "sha256:a9c976e9ec429539a4367cb4b24d15a1e46b925976f4341143f49f5f161171f5"}, + {file = "Cython-3.0.10-cp36-cp36m-win_amd64.whl", hash = "sha256:a9bb402674788a7f4061aeef8057632ec440123e74ed0fb425308a59afdfa10e"}, + {file = "Cython-3.0.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:206e803598010ecc3813db8748ed685f7beeca6c413f982df9f8a505fce56563"}, + {file = "Cython-3.0.10-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15b6d397f4ee5ad54e373589522af37935a32863f1b23fa8c6922adf833e28e2"}, + {file = "Cython-3.0.10-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a181144c2f893ed8e6a994d43d0b96300bc99873f21e3b7334ca26c61c37b680"}, + {file = "Cython-3.0.10-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b74b700d6a793113d03fb54b63bdbadba6365379424bac7c0470605672769260"}, + {file = "Cython-3.0.10-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:076e9fd4e0ca33c5fa00a7479180dbfb62f17fe928e2909f82da814536e96d2b"}, + {file = "Cython-3.0.10-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:269f06e6961e8591d56e30b46e1a51b6ccb42cab04c29fa3b30d3e8723485fb4"}, + {file = "Cython-3.0.10-cp37-cp37m-win32.whl", hash = "sha256:d4e83a8ceff7af60064da4ccfce0ac82372544dd5392f1b350c34f1b04d0fae6"}, + {file = "Cython-3.0.10-cp37-cp37m-win_amd64.whl", hash = "sha256:40fac59c3a7fbcd9c25aea64c342c890a5e2270ce64a1525e840807800167799"}, + {file = "Cython-3.0.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f43a58bf2434870d2fc42ac2e9ff8138c9e00c6251468de279d93fa279e9ba3b"}, + {file = "Cython-3.0.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e9a885ec63d3955a08cefc4eec39fefa9fe14989c6e5e2382bd4aeb6bdb9bc3"}, + {file = "Cython-3.0.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acfbe0fff364d54906058fc61f2393f38cd7fa07d344d80923937b87e339adcf"}, + {file = "Cython-3.0.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8adcde00a8a88fab27509b558cd8c2959ab0c70c65d3814cfea8c68b83fa6dcd"}, + {file = "Cython-3.0.10-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2c9c1e3e78909488f3b16fabae02308423fa6369ed96ab1e250807d344cfffd7"}, + {file = "Cython-3.0.10-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fc6e0faf5b57523b073f0cdefadcaef3a51235d519a0594865925cadb3aeadf0"}, + {file = "Cython-3.0.10-cp38-cp38-win32.whl", hash = "sha256:35f6ede7c74024ed1982832ae61c9fad7cf60cc3f5b8c6a63bb34e38bc291936"}, + {file = "Cython-3.0.10-cp38-cp38-win_amd64.whl", hash = 
"sha256:950c0c7b770d2a7cec74fb6f5ccc321d0b51d151f48c075c0d0db635a60ba1b5"}, + {file = "Cython-3.0.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:077b61ee789e48700e25d4a16daa4258b8e65167136e457174df400cf9b4feab"}, + {file = "Cython-3.0.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64f1f8bba9d8f37c0cffc934792b4ac7c42d0891077127c11deebe9fa0a0f7e4"}, + {file = "Cython-3.0.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:651a15a8534ebfb9b58cb0b87c269c70984b6f9c88bfe65e4f635f0e3f07dfcd"}, + {file = "Cython-3.0.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d10fc9aa82e5e53a0b7fd118f9771199cddac8feb4a6d8350b7d4109085aa775"}, + {file = "Cython-3.0.10-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4f610964ab252a83e573a427e28b103e2f1dd3c23bee54f32319f9e73c3c5499"}, + {file = "Cython-3.0.10-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8c9c4c4f3ab8f8c02817b0e16e8fa7b8cc880f76e9b63fe9c010e60c1a6c2b13"}, + {file = "Cython-3.0.10-cp39-cp39-win32.whl", hash = "sha256:0bac3ccdd4e03924028220c62ae3529e17efa8ca7e9df9330de95de02f582b26"}, + {file = "Cython-3.0.10-cp39-cp39-win_amd64.whl", hash = "sha256:81f356c1c8c0885b8435bfc468025f545c5d764aa9c75ab662616dd1193c331e"}, + {file = "Cython-3.0.10-py2.py3-none-any.whl", hash = "sha256:fcbb679c0b43514d591577fd0d20021c55c240ca9ccafbdb82d3fb95e5edfee2"}, + {file = "Cython-3.0.10.tar.gz", hash = "sha256:dcc96739331fb854dcf503f94607576cfe8488066c61ca50dfd55836f132de99"}, +] + [[package]] name = "dask" -version = "2022.7.0" +version = "2024.5.2" description = "Parallel PyData with Task Scheduling" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "dask-2022.7.0-py3-none-any.whl", hash = "sha256:3301725a654796ba5942536646689a37c27513c89724b34a40281bb6848b2963"}, - {file = "dask-2022.7.0.tar.gz", hash = 
"sha256:50ad63035d6429cfa71d4c1832888a668e4260b1ef11163c795edc0ed177b4b3"}, + {file = "dask-2024.5.2-py3-none-any.whl", hash = "sha256:acc2cfe41d9e0151c216ac40396dbe34df13bc3d8c51dfece190349e4f2243af"}, + {file = "dask-2024.5.2.tar.gz", hash = "sha256:5c9722c44d0195e78b6e54197aa3302e6fcaaac2310fd3014560bcb86253dcb3"}, ] [package.dependencies] -cloudpickle = ">=1.1.1" -fsspec = ">=0.6.0" +click = ">=8.1" +cloudpickle = ">=1.5.0" +fsspec = ">=2021.09.0" +importlib-metadata = {version = ">=4.13.0", markers = "python_version < \"3.12\""} packaging = ">=20.0" -partd = ">=0.3.10" +partd = ">=1.2.0" pyyaml = ">=5.3.1" -toolz = ">=0.8.2" +toolz = ">=0.10.0" [package.extras] -array = ["numpy (>=1.18)"] -complete = ["bokeh (>=2.4.2)", "distributed (==2022.7.0)", "jinja2", "numpy (>=1.18)", "pandas (>=1.0)"] -dataframe = ["numpy (>=1.18)", "pandas (>=1.0)"] -diagnostics = ["bokeh (>=2.4.2)", "jinja2"] -distributed = ["distributed (==2022.7.0)"] -test = ["pre-commit", "pytest", "pytest-rerunfailures", "pytest-xdist"] +array = ["numpy (>=1.21)"] +complete = ["dask[array,dataframe,diagnostics,distributed]", "lz4 (>=4.3.2)", "pyarrow (>=7.0)", "pyarrow-hotfix"] +dataframe = ["dask-expr (>=1.1,<1.2)", "dask[array]", "pandas (>=1.3)"] +diagnostics = ["bokeh (>=2.4.2)", "jinja2 (>=2.10.3)"] +distributed = ["distributed (==2024.5.2)"] +test = ["pandas[test]", "pre-commit", "pytest", "pytest-cov", "pytest-rerunfailures", "pytest-timeout", "pytest-xdist"] [[package]] name = "dask-jobqueue" -version = "0.8.1" +version = "0.8.5" description = "Deploy Dask on job queuing systems like PBS, Slurm, SGE or LSF" optional = false python-versions = ">=3.8" files = [ - {file = "dask-jobqueue-0.8.1.tar.gz", hash = "sha256:16fd1b646a073ad3de75dde12a0dfe529b836f21a3bdbcee2a88bef24e9112a7"}, - {file = "dask_jobqueue-0.8.1-py2.py3-none-any.whl", hash = "sha256:22f7435bbda34feb75cd7abc4b3175309cbdb9e8dadb02174d37aba09944abe9"}, + {file = "dask-jobqueue-0.8.5.tar.gz", hash = 
"sha256:f6923f9d7ff894b96efbf706118b2cd37fd37751d567e91c22dfd3e2eaa93202"}, + {file = "dask_jobqueue-0.8.5-py2.py3-none-any.whl", hash = "sha256:96a51083b93e5e66354bdb337840663202e45b20930071edeb41af07bbd5af26"}, ] [package.dependencies] @@ -293,6 +568,17 @@ distributed = ">=2022.02.0" [package.extras] test = ["cryptography", "pytest", "pytest-asyncio"] +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +optional = false +python-versions = ">=3.5" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + [[package]] name = "distlib" version = "0.3.8" @@ -306,31 +592,31 @@ files = [ [[package]] name = "distributed" -version = "2022.7.0" +version = "2024.5.2" description = "Distributed scheduler for Dask" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "distributed-2022.7.0-py3-none-any.whl", hash = "sha256:1dd0d7833909024ced4814d16118a75623dca52c74578c204f82794b54c4c7b9"}, - {file = "distributed-2022.7.0.tar.gz", hash = "sha256:e68aba8be3e20e5d1120abcac841909ff18021708a8412afa58ff153c4fab0ac"}, + {file = "distributed-2024.5.2-py3-none-any.whl", hash = "sha256:c0fd59d5c34179d9c9b5dc5acb42a00a06d163107b79f66c2dc73e9479a92286"}, + {file = "distributed-2024.5.2.tar.gz", hash = "sha256:4cee41093e98340d04d9254012c7d521065f64b3f33546dd0b02b00becb41e21"}, ] [package.dependencies] -click = ">=6.6" +click = ">=8.0" cloudpickle = ">=1.5.0" -dask = "2022.7.0" -jinja2 = "*" +dask = "2024.5.2" +jinja2 = ">=2.10.3" locket = ">=1.0.0" -msgpack = ">=0.6.0" +msgpack = ">=1.0.0" packaging = ">=20.0" -psutil = ">=5.0" -pyyaml = "*" -sortedcontainers = "<2.0.0 || >2.0.0,<2.0.1 || >2.0.1" +psutil = ">=5.7.2" +pyyaml = ">=5.3.1" +sortedcontainers = ">=2.0.5" tblib = ">=1.6.0" -toolz = ">=0.8.2" -tornado = 
">=6.0.3,<6.2" -urllib3 = "*" -zict = ">=0.1.3" +toolz = ">=0.10.0" +tornado = ">=6.0.4" +urllib3 = ">=1.24.3" +zict = ">=3.0.0" [[package]] name = "docopt" @@ -344,48 +630,62 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.2.1" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, - {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "executing" +version = "2.0.1" +description = "Get the currently executing AST node of a frame, and other information" +optional = false +python-versions = ">=3.5" +files = [ + {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, + {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, +] + +[package.extras] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] + [[package]] name = "filelock" -version = "3.15.1" +version = "3.15.4" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.15.1-py3-none-any.whl", hash = "sha256:71b3102950e91dfc1bb4209b64be4dc8854f40e5f534428d8684f953ac847fac"}, - {file = "filelock-3.15.1.tar.gz", hash = "sha256:58a2549afdf9e02e10720eaa4d4470f56386d7a6f72edd7d0596337af8ed7ad8"}, + {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, + {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] typing = ["typing-extensions (>=4.8)"] [[package]] name = "flake8" -version = "7.0.0" +version = "7.1.0" description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.8.1" files = [ - {file = "flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"}, - {file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"}, + {file = "flake8-7.1.0-py2.py3-none-any.whl", hash = "sha256:2e416edcc62471a64cea09353f4e7bdba32aeb079b6e360554c659a122b1bc6a"}, + {file = "flake8-7.1.0.tar.gz", hash = "sha256:48a07b626b55236e0fb4784ee69a465fbf59d79eec1f5b4785c3d3bc57d17aa5"}, ] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.11.0,<2.12.0" +pycodestyle = ">=2.12.0,<2.13.0" pyflakes = ">=3.2.0,<3.3.0" [[package]] @@ -422,17 +722,82 @@ dev = 
["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "pytest", [[package]] name = "flake8-comprehensions" -version = "3.14.0" +version = "3.15.0" description = "A flake8 plugin to help you write better list/set/dict comprehensions." optional = false python-versions = ">=3.8" files = [ - {file = "flake8_comprehensions-3.14.0-py3-none-any.whl", hash = "sha256:7b9d07d94aa88e62099a6d1931ddf16c344d4157deedf90fe0d8ee2846f30e97"}, - {file = "flake8_comprehensions-3.14.0.tar.gz", hash = "sha256:81768c61bfc064e1a06222df08a2580d97de10cb388694becaf987c331c6c0cf"}, + {file = "flake8_comprehensions-3.15.0-py3-none-any.whl", hash = "sha256:b7e027bbb52be2ceb779ee12484cdeef52b0ad3c1fcb8846292bdb86d3034681"}, + {file = "flake8_comprehensions-3.15.0.tar.gz", hash = "sha256:923c22603e0310376a6b55b03efebdc09753c69f2d977755cba8bb73458a5d4d"}, ] [package.dependencies] -flake8 = ">=3.0,<3.2.0 || >3.2.0" +flake8 = ">=3,<3.2 || >3.2" + +[[package]] +name = "fonttools" +version = "4.53.1" +description = "Tools to manipulate font files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fonttools-4.53.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0679a30b59d74b6242909945429dbddb08496935b82f91ea9bf6ad240ec23397"}, + {file = "fonttools-4.53.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8bf06b94694251861ba7fdeea15c8ec0967f84c3d4143ae9daf42bbc7717fe3"}, + {file = "fonttools-4.53.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b96cd370a61f4d083c9c0053bf634279b094308d52fdc2dd9a22d8372fdd590d"}, + {file = "fonttools-4.53.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1c7c5aa18dd3b17995898b4a9b5929d69ef6ae2af5b96d585ff4005033d82f0"}, + {file = "fonttools-4.53.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e013aae589c1c12505da64a7d8d023e584987e51e62006e1bb30d72f26522c41"}, + {file = "fonttools-4.53.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:9efd176f874cb6402e607e4cc9b4a9cd584d82fc34a4b0c811970b32ba62501f"}, + {file = "fonttools-4.53.1-cp310-cp310-win32.whl", hash = "sha256:c8696544c964500aa9439efb6761947393b70b17ef4e82d73277413f291260a4"}, + {file = "fonttools-4.53.1-cp310-cp310-win_amd64.whl", hash = "sha256:8959a59de5af6d2bec27489e98ef25a397cfa1774b375d5787509c06659b3671"}, + {file = "fonttools-4.53.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:da33440b1413bad53a8674393c5d29ce64d8c1a15ef8a77c642ffd900d07bfe1"}, + {file = "fonttools-4.53.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ff7e5e9bad94e3a70c5cd2fa27f20b9bb9385e10cddab567b85ce5d306ea923"}, + {file = "fonttools-4.53.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6e7170d675d12eac12ad1a981d90f118c06cf680b42a2d74c6c931e54b50719"}, + {file = "fonttools-4.53.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bee32ea8765e859670c4447b0817514ca79054463b6b79784b08a8df3a4d78e3"}, + {file = "fonttools-4.53.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6e08f572625a1ee682115223eabebc4c6a2035a6917eac6f60350aba297ccadb"}, + {file = "fonttools-4.53.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b21952c092ffd827504de7e66b62aba26fdb5f9d1e435c52477e6486e9d128b2"}, + {file = "fonttools-4.53.1-cp311-cp311-win32.whl", hash = "sha256:9dfdae43b7996af46ff9da520998a32b105c7f098aeea06b2226b30e74fbba88"}, + {file = "fonttools-4.53.1-cp311-cp311-win_amd64.whl", hash = "sha256:d4d0096cb1ac7a77b3b41cd78c9b6bc4a400550e21dc7a92f2b5ab53ed74eb02"}, + {file = "fonttools-4.53.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d92d3c2a1b39631a6131c2fa25b5406855f97969b068e7e08413325bc0afba58"}, + {file = "fonttools-4.53.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3b3c8ebafbee8d9002bd8f1195d09ed2bd9ff134ddec37ee8f6a6375e6a4f0e8"}, + {file = "fonttools-4.53.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:32f029c095ad66c425b0ee85553d0dc326d45d7059dbc227330fc29b43e8ba60"}, + {file = "fonttools-4.53.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10f5e6c3510b79ea27bb1ebfcc67048cde9ec67afa87c7dd7efa5c700491ac7f"}, + {file = "fonttools-4.53.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f677ce218976496a587ab17140da141557beb91d2a5c1a14212c994093f2eae2"}, + {file = "fonttools-4.53.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9e6ceba2a01b448e36754983d376064730690401da1dd104ddb543519470a15f"}, + {file = "fonttools-4.53.1-cp312-cp312-win32.whl", hash = "sha256:791b31ebbc05197d7aa096bbc7bd76d591f05905d2fd908bf103af4488e60670"}, + {file = "fonttools-4.53.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ed170b5e17da0264b9f6fae86073be3db15fa1bd74061c8331022bca6d09bab"}, + {file = "fonttools-4.53.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c818c058404eb2bba05e728d38049438afd649e3c409796723dfc17cd3f08749"}, + {file = "fonttools-4.53.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:651390c3b26b0c7d1f4407cad281ee7a5a85a31a110cbac5269de72a51551ba2"}, + {file = "fonttools-4.53.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e54f1bba2f655924c1138bbc7fa91abd61f45c68bd65ab5ed985942712864bbb"}, + {file = "fonttools-4.53.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9cd19cf4fe0595ebdd1d4915882b9440c3a6d30b008f3cc7587c1da7b95be5f"}, + {file = "fonttools-4.53.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2af40ae9cdcb204fc1d8f26b190aa16534fcd4f0df756268df674a270eab575d"}, + {file = "fonttools-4.53.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:35250099b0cfb32d799fb5d6c651220a642fe2e3c7d2560490e6f1d3f9ae9169"}, + {file = "fonttools-4.53.1-cp38-cp38-win32.whl", hash = "sha256:f08df60fbd8d289152079a65da4e66a447efc1d5d5a4d3f299cdd39e3b2e4a7d"}, + {file = "fonttools-4.53.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:7b6b35e52ddc8fb0db562133894e6ef5b4e54e1283dff606fda3eed938c36fc8"}, + {file = "fonttools-4.53.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75a157d8d26c06e64ace9df037ee93a4938a4606a38cb7ffaf6635e60e253b7a"}, + {file = "fonttools-4.53.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4824c198f714ab5559c5be10fd1adf876712aa7989882a4ec887bf1ef3e00e31"}, + {file = "fonttools-4.53.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:becc5d7cb89c7b7afa8321b6bb3dbee0eec2b57855c90b3e9bf5fb816671fa7c"}, + {file = "fonttools-4.53.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84ec3fb43befb54be490147b4a922b5314e16372a643004f182babee9f9c3407"}, + {file = "fonttools-4.53.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:73379d3ffdeecb376640cd8ed03e9d2d0e568c9d1a4e9b16504a834ebadc2dfb"}, + {file = "fonttools-4.53.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:02569e9a810f9d11f4ae82c391ebc6fb5730d95a0657d24d754ed7763fb2d122"}, + {file = "fonttools-4.53.1-cp39-cp39-win32.whl", hash = "sha256:aae7bd54187e8bf7fd69f8ab87b2885253d3575163ad4d669a262fe97f0136cb"}, + {file = "fonttools-4.53.1-cp39-cp39-win_amd64.whl", hash = "sha256:e5b708073ea3d684235648786f5f6153a48dc8762cdfe5563c57e80787c29fbb"}, + {file = "fonttools-4.53.1-py3-none-any.whl", hash = "sha256:f1f8758a2ad110bd6432203a344269f445a2907dc24ef6bccfd0ac4e14e0d71d"}, + {file = "fonttools-4.53.1.tar.gz", hash = "sha256:e128778a8e9bc11159ce5447f76766cefbd876f44bd79aff030287254e4752c4"}, +] + +[package.extras] +all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] +graphite = ["lz4 (>=1.7.4.2)"] +interpolatable = ["munkres", "pycairo", "scipy"] +lxml = ["lxml (>=4.0)"] +pathops = ["skia-pathops (>=0.5.0)"] +plot = ["matplotlib"] +repacker = 
["uharfbuzz (>=0.23.0)"] +symfont = ["sympy"] +type1 = ["xattr"] +ufo = ["fs (>=2.2.0,<3)"] +unicode = ["unicodedata2 (>=15.1.0)"] +woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] [[package]] name = "formulaic" @@ -460,13 +825,13 @@ calculus = ["sympy (>=1.3,<1.10)"] [[package]] name = "fsspec" -version = "2024.6.0" +version = "2024.6.1" description = "File-system specification" optional = false python-versions = ">=3.8" files = [ - {file = "fsspec-2024.6.0-py3-none-any.whl", hash = "sha256:58d7122eb8a1a46f7f13453187bfea4972d66bf01618d37366521b1998034cee"}, - {file = "fsspec-2024.6.0.tar.gz", hash = "sha256:f579960a56e6d8038a9efc8f9c77279ec12e6299aa86b0769a7e9c46b94527c2"}, + {file = "fsspec-2024.6.1-py3-none-any.whl", hash = "sha256:3cb443f8bcd2efb31295a5b9fdb02aee81d8452c80d28f97a6d0959e6cee101e"}, + {file = "fsspec-2024.6.1.tar.gz", hash = "sha256:fad7d7e209dd4c1208e3bbfda706620e0da5142bebbd9c384afb95b07e798e49"}, ] [package.extras] @@ -497,6 +862,43 @@ test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask-expr", "dask[dataframe, test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard"] tqdm = ["tqdm"] +[[package]] +name = "gdist" +version = "2.1.0" +description = "Compute geodesic distances" +optional = false +python-versions = "*" +files = [ + {file = "gdist-2.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ff7551ee9b5c7716fc077068e2b52890ac9cf3ef249582acde895f7119d4040f"}, + {file = "gdist-2.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:ace46774ea8fa3d75a0d775951a4c3bacd3da69b06f9f17d805474e95f71b1b0"}, + {file = "gdist-2.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91b3280f6d1fdef415232496b30397a6c9dcc8d472806bffe610a26313ce03e0"}, + {file = "gdist-2.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:8154e208b4a30dff370fdcfda11c133748d229390f54d1f114b0c7c54431a255"}, + {file = "gdist-2.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f08396d8b0dd89886dcf8b75f9eeb5390dac27efa7703ba1a1b724754951afa6"}, + {file = "gdist-2.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:29d7da6d285b6980cc1c30c2cc9263c03c4bdfe158b929f33ddd7993219042b3"}, + {file = "gdist-2.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9271466f4fab9881c57f7825d66eba069ee8df32764021474e8348667abb49b7"}, + {file = "gdist-2.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0198e7410a91461052d747ac6c5c26515a987789ec9808c938fa26790616ba3b"}, + {file = "gdist-2.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2ad481522be56672c2461d215985f25cf471a192f2723269790241e0d4e0beab"}, + {file = "gdist-2.1.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfe8d8ff3f8ef7c3862300f24cceb348b7cd1fb71b6ed4011a3d17ecd37b3704"}, + {file = "gdist-2.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a195568f72ff859b25c900c7f00d92f18bd063bf13948e8ad8287061b89c72cd"}, + {file = "gdist-2.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cc3c0ca7168be4ec7256db96a123bc4b64b00fa0f1fc70bf87bbef44cd344b3e"}, + {file = "gdist-2.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70723f620fe9d339075b8dce1662b48a8bb9c47df84d7b6850072ff0179af128"}, + {file = "gdist-2.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1131123f24964ee542806cb6dbb56744c5aa9ba041cd9ee1de255ecddb6cf96a"}, + {file = "gdist-2.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d30b4b5e50a7d191bdc5fa9b7701684a18b1a0075f6b649daa8342d36efa7104"}, + {file = 
"gdist-2.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a40a2fd8335e981c12debe2091775b54306300a415c1f253dff3fb3eb2990ba"}, + {file = "gdist-2.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54262561068f3d28c76f21de850e8adb18751aac066ff86e671da12262350286"}, + {file = "gdist-2.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:08a847153353a296188bd01946242cd0e0eabe35594db1a8cdef8f9258b98162"}, + {file = "gdist-2.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:56af0eb5828fd23d28c77c4d09e6357fe024720b3dcea45ecf3dd918fc3c81c8"}, + {file = "gdist-2.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:599a14363ec9570d53eb17d9e0b3a443fea774a0c3044161727cc732725dfd27"}, + {file = "gdist-2.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b075bfbef6781c80c256184b9b61c5d8ac17384461ef43e1cf3c2c05a20bb99"}, + {file = "gdist-2.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:c28ce89c4b9c8d8e987027bb3a8add9877d19a8ede0bf7167d14d2fd023d85a2"}, + {file = "gdist-2.1.0.tar.gz", hash = "sha256:f8c6b25f9ab7c626cd683c23c7886f4a5f5a4aae2e1d3931ae865157d4e33fe8"}, +] + +[package.dependencies] +cython = "*" +numpy = "*" +scipy = "*" + [[package]] name = "greenlet" version = "3.0.3" @@ -568,6 +970,36 @@ files = [ docs = ["Sphinx", "furo"] test = ["objgraph", "psutil"] +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "importlib-metadata" +version = "8.0.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = 
"sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"}, + {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -590,6 +1022,63 @@ files = [ {file = "interface_meta-1.3.0.tar.gz", hash = "sha256:8a4493f8bdb73fb9655dcd5115bc897e207319e36c8835f39c516a2d7e9d79a1"}, ] +[[package]] +name = "ipython" +version = "8.26.0" +description = "IPython: Productive Interactive Computing" +optional = false +python-versions = ">=3.10" +files = [ + {file = "ipython-8.26.0-py3-none-any.whl", hash = "sha256:e6b347c27bdf9c32ee9d31ae85defc525755a1869f14057e900675b9e8d6e6ff"}, + {file = "ipython-8.26.0.tar.gz", hash = "sha256:1cec0fbba8404af13facebe83d04436a7434c7400e59f47acf467c64abd0956c"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} +prompt-toolkit = ">=3.0.41,<3.1.0" +pygments = ">=2.4.0" +stack-data = "*" +traitlets = ">=5.13.0" +typing-extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} + +[package.extras] +all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] +black = ["black"] +doc = ["docrepr", 
"exceptiongroup", "intersphinx-registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing-extensions"] +kernel = ["ipykernel"] +matplotlib = ["matplotlib"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] + +[[package]] +name = "jedi" +version = "0.19.1" +description = "An autocompletion tool for Python that can be used for text editors." +optional = false +python-versions = ">=3.6" +files = [ + {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, + {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, +] + +[package.dependencies] +parso = ">=0.8.3,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] + [[package]] name = "jinja2" version = "3.1.4" @@ -607,6 +1096,130 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel 
(>=2.7)"] +[[package]] +name = "joblib" +version = "1.4.2" +description = "Lightweight pipelining with Python functions" +optional = false +python-versions = ">=3.8" +files = [ + {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, + {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, +] + +[[package]] +name = "kiwisolver" +version = "1.4.5" +description = "A fast implementation of the Cassowary constraint solver" +optional = false +python-versions = ">=3.7" +files = [ + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:05703cf211d585109fcd72207a31bb170a0f22144d68298dc5e61b3c946518af"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:146d14bebb7f1dc4d5fbf74f8a6cb15ac42baadee8912eb84ac0b3b2a3dc6ac3"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ef7afcd2d281494c0a9101d5c571970708ad911d028137cd558f02b851c08b4"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9eaa8b117dc8337728e834b9c6e2611f10c79e38f65157c4c38e9400286f5cb1"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec20916e7b4cbfb1f12380e46486ec4bcbaa91a9c448b97023fde0d5bbf9e4ff"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b42c68602539407884cf70d6a480a469b93b81b7701378ba5e2328660c847a"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa12042de0171fad672b6c59df69106d20d5596e4f87b5e8f76df757a7c399aa"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a40773c71d7ccdd3798f6489aaac9eee213d566850a9533f8d26332d626b82c"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:19df6e621f6d8b4b9c4d45f40a66839294ff2bb235e64d2178f7522d9170ac5b"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:83d78376d0d4fd884e2c114d0621624b73d2aba4e2788182d286309ebdeed770"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e391b1f0a8a5a10ab3b9bb6afcfd74f2175f24f8975fb87ecae700d1503cdee0"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:852542f9481f4a62dbb5dd99e8ab7aedfeb8fb6342349a181d4036877410f525"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59edc41b24031bc25108e210c0def6f6c2191210492a972d585a06ff246bb79b"}, + {file = "kiwisolver-1.4.5-cp310-cp310-win32.whl", hash = "sha256:a6aa6315319a052b4ee378aa171959c898a6183f15c1e541821c5c59beaa0238"}, + {file = "kiwisolver-1.4.5-cp310-cp310-win_amd64.whl", hash = "sha256:d0ef46024e6a3d79c01ff13801cb19d0cad7fd859b15037aec74315540acc276"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:11863aa14a51fd6ec28688d76f1735f8f69ab1fabf388851a595d0721af042f5"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ab3919a9997ab7ef2fbbed0cc99bb28d3c13e6d4b1ad36e97e482558a91be90"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fcc700eadbbccbf6bc1bcb9dbe0786b4b1cb91ca0dcda336eef5c2beed37b797"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfdd7c0b105af050eb3d64997809dc21da247cf44e63dc73ff0fd20b96be55a9"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c6a5964640638cdeaa0c359382e5703e9293030fe730018ca06bc2010c4437"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbea0db94288e29afcc4c28afbf3a7ccaf2d7e027489c449cf7e8f83c6346eb9"}, + {file = 
"kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ceec1a6bc6cab1d6ff5d06592a91a692f90ec7505d6463a88a52cc0eb58545da"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:040c1aebeda72197ef477a906782b5ab0d387642e93bda547336b8957c61022e"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f91de7223d4c7b793867797bacd1ee53bfe7359bd70d27b7b58a04efbb9436c8"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:faae4860798c31530dd184046a900e652c95513796ef51a12bc086710c2eec4d"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0157420efcb803e71d1b28e2c287518b8808b7cf1ab8af36718fd0a2c453eb0"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:06f54715b7737c2fecdbf140d1afb11a33d59508a47bf11bb38ecf21dc9ab79f"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fdb7adb641a0d13bdcd4ef48e062363d8a9ad4a182ac7647ec88f695e719ae9f"}, + {file = "kiwisolver-1.4.5-cp311-cp311-win32.whl", hash = "sha256:bb86433b1cfe686da83ce32a9d3a8dd308e85c76b60896d58f082136f10bffac"}, + {file = "kiwisolver-1.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c08e1312a9cf1074d17b17728d3dfce2a5125b2d791527f33ffbe805200a355"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:32d5cf40c4f7c7b3ca500f8985eb3fb3a7dfc023215e876f207956b5ea26632a"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f846c260f483d1fd217fe5ed7c173fb109efa6b1fc8381c8b7552c5781756192"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ff5cf3571589b6d13bfbfd6bcd7a3f659e42f96b5fd1c4830c4cf21d4f5ef45"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:7269d9e5f1084a653d575c7ec012ff57f0c042258bf5db0954bf551c158466e7"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da802a19d6e15dffe4b0c24b38b3af68e6c1a68e6e1d8f30148c83864f3881db"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3aba7311af82e335dd1e36ffff68aaca609ca6290c2cb6d821a39aa075d8e3ff"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763773d53f07244148ccac5b084da5adb90bfaee39c197554f01b286cf869228"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2270953c0d8cdab5d422bee7d2007f043473f9d2999631c86a223c9db56cbd16"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d099e745a512f7e3bbe7249ca835f4d357c586d78d79ae8f1dcd4d8adeb9bda9"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:74db36e14a7d1ce0986fa104f7d5637aea5c82ca6326ed0ec5694280942d1162"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e5bab140c309cb3a6ce373a9e71eb7e4873c70c2dda01df6820474f9889d6d4"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0f114aa76dc1b8f636d077979c0ac22e7cd8f3493abbab152f20eb8d3cda71f3"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:88a2df29d4724b9237fc0c6eaf2a1adae0cdc0b3e9f4d8e7dc54b16812d2d81a"}, + {file = "kiwisolver-1.4.5-cp312-cp312-win32.whl", hash = "sha256:72d40b33e834371fd330fb1472ca19d9b8327acb79a5821d4008391db8e29f20"}, + {file = "kiwisolver-1.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:2c5674c4e74d939b9d91dda0fae10597ac7521768fec9e399c70a1f27e2ea2d9"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a2b053a0ab7a3960c98725cfb0bf5b48ba82f64ec95fe06f1d06c99b552e130"}, + {file = 
"kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd32d6c13807e5c66a7cbb79f90b553642f296ae4518a60d8d76243b0ad2898"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59ec7b7c7e1a61061850d53aaf8e93db63dce0c936db1fda2658b70e4a1be709"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da4cfb373035def307905d05041c1d06d8936452fe89d464743ae7fb8371078b"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2400873bccc260b6ae184b2b8a4fec0e4082d30648eadb7c3d9a13405d861e89"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1b04139c4236a0f3aff534479b58f6f849a8b351e1314826c2d230849ed48985"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4e66e81a5779b65ac21764c295087de82235597a2293d18d943f8e9e32746265"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7931d8f1f67c4be9ba1dd9c451fb0eeca1a25b89e4d3f89e828fe12a519b782a"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b3f7e75f3015df442238cca659f8baa5f42ce2a8582727981cbfa15fee0ee205"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:bbf1d63eef84b2e8c89011b7f2235b1e0bf7dacc11cac9431fc6468e99ac77fb"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4c380469bd3f970ef677bf2bcba2b6b0b4d5c75e7a020fb863ef75084efad66f"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-win32.whl", hash = "sha256:9408acf3270c4b6baad483865191e3e582b638b1654a007c62e3efe96f09a9a3"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-win_amd64.whl", hash = "sha256:5b94529f9b2591b7af5f3e0e730a4e0a41ea174af35a4fd067775f9bdfeee01a"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:11c7de8f692fc99816e8ac50d1d1aef4f75126eefc33ac79aac02c099fd3db71"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:53abb58632235cd154176ced1ae8f0d29a6657aa1aa9decf50b899b755bc2b93"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88b9f257ca61b838b6f8094a62418421f87ac2a1069f7e896c36a7d86b5d4c29"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3195782b26fc03aa9c6913d5bad5aeb864bdc372924c093b0f1cebad603dd712"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc579bf0f502e54926519451b920e875f433aceb4624a3646b3252b5caa9e0b6"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a580c91d686376f0f7c295357595c5a026e6cbc3d77b7c36e290201e7c11ecb"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cfe6ab8da05c01ba6fbea630377b5da2cd9bcbc6338510116b01c1bc939a2c18"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d2e5a98f0ec99beb3c10e13b387f8db39106d53993f498b295f0c914328b1333"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a51a263952b1429e429ff236d2f5a21c5125437861baeed77f5e1cc2d2c7c6da"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3edd2fa14e68c9be82c5b16689e8d63d89fe927e56debd6e1dbce7a26a17f81b"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:74d1b44c6cfc897df648cc9fdaa09bc3e7679926e6f96df05775d4fb3946571c"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:76d9289ed3f7501012e05abb8358bbb129149dbd173f1f57a1bf1c22d19ab7cc"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:92dea1ffe3714fa8eb6a314d2b3c773208d865a0e0d35e713ec54eea08a66250"}, + {file = "kiwisolver-1.4.5-cp38-cp38-win32.whl", 
hash = "sha256:5c90ae8c8d32e472be041e76f9d2f2dbff4d0b0be8bd4041770eddb18cf49a4e"}, + {file = "kiwisolver-1.4.5-cp38-cp38-win_amd64.whl", hash = "sha256:c7940c1dc63eb37a67721b10d703247552416f719c4188c54e04334321351ced"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9407b6a5f0d675e8a827ad8742e1d6b49d9c1a1da5d952a67d50ef5f4170b18d"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15568384086b6df3c65353820a4473575dbad192e35010f622c6ce3eebd57af9"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0dc9db8e79f0036e8173c466d21ef18e1befc02de8bf8aa8dc0813a6dc8a7046"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cdc8a402aaee9a798b50d8b827d7ecf75edc5fb35ea0f91f213ff927c15f4ff0"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6c3bd3cde54cafb87d74d8db50b909705c62b17c2099b8f2e25b461882e544ff"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:955e8513d07a283056b1396e9a57ceddbd272d9252c14f154d450d227606eb54"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:346f5343b9e3f00b8db8ba359350eb124b98c99efd0b408728ac6ebf38173958"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9098e0049e88c6a24ff64545cdfc50807818ba6c1b739cae221bbbcbc58aad3"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:00bd361b903dc4bbf4eb165f24d1acbee754fce22ded24c3d56eec268658a5cf"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7b8b454bac16428b22560d0a1cf0a09875339cab69df61d7805bf48919415901"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f1d072c2eb0ad60d4c183f3fb44ac6f73fb7a8f16a2694a91f988275cbf352f9"}, + {file = 
"kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:31a82d498054cac9f6d0b53d02bb85811185bcb477d4b60144f915f3b3126342"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6512cb89e334e4700febbffaaa52761b65b4f5a3cf33f960213d5656cea36a77"}, + {file = "kiwisolver-1.4.5-cp39-cp39-win32.whl", hash = "sha256:9db8ea4c388fdb0f780fe91346fd438657ea602d58348753d9fb265ce1bca67f"}, + {file = "kiwisolver-1.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:59415f46a37f7f2efeec758353dd2eae1b07640d8ca0f0c42548ec4125492635"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5c7b3b3a728dc6faf3fc372ef24f21d1e3cee2ac3e9596691d746e5a536de920"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:620ced262a86244e2be10a676b646f29c34537d0d9cc8eb26c08f53d98013390"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:378a214a1e3bbf5ac4a8708304318b4f890da88c9e6a07699c4ae7174c09a68d"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf7be1207676ac608a50cd08f102f6742dbfc70e8d60c4db1c6897f62f71523"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ba55dce0a9b8ff59495ddd050a0225d58bd0983d09f87cfe2b6aec4f2c1234e4"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd32ea360bcbb92d28933fc05ed09bffcb1704ba3fc7942e81db0fd4f81a7892"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7139af55d1688f8b960ee9ad5adafc4ac17c1c473fe07133ac092310d76544"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dced8146011d2bc2e883f9bd68618b8247387f4bbec46d7392b3c3b032640126"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c9bf3325c47b11b2e51bca0824ea217c7cd84491d8ac4eefd1e409705ef092bd"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5794cf59533bc3f1b1c821f7206a3617999db9fbefc345360aafe2e067514929"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e368f200bbc2e4f905b8e71eb38b3c04333bddaa6a2464a6355487b02bb7fb09"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d706eba36b4c4d5bc6c6377bb6568098765e990cfc21ee16d13963fab7b3e7"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85267bd1aa8880a9c88a8cb71e18d3d64d2751a790e6ca6c27b8ccc724bcd5ad"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210ef2c3a1f03272649aff1ef992df2e724748918c4bc2d5a90352849eb40bea"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:11d011a7574eb3b82bcc9c1a1d35c1d7075677fdd15de527d91b46bd35e935ee"}, + {file = "kiwisolver-1.4.5.tar.gz", hash = "sha256:e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec"}, +] + [[package]] name = "locket" version = "1.0.0" @@ -618,6 +1231,164 @@ files = [ {file = "locket-1.0.0.tar.gz", hash = "sha256:5c0d4c052a8bbbf750e056a8e65ccd309086f4f0f18a2eac306a8dfa4112a632"}, ] +[[package]] +name = "lxml" +version = "5.2.2" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:364d03207f3e603922d0d3932ef363d55bbf48e3647395765f9bfcbdf6d23632"}, + {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:50127c186f191b8917ea2fb8b206fbebe87fd414a6084d15568c27d0a21d60db"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4f025ef3db1c6da4460dd27c118d8cd136d0391da4e387a15e48e5c975147"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:981a06a3076997adf7c743dcd0d7a0415582661e2517c7d961493572e909aa1d"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aef5474d913d3b05e613906ba4090433c515e13ea49c837aca18bde190853dff"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e275ea572389e41e8b039ac076a46cb87ee6b8542df3fff26f5baab43713bca"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5b65529bb2f21ac7861a0e94fdbf5dc0daab41497d18223b46ee8515e5ad297"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:bcc98f911f10278d1daf14b87d65325851a1d29153caaf146877ec37031d5f36"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:b47633251727c8fe279f34025844b3b3a3e40cd1b198356d003aa146258d13a2"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:fbc9d316552f9ef7bba39f4edfad4a734d3d6f93341232a9dddadec4f15d425f"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:13e69be35391ce72712184f69000cda04fc89689429179bc4c0ae5f0b7a8c21b"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3b6a30a9ab040b3f545b697cb3adbf3696c05a3a68aad172e3fd7ca73ab3c835"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_ppc64le.whl", 
hash = "sha256:a233bb68625a85126ac9f1fc66d24337d6e8a0f9207b688eec2e7c880f012ec0"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:dfa7c241073d8f2b8e8dbc7803c434f57dbb83ae2a3d7892dd068d99e96efe2c"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1a7aca7964ac4bb07680d5c9d63b9d7028cace3e2d43175cb50bba8c5ad33316"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ae4073a60ab98529ab8a72ebf429f2a8cc612619a8c04e08bed27450d52103c0"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ffb2be176fed4457e445fe540617f0252a72a8bc56208fd65a690fdb1f57660b"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e290d79a4107d7d794634ce3e985b9ae4f920380a813717adf61804904dc4393"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:96e85aa09274955bb6bd483eaf5b12abadade01010478154b0ec70284c1b1526"}, + {file = "lxml-5.2.2-cp310-cp310-win32.whl", hash = "sha256:f956196ef61369f1685d14dad80611488d8dc1ef00be57c0c5a03064005b0f30"}, + {file = "lxml-5.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:875a3f90d7eb5c5d77e529080d95140eacb3c6d13ad5b616ee8095447b1d22e7"}, + {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45f9494613160d0405682f9eee781c7e6d1bf45f819654eb249f8f46a2c22545"}, + {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0b3f2df149efb242cee2ffdeb6674b7f30d23c9a7af26595099afaf46ef4e88"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d28cb356f119a437cc58a13f8135ab8a4c8ece18159eb9194b0d269ec4e28083"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:657a972f46bbefdbba2d4f14413c0d079f9ae243bd68193cb5061b9732fa54c1"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b74b9ea10063efb77a965a8d5f4182806fbf59ed068b3c3fd6f30d2ac7bee734"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07542787f86112d46d07d4f3c4e7c760282011b354d012dc4141cc12a68cef5f"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:303f540ad2dddd35b92415b74b900c749ec2010e703ab3bfd6660979d01fd4ed"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2eb2227ce1ff998faf0cd7fe85bbf086aa41dfc5af3b1d80867ecfe75fb68df3"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:1d8a701774dfc42a2f0b8ccdfe7dbc140500d1049e0632a611985d943fcf12df"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:56793b7a1a091a7c286b5f4aa1fe4ae5d1446fe742d00cdf2ffb1077865db10d"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eb00b549b13bd6d884c863554566095bf6fa9c3cecb2e7b399c4bc7904cb33b5"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a2569a1f15ae6c8c64108a2cd2b4a858fc1e13d25846be0666fc144715e32ab"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:8cf85a6e40ff1f37fe0f25719aadf443686b1ac7652593dc53c7ef9b8492b115"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:d237ba6664b8e60fd90b8549a149a74fcc675272e0e95539a00522e4ca688b04"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0b3f5016e00ae7630a4b83d0868fca1e3d494c78a75b1c7252606a3a1c5fc2ad"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23441e2b5339bc54dc949e9e675fa35efe858108404ef9aa92f0456929ef6fe8"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2fb0ba3e8566548d6c8e7dd82a8229ff47bd8fb8c2da237607ac8e5a1b8312e5"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:79d1fb9252e7e2cfe4de6e9a6610c7cbb99b9708e2c3e29057f487de5a9eaefa"}, + 
{file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6dcc3d17eac1df7859ae01202e9bb11ffa8c98949dcbeb1069c8b9a75917e01b"}, + {file = "lxml-5.2.2-cp311-cp311-win32.whl", hash = "sha256:4c30a2f83677876465f44c018830f608fa3c6a8a466eb223535035fbc16f3438"}, + {file = "lxml-5.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:49095a38eb333aaf44c06052fd2ec3b8f23e19747ca7ec6f6c954ffea6dbf7be"}, + {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7429e7faa1a60cad26ae4227f4dd0459efde239e494c7312624ce228e04f6391"}, + {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:50ccb5d355961c0f12f6cf24b7187dbabd5433f29e15147a67995474f27d1776"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc911208b18842a3a57266d8e51fc3cfaccee90a5351b92079beed912a7914c2"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33ce9e786753743159799fdf8e92a5da351158c4bfb6f2db0bf31e7892a1feb5"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec87c44f619380878bd49ca109669c9f221d9ae6883a5bcb3616785fa8f94c97"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08ea0f606808354eb8f2dfaac095963cb25d9d28e27edcc375d7b30ab01abbf6"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75a9632f1d4f698b2e6e2e1ada40e71f369b15d69baddb8968dcc8e683839b18"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74da9f97daec6928567b48c90ea2c82a106b2d500f397eeb8941e47d30b1ca85"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:0969e92af09c5687d769731e3f39ed62427cc72176cebb54b7a9d52cc4fa3b73"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:9164361769b6ca7769079f4d426a41df6164879f7f3568be9086e15baca61466"}, + 
{file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d26a618ae1766279f2660aca0081b2220aca6bd1aa06b2cf73f07383faf48927"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab67ed772c584b7ef2379797bf14b82df9aa5f7438c5b9a09624dd834c1c1aaf"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:3d1e35572a56941b32c239774d7e9ad724074d37f90c7a7d499ab98761bd80cf"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:8268cbcd48c5375f46e000adb1390572c98879eb4f77910c6053d25cc3ac2c67"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e282aedd63c639c07c3857097fc0e236f984ceb4089a8b284da1c526491e3f3d"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfdc2bfe69e9adf0df4915949c22a25b39d175d599bf98e7ddf620a13678585"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4aefd911793b5d2d7a921233a54c90329bf3d4a6817dc465f12ffdfe4fc7b8fe"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8b8df03a9e995b6211dafa63b32f9d405881518ff1ddd775db4e7b98fb545e1c"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f11ae142f3a322d44513de1018b50f474f8f736bc3cd91d969f464b5bfef8836"}, + {file = "lxml-5.2.2-cp312-cp312-win32.whl", hash = "sha256:16a8326e51fcdffc886294c1e70b11ddccec836516a343f9ed0f82aac043c24a"}, + {file = "lxml-5.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:bbc4b80af581e18568ff07f6395c02114d05f4865c2812a1f02f2eaecf0bfd48"}, + {file = "lxml-5.2.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e3d9d13603410b72787579769469af730c38f2f25505573a5888a94b62b920f8"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38b67afb0a06b8575948641c1d6d68e41b83a3abeae2ca9eed2ac59892b36706"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c689d0d5381f56de7bd6966a4541bff6e08bf8d3871bbd89a0c6ab18aa699573"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:cf2a978c795b54c539f47964ec05e35c05bd045db5ca1e8366988c7f2fe6b3ce"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:739e36ef7412b2bd940f75b278749106e6d025e40027c0b94a17ef7968d55d56"}, + {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d8bbcd21769594dbba9c37d3c819e2d5847656ca99c747ddb31ac1701d0c0ed9"}, + {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:2304d3c93f2258ccf2cf7a6ba8c761d76ef84948d87bf9664e14d203da2cd264"}, + {file = "lxml-5.2.2-cp36-cp36m-win32.whl", hash = "sha256:02437fb7308386867c8b7b0e5bc4cd4b04548b1c5d089ffb8e7b31009b961dc3"}, + {file = "lxml-5.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:edcfa83e03370032a489430215c1e7783128808fd3e2e0a3225deee278585196"}, + {file = "lxml-5.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:28bf95177400066596cdbcfc933312493799382879da504633d16cf60bba735b"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a745cc98d504d5bd2c19b10c79c61c7c3df9222629f1b6210c0368177589fb8"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b590b39ef90c6b22ec0be925b211298e810b4856909c8ca60d27ffbca6c12e6"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b336b0416828022bfd5a2e3083e7f5ba54b96242159f83c7e3eebaec752f1716"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:c2faf60c583af0d135e853c86ac2735ce178f0e338a3c7f9ae8f622fd2eb788c"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:4bc6cb140a7a0ad1f7bc37e018d0ed690b7b6520ade518285dc3171f7a117905"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:7ff762670cada8e05b32bf1e4dc50b140790909caa8303cfddc4d702b71ea184"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:57f0a0bbc9868e10ebe874e9f129d2917750adf008fe7b9c1598c0fbbfdde6a6"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:a6d2092797b388342c1bc932077ad232f914351932353e2e8706851c870bca1f"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:60499fe961b21264e17a471ec296dcbf4365fbea611bf9e303ab69db7159ce61"}, + {file = "lxml-5.2.2-cp37-cp37m-win32.whl", hash = "sha256:d9b342c76003c6b9336a80efcc766748a333573abf9350f4094ee46b006ec18f"}, + {file = "lxml-5.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b16db2770517b8799c79aa80f4053cd6f8b716f21f8aca962725a9565ce3ee40"}, + {file = "lxml-5.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7ed07b3062b055d7a7f9d6557a251cc655eed0b3152b76de619516621c56f5d3"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f60fdd125d85bf9c279ffb8e94c78c51b3b6a37711464e1f5f31078b45002421"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a7e24cb69ee5f32e003f50e016d5fde438010c1022c96738b04fc2423e61706"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23cfafd56887eaed93d07bc4547abd5e09d837a002b791e9767765492a75883f"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:19b4e485cd07b7d83e3fe3b72132e7df70bfac22b14fe4bf7a23822c3a35bff5"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7ce7ad8abebe737ad6143d9d3bf94b88b93365ea30a5b81f6877ec9c0dee0a48"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e49b052b768bb74f58c7dda4e0bdf7b79d43a9204ca584ffe1fb48a6f3c84c66"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:d14a0d029a4e176795cef99c056d58067c06195e0c7e2dbb293bf95c08f772a3"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:be49ad33819d7dcc28a309b86d4ed98e1a65f3075c6acd3cd4fe32103235222b"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a6d17e0370d2516d5bb9062c7b4cb731cff921fc875644c3d751ad857ba9c5b1"}, + {file = "lxml-5.2.2-cp38-cp38-win32.whl", hash = "sha256:5b8c041b6265e08eac8a724b74b655404070b636a8dd6d7a13c3adc07882ef30"}, + {file = "lxml-5.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:f61efaf4bed1cc0860e567d2ecb2363974d414f7f1f124b1df368bbf183453a6"}, + {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fb91819461b1b56d06fa4bcf86617fac795f6a99d12239fb0c68dbeba41a0a30"}, + {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d4ed0c7cbecde7194cd3228c044e86bf73e30a23505af852857c09c24e77ec5d"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54401c77a63cc7d6dc4b4e173bb484f28a5607f3df71484709fe037c92d4f0ed"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:625e3ef310e7fa3a761d48ca7ea1f9d8718a32b1542e727d584d82f4453d5eeb"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:519895c99c815a1a24a926d5b60627ce5ea48e9f639a5cd328bda0515ea0f10c"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c7079d5eb1c1315a858bbf180000757db8ad904a89476653232db835c3114001"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:343ab62e9ca78094f2306aefed67dcfad61c4683f87eee48ff2fd74902447726"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:cd9e78285da6c9ba2d5c769628f43ef66d96ac3085e59b10ad4f3707980710d3"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = 
"sha256:546cf886f6242dff9ec206331209db9c8e1643ae642dea5fdbecae2453cb50fd"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:02f6a8eb6512fdc2fd4ca10a49c341c4e109aa6e9448cc4859af5b949622715a"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:339ee4a4704bc724757cd5dd9dc8cf4d00980f5d3e6e06d5847c1b594ace68ab"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0a028b61a2e357ace98b1615fc03f76eb517cc028993964fe08ad514b1e8892d"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f90e552ecbad426eab352e7b2933091f2be77115bb16f09f78404861c8322981"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d83e2d94b69bf31ead2fa45f0acdef0757fa0458a129734f59f67f3d2eb7ef32"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a02d3c48f9bb1e10c7788d92c0c7db6f2002d024ab6e74d6f45ae33e3d0288a3"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6d68ce8e7b2075390e8ac1e1d3a99e8b6372c694bbe612632606d1d546794207"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:453d037e09a5176d92ec0fd282e934ed26d806331a8b70ab431a81e2fbabf56d"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:3b019d4ee84b683342af793b56bb35034bd749e4cbdd3d33f7d1107790f8c472"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb3942960f0beb9f46e2a71a3aca220d1ca32feb5a398656be934320804c0df9"}, + {file = "lxml-5.2.2-cp39-cp39-win32.whl", hash = "sha256:ac6540c9fff6e3813d29d0403ee7a81897f1d8ecc09a8ff84d2eea70ede1cdbf"}, + {file = "lxml-5.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:610b5c77428a50269f38a534057444c249976433f40f53e3b47e68349cca1425"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b537bd04d7ccd7c6350cdaaaad911f6312cbd61e6e6045542f781c7f8b2e99d2"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:4820c02195d6dfb7b8508ff276752f6b2ff8b64ae5d13ebe02e7667e035000b9"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a09f6184f17a80897172863a655467da2b11151ec98ba8d7af89f17bf63dae"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:76acba4c66c47d27c8365e7c10b3d8016a7da83d3191d053a58382311a8bf4e1"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b128092c927eaf485928cec0c28f6b8bead277e28acf56800e972aa2c2abd7a2"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ae791f6bd43305aade8c0e22f816b34f3b72b6c820477aab4d18473a37e8090b"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a2f6a1bc2460e643785a2cde17293bd7a8f990884b822f7bca47bee0a82fc66b"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e8d351ff44c1638cb6e980623d517abd9f580d2e53bfcd18d8941c052a5a009"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bec4bd9133420c5c52d562469c754f27c5c9e36ee06abc169612c959bd7dbb07"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:55ce6b6d803890bd3cc89975fca9de1dff39729b43b73cb15ddd933b8bc20484"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ab6a358d1286498d80fe67bd3d69fcbc7d1359b45b41e74c4a26964ca99c3f8"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:06668e39e1f3c065349c51ac27ae430719d7806c026fec462e5693b08b95696b"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9cd5323344d8ebb9fb5e96da5de5ad4ebab993bbf51674259dbe9d7a18049525"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89feb82ca055af0fe797a2323ec9043b26bc371365847dbe83c7fd2e2f181c34"}, + {file = 
"lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e481bba1e11ba585fb06db666bfc23dbe181dbafc7b25776156120bf12e0d5a6"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d6c6ea6a11ca0ff9cd0390b885984ed31157c168565702959c25e2191674a14"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3d98de734abee23e61f6b8c2e08a88453ada7d6486dc7cdc82922a03968928db"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:69ab77a1373f1e7563e0fb5a29a8440367dec051da6c7405333699d07444f511"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:34e17913c431f5ae01d8658dbf792fdc457073dcdfbb31dc0cc6ab256e664a8d"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f8757b03208c3f50097761be2dea0aba02e94f0dc7023ed73a7bb14ff11eb0"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a520b4f9974b0a0a6ed73c2154de57cdfd0c8800f4f15ab2b73238ffed0b36e"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5e097646944b66207023bc3c634827de858aebc226d5d4d6d16f0b77566ea182"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b5e4ef22ff25bfd4ede5f8fb30f7b24446345f3e79d9b7455aef2836437bc38a"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ff69a9a0b4b17d78170c73abe2ab12084bdf1691550c5629ad1fe7849433f324"}, + {file = "lxml-5.2.2.tar.gz", hash = "sha256:bb2dc4898180bea79863d5487e5f9c7c34297414bad54bcd0f0852aee9cfdb87"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html-clean = ["lxml-html-clean"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=3.0.10)"] + [[package]] name = "markupsafe" version = "2.1.5" @@ -687,6 +1458,58 @@ files = [ {file = "MarkupSafe-2.1.5.tar.gz", hash = 
"sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] +[[package]] +name = "matplotlib" +version = "3.9.0" +description = "Python plotting package" +optional = false +python-versions = ">=3.9" +files = [ + {file = "matplotlib-3.9.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2bcee1dffaf60fe7656183ac2190bd630842ff87b3153afb3e384d966b57fe56"}, + {file = "matplotlib-3.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3f988bafb0fa39d1074ddd5bacd958c853e11def40800c5824556eb630f94d3b"}, + {file = "matplotlib-3.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe428e191ea016bb278758c8ee82a8129c51d81d8c4bc0846c09e7e8e9057241"}, + {file = "matplotlib-3.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaf3978060a106fab40c328778b148f590e27f6fa3cd15a19d6892575bce387d"}, + {file = "matplotlib-3.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2e7f03e5cbbfacdd48c8ea394d365d91ee8f3cae7e6ec611409927b5ed997ee4"}, + {file = "matplotlib-3.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:13beb4840317d45ffd4183a778685e215939be7b08616f431c7795276e067463"}, + {file = "matplotlib-3.9.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:063af8587fceeac13b0936c42a2b6c732c2ab1c98d38abc3337e430e1ff75e38"}, + {file = "matplotlib-3.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9a2fa6d899e17ddca6d6526cf6e7ba677738bf2a6a9590d702c277204a7c6152"}, + {file = "matplotlib-3.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:550cdda3adbd596078cca7d13ed50b77879104e2e46392dcd7c75259d8f00e85"}, + {file = "matplotlib-3.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76cce0f31b351e3551d1f3779420cf8f6ec0d4a8cf9c0237a3b549fd28eb4abb"}, + {file = "matplotlib-3.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c53aeb514ccbbcbab55a27f912d79ea30ab21ee0531ee2c09f13800efb272674"}, + {file = 
"matplotlib-3.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:a5be985db2596d761cdf0c2eaf52396f26e6a64ab46bd8cd810c48972349d1be"}, + {file = "matplotlib-3.9.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c79f3a585f1368da6049318bdf1f85568d8d04b2e89fc24b7e02cc9b62017382"}, + {file = "matplotlib-3.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bdd1ecbe268eb3e7653e04f451635f0fb0f77f07fd070242b44c076c9106da84"}, + {file = "matplotlib-3.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38e85a1a6d732f645f1403ce5e6727fd9418cd4574521d5803d3d94911038e5"}, + {file = "matplotlib-3.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a490715b3b9984fa609116481b22178348c1a220a4499cda79132000a79b4db"}, + {file = "matplotlib-3.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8146ce83cbc5dc71c223a74a1996d446cd35cfb6a04b683e1446b7e6c73603b7"}, + {file = "matplotlib-3.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:d91a4ffc587bacf5c4ce4ecfe4bcd23a4b675e76315f2866e588686cc97fccdf"}, + {file = "matplotlib-3.9.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:616fabf4981a3b3c5a15cd95eba359c8489c4e20e03717aea42866d8d0465956"}, + {file = "matplotlib-3.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cd53c79fd02f1c1808d2cfc87dd3cf4dbc63c5244a58ee7944497107469c8d8a"}, + {file = "matplotlib-3.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06a478f0d67636554fa78558cfbcd7b9dba85b51f5c3b5a0c9be49010cf5f321"}, + {file = "matplotlib-3.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81c40af649d19c85f8073e25e5806926986806fa6d54be506fbf02aef47d5a89"}, + {file = "matplotlib-3.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52146fc3bd7813cc784562cb93a15788be0b2875c4655e2cc6ea646bfa30344b"}, + {file = "matplotlib-3.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:0fc51eaa5262553868461c083d9adadb11a6017315f3a757fc45ec6ec5f02888"}, + {file = 
"matplotlib-3.9.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bd4f2831168afac55b881db82a7730992aa41c4f007f1913465fb182d6fb20c0"}, + {file = "matplotlib-3.9.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:290d304e59be2b33ef5c2d768d0237f5bd132986bdcc66f80bc9bcc300066a03"}, + {file = "matplotlib-3.9.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ff2e239c26be4f24bfa45860c20ffccd118d270c5b5d081fa4ea409b5469fcd"}, + {file = "matplotlib-3.9.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:af4001b7cae70f7eaacfb063db605280058246de590fa7874f00f62259f2df7e"}, + {file = "matplotlib-3.9.0.tar.gz", hash = "sha256:e6d29ea6c19e34b30fb7d88b7081f869a03014f66fe06d62cc77d5a6ea88ed7a"}, +] + +[package.dependencies] +contourpy = ">=1.0.1" +cycler = ">=0.10" +fonttools = ">=4.22.0" +kiwisolver = ">=1.3.1" +numpy = ">=1.23" +packaging = ">=20.0" +pillow = ">=8" +pyparsing = ">=2.3.1" +python-dateutil = ">=2.7" + +[package.extras] +dev = ["meson-python (>=0.13.1)", "numpy (>=1.25)", "pybind11 (>=2.6)", "setuptools (>=64)", "setuptools_scm (>=7)"] + [[package]] name = "mccabe" version = "0.7.0" @@ -776,33 +1599,63 @@ files = [ [[package]] name = "nibabel" -version = "5.1.0" +version = "5.2.1" description = "Access a multitude of neuroimaging data formats" optional = false python-versions = ">=3.8" files = [ - {file = "nibabel-5.1.0-py3-none-any.whl", hash = "sha256:b3deb8130c835b9d26e80880b0d5e443d9e3f30972b3b0302dd2fafa3ca629f8"}, - {file = "nibabel-5.1.0.tar.gz", hash = "sha256:ce73ca5e957209e7219a223cb71f77235c9df2acf4d3f27f861ba38e9481ac53"}, + {file = "nibabel-5.2.1-py3-none-any.whl", hash = "sha256:2cbbc22985f7f9d39d050df47249771dfb8d48447f5e7a993177e4cabfe047f0"}, + {file = "nibabel-5.2.1.tar.gz", hash = "sha256:b6c80b2e728e4bc2b65f1142d9b8d2287a9102a8bf8477e115ef0d8334559975"}, ] [package.dependencies] -numpy = ">=1.19" +numpy = ">=1.20" packaging = ">=17" [package.extras] -all = 
["nibabel[dev,dicomfs,doc,minc2,spm,style,test,zstd]"] -dev = ["gitpython", "nibabel[style]", "twine"] +all = ["nibabel[dicomfs,minc2,spm,zstd]"] +dev = ["tox"] dicom = ["pydicom (>=1.0.0)"] dicomfs = ["nibabel[dicom]", "pillow"] -doc = ["matplotlib (>=1.5.3)", "numpydoc", "sphinx (>=5.3,<6.0)", "texext", "tomli"] -doctest = ["nibabel[doc,test]"] +doc = ["matplotlib (>=1.5.3)", "numpydoc", "sphinx", "texext", "tomli"] +doctest = ["tox"] minc2 = ["h5py"] spm = ["scipy"] -style = ["blue", "flake8", "isort"] -test = ["coverage", "pytest (!=5.3.4)", "pytest-cov", "pytest-doctestplus", "pytest-httpserver", "pytest-xdist"] -typing = ["importlib-resources", "mypy", "pydicom", "pytest", "pyzstd", "types-pillow", "types-setuptools"] +style = ["tox"] +test = ["pytest", "pytest-cov", "pytest-doctestplus", "pytest-httpserver", "pytest-xdist"] +typing = ["tox"] zstd = ["pyzstd (>=0.14.3)"] +[[package]] +name = "nilearn" +version = "0.10.4" +description = "Statistical learning for neuroimaging in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "nilearn-0.10.4-py3-none-any.whl", hash = "sha256:48a800e6117ebea8a70b2f0a080b16109e225731605c9243ff103e9a27bf9cac"}, + {file = "nilearn-0.10.4.tar.gz", hash = "sha256:9450bd56a776d997b324f45dd18bf96e89bd8d80160974fcc759333fbaea35c2"}, +] + +[package.dependencies] +joblib = ">=1.0.0" +lxml = "*" +nibabel = ">=4.0.0" +numpy = ">=1.19.0" +packaging = "*" +pandas = ">=1.1.5" +requests = ">=2.25.0" +scikit-learn = ">=1.0.0" +scipy = ">=1.8.0" + +[package.extras] +dev = ["nilearn[doc,plotting,style,test]", "pre-commit", "tox"] +doc = ["furo", "memory-profiler", "myst-parser", "nilearn[plotting]", "numpydoc", "ruamel-yaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx-gallery", "sphinxcontrib-bibtex", "sphinxext-opengraph"] +plotly = ["nilearn[plotting]"] +plotting = ["kaleido", "kaleido (==0.1.0.post1)", "matplotlib (>=3.3.0)", "plotly"] +style = ["black", "blacken-docs", "codespell", "flake8", 
"flake8-docstrings", "flake8-functions", "flake8-use-fstring", "flynt", "isort", "tomli"] +test = ["coverage", "pytest (>=6.0.0)", "pytest-cov"] + [[package]] name = "nox" version = "2024.4.15" @@ -857,95 +1710,146 @@ docopt = ">=0.6.2" [[package]] name = "numpy" -version = "1.23.5" -description = "NumPy is the fundamental package for array computing with Python." +version = "1.26.4" +description = "Fundamental package for array computing in Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "numpy-1.23.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9c88793f78fca17da0145455f0d7826bcb9f37da4764af27ac945488116efe63"}, - {file = "numpy-1.23.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e9f4c4e51567b616be64e05d517c79a8a22f3606499941d97bb76f2ca59f982d"}, - {file = "numpy-1.23.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7903ba8ab592b82014713c491f6c5d3a1cde5b4a3bf116404e08f5b52f6daf43"}, - {file = "numpy-1.23.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e05b1c973a9f858c74367553e236f287e749465f773328c8ef31abe18f691e1"}, - {file = "numpy-1.23.5-cp310-cp310-win32.whl", hash = "sha256:522e26bbf6377e4d76403826ed689c295b0b238f46c28a7251ab94716da0b280"}, - {file = "numpy-1.23.5-cp310-cp310-win_amd64.whl", hash = "sha256:dbee87b469018961d1ad79b1a5d50c0ae850000b639bcb1b694e9981083243b6"}, - {file = "numpy-1.23.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ce571367b6dfe60af04e04a1834ca2dc5f46004ac1cc756fb95319f64c095a96"}, - {file = "numpy-1.23.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56e454c7833e94ec9769fa0f86e6ff8e42ee38ce0ce1fa4cbb747ea7e06d56aa"}, - {file = "numpy-1.23.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5039f55555e1eab31124a5768898c9e22c25a65c1e0037f4d7c495a45778c9f2"}, - {file = "numpy-1.23.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:58f545efd1108e647604a1b5aa809591ccd2540f468a880bedb97247e72db387"}, - {file = "numpy-1.23.5-cp311-cp311-win32.whl", hash = "sha256:b2a9ab7c279c91974f756c84c365a669a887efa287365a8e2c418f8b3ba73fb0"}, - {file = "numpy-1.23.5-cp311-cp311-win_amd64.whl", hash = "sha256:0cbe9848fad08baf71de1a39e12d1b6310f1d5b2d0ea4de051058e6e1076852d"}, - {file = "numpy-1.23.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f063b69b090c9d918f9df0a12116029e274daf0181df392839661c4c7ec9018a"}, - {file = "numpy-1.23.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0aaee12d8883552fadfc41e96b4c82ee7d794949e2a7c3b3a7201e968c7ecab9"}, - {file = "numpy-1.23.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92c8c1e89a1f5028a4c6d9e3ccbe311b6ba53694811269b992c0b224269e2398"}, - {file = "numpy-1.23.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d208a0f8729f3fb790ed18a003f3a57895b989b40ea4dce4717e9cf4af62c6bb"}, - {file = "numpy-1.23.5-cp38-cp38-win32.whl", hash = "sha256:06005a2ef6014e9956c09ba07654f9837d9e26696a0470e42beedadb78c11b07"}, - {file = "numpy-1.23.5-cp38-cp38-win_amd64.whl", hash = "sha256:ca51fcfcc5f9354c45f400059e88bc09215fb71a48d3768fb80e357f3b457e1e"}, - {file = "numpy-1.23.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8969bfd28e85c81f3f94eb4a66bc2cf1dbdc5c18efc320af34bffc54d6b1e38f"}, - {file = "numpy-1.23.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a7ac231a08bb37f852849bbb387a20a57574a97cfc7b6cabb488a4fc8be176de"}, - {file = "numpy-1.23.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf837dc63ba5c06dc8797c398db1e223a466c7ece27a1f7b5232ba3466aafe3d"}, - {file = "numpy-1.23.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33161613d2269025873025b33e879825ec7b1d831317e68f4f2f0f84ed14c719"}, - {file = "numpy-1.23.5-cp39-cp39-win32.whl", hash = "sha256:af1da88f6bc3d2338ebbf0e22fe487821ea4d8e89053e25fa59d1d79786e7481"}, - {file = 
"numpy-1.23.5-cp39-cp39-win_amd64.whl", hash = "sha256:09b7847f7e83ca37c6e627682f145856de331049013853f344f37b0c9690e3df"}, - {file = "numpy-1.23.5-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:abdde9f795cf292fb9651ed48185503a2ff29be87770c3b8e2a14b0cd7aa16f8"}, - {file = "numpy-1.23.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9a909a8bae284d46bbfdefbdd4a262ba19d3bc9921b1e76126b1d21c3c34135"}, - {file = "numpy-1.23.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:01dd17cbb340bf0fc23981e52e1d18a9d4050792e8fb8363cecbf066a84b827d"}, - {file = "numpy-1.23.5.tar.gz", hash = "sha256:1b1766d6f397c18153d40015ddfc79ddb715cabadc04d2d228d4e5a8bc4ded1a"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = 
"numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, ] [[package]] name = "packaging" -version = "24.1" +version = "24.0" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] name = "pandas" -version = "1.5.3" +version = "2.2.2" description = "Powerful data structures for data analysis, time series, and statistics" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3749077d86e3a2f0ed51367f30bf5b82e131cc0f14260c4d3e499186fccc4406"}, - {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:972d8a45395f2a2d26733eb8d0f629b2f90bebe8e8eddbb8829b180c09639572"}, - {file = "pandas-1.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50869a35cbb0f2e0cd5ec04b191e7b12ed688874bd05dd777c19b28cbea90996"}, - {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3ac844a0fe00bfaeb2c9b51ab1424e5c8744f89860b138434a363b1f620f354"}, - {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0a56cef15fd1586726dace5616db75ebcfec9179a3a55e78f72c5639fa2a23"}, - {file = "pandas-1.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:478ff646ca42b20376e4ed3fa2e8d7341e8a63105586efe54fa2508ee087f328"}, - {file = 
"pandas-1.5.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6973549c01ca91ec96199e940495219c887ea815b2083722821f1d7abfa2b4dc"}, - {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c39a8da13cede5adcd3be1182883aea1c925476f4e84b2807a46e2775306305d"}, - {file = "pandas-1.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f76d097d12c82a535fda9dfe5e8dd4127952b45fea9b0276cb30cca5ea313fbc"}, - {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e474390e60ed609cec869b0da796ad94f420bb057d86784191eefc62b65819ae"}, - {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f2b952406a1588ad4cad5b3f55f520e82e902388a6d5a4a91baa8d38d23c7f6"}, - {file = "pandas-1.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc4c368f42b551bf72fac35c5128963a171b40dce866fb066540eeaf46faa003"}, - {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14e45300521902689a81f3f41386dc86f19b8ba8dd5ac5a3c7010ef8d2932813"}, - {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9842b6f4b8479e41968eced654487258ed81df7d1c9b7b870ceea24ed9459b31"}, - {file = "pandas-1.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:26d9c71772c7afb9d5046e6e9cf42d83dd147b5cf5bcb9d97252077118543792"}, - {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fbcb19d6fceb9e946b3e23258757c7b225ba450990d9ed63ccceeb8cae609f7"}, - {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:565fa34a5434d38e9d250af3c12ff931abaf88050551d9fbcdfafca50d62babf"}, - {file = "pandas-1.5.3-cp38-cp38-win32.whl", hash = "sha256:87bd9c03da1ac870a6d2c8902a0e1fd4267ca00f13bc494c9e5a9020920e1d51"}, - {file = "pandas-1.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:41179ce559943d83a9b4bbacb736b04c928b095b5f25dd2b7389eda08f46f373"}, - {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:c74a62747864ed568f5a82a49a23a8d7fe171d0c69038b38cedf0976831296fa"}, - {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c4c00e0b0597c8e4f59e8d461f797e5d70b4d025880516a8261b2817c47759ee"}, - {file = "pandas-1.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a50d9a4336a9621cab7b8eb3fb11adb82de58f9b91d84c2cd526576b881a0c5a"}, - {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd05f7783b3274aa206a1af06f0ceed3f9b412cf665b7247eacd83be41cf7bf0"}, - {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f69c4029613de47816b1bb30ff5ac778686688751a5e9c99ad8c7031f6508e5"}, - {file = "pandas-1.5.3-cp39-cp39-win32.whl", hash = "sha256:7cec0bee9f294e5de5bbfc14d0573f65526071029d036b753ee6507d2a21480a"}, - {file = "pandas-1.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:dfd681c5dc216037e0b0a2c821f5ed99ba9f03ebcf119c7dac0e9a7b960b9ec9"}, - {file = "pandas-1.5.3.tar.gz", hash = "sha256:74a3fd7e5a7ec052f183273dc7b0acd3a863edf7520f5d3a1765c04ffdb3b0b1"}, + {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, + {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, + {file = 
"pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, + {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, + {file = 
"pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, + {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, + {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, + {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, ] [package.dependencies] -numpy = {version = ">=1.21.0", markers = "python_version >= \"3.10\""} +numpy = [ + {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, +] python-dateutil = ">=2.8.1" pytz = ">=2020.1" +tzdata = ">=2022.7" [package.extras] -test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 
(>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + 
+[[package]] +name = "parso" +version = "0.8.4" +description = "A Python Parser" +optional = false +python-versions = ">=3.6" +files = [ + {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, + {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, +] + +[package.extras] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["docopt", "pytest"] [[package]] name = "partd" @@ -976,6 +1880,103 @@ files = [ {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] +[[package]] +name = "pillow" +version = "10.4.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e"}, + {file = "pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc"}, + {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e"}, + {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46"}, + {file = "pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984"}, + {file = "pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141"}, + {file = "pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1"}, + {file = "pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c"}, + {file = "pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319"}, + {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d"}, + {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696"}, + {file = "pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496"}, + {file = "pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91"}, + {file = 
"pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22"}, + {file = "pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94"}, + {file = "pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a"}, + {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b"}, + {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9"}, + {file = "pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42"}, + {file = "pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a"}, + {file = "pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9"}, + {file = "pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3"}, + {file = "pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc"}, + {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a"}, + {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309"}, + {file = "pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060"}, + {file = "pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea"}, + {file = "pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d"}, + {file = "pillow-10.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8d4d5063501b6dd4024b8ac2f04962d661222d120381272deea52e3fc52d3736"}, + {file = "pillow-10.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c1ee6f42250df403c5f103cbd2768a28fe1a0ea1f0f03fe151c8741e1469c8b"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15e02e9bb4c21e39876698abf233c8c579127986f8207200bc8a8f6bb27acf2"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7a8d4bade9952ea9a77d0c3e49cbd8b2890a399422258a77f357b9cc9be8d680"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:43efea75eb06b95d1631cb784aa40156177bf9dd5b4b03ff38979e048258bc6b"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:950be4d8ba92aca4b2bb0741285a46bfae3ca699ef913ec8416c1b78eadd64cd"}, + {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d7480af14364494365e89d6fddc510a13e5a2c3584cb19ef65415ca57252fb84"}, + {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:73664fe514b34c8f02452ffb73b7a92c6774e39a647087f83d67f010eb9a0cf0"}, + {file = "pillow-10.4.0-cp38-cp38-win32.whl", hash = "sha256:e88d5e6ad0d026fba7bdab8c3f225a69f063f116462c49892b0149e21b6c0a0e"}, + {file = "pillow-10.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:5161eef006d335e46895297f642341111945e2c1c899eb406882a6c61a4357ab"}, + {file = "pillow-10.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ae24a547e8b711ccaaf99c9ae3cd975470e1a30caa80a6aaee9a2f19c05701d"}, + {file = "pillow-10.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:298478fe4f77a4408895605f3482b6cc6222c018b2ce565c2b6b9c354ac3229b"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:134ace6dc392116566980ee7436477d844520a26a4b1bd4053f6f47d096997fd"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:930044bb7679ab003b14023138b50181899da3f25de50e9dbee23b61b4de2126"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c76e5786951e72ed3686e122d14c5d7012f16c8303a674d18cdcd6d89557fc5b"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b2724fdb354a868ddf9a880cb84d102da914e99119211ef7ecbdc613b8c96b3c"}, + {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dbc6ae66518ab3c5847659e9988c3b60dc94ffb48ef9168656e0019a93dbf8a1"}, + {file = 
"pillow-10.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:06b2f7898047ae93fad74467ec3d28fe84f7831370e3c258afa533f81ef7f3df"}, + {file = "pillow-10.4.0-cp39-cp39-win32.whl", hash = "sha256:7970285ab628a3779aecc35823296a7869f889b8329c16ad5a71e4901a3dc4ef"}, + {file = "pillow-10.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:961a7293b2457b405967af9c77dcaa43cc1a8cd50d23c532e62d48ab6cdd56f5"}, + {file = "pillow-10.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:32cda9e3d601a52baccb2856b8ea1fc213c90b340c542dcef77140dfa3278a9e"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a02364621fe369e06200d4a16558e056fe2805d3468350df3aef21e00d26214b"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1b5dea9831a90e9d0721ec417a80d4cbd7022093ac38a568db2dd78363b00908"}, + {file = 
"pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b885f89040bb8c4a1573566bbb2f44f5c505ef6e74cec7ab9068c900047f04b"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87dd88ded2e6d74d31e1e0a99a726a6765cda32d00ba72dc37f0651f306daaa8"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2db98790afc70118bd0255c2eeb465e9767ecf1f3c25f9a1abb8ffc8cfd1fe0a"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f7baece4ce06bade126fb84b8af1c33439a76d8a6fd818970215e0560ca28c27"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfdd747216947628af7b259d274771d84db2268ca062dd5faf373639d00113a3"}, + {file = "pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=7.3)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] + [[package]] name = "platformdirs" version = "4.2.2" @@ -992,6 +1993,21 @@ docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx- test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] type = ["mypy (>=1.8)"] +[[package]] +name = "plotly" +version = "5.22.0" +description = "An open-source, interactive data visualization library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "plotly-5.22.0-py3-none-any.whl", hash = "sha256:68fc1901f098daeb233cc3dd44ec9dc31fb3ca4f4e53189344199c43496ed006"}, + {file = "plotly-5.22.0.tar.gz", hash = 
"sha256:859fdadbd86b5770ae2466e542b761b247d1c6b49daed765b95bb8c7063e7469"}, +] + +[package.dependencies] +packaging = "*" +tenacity = ">=6.2.0" + [[package]] name = "pluggy" version = "1.5.0" @@ -1007,43 +2023,83 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "prompt-toolkit" +version = "3.0.47" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, + {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, +] + +[package.dependencies] +wcwidth = "*" + [[package]] name = "psutil" -version = "5.9.8" +version = "6.0.0" description = "Cross-platform lib for process and system monitoring in Python." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, - {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, - {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, - {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, - {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, - {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, - {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = 
"sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, - {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, - {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, - {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, - {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, - {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, - {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, - {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, - {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, - {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "psutil-6.0.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a021da3e881cd935e64a3d0a20983bda0bb4cf80e4f74fa9bfcb1bc5785360c6"}, + {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1287c2b95f1c0a364d23bc6f2ea2365a8d4d9b726a3be7294296ff7ba97c17f0"}, + {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a9a3dbfb4de4f18174528d87cc352d1f788b7496991cca33c6996f40c9e3c92c"}, + {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_i686.whl", hash = 
"sha256:6ec7588fb3ddaec7344a825afe298db83fe01bfaaab39155fa84cf1c0d6b13c3"}, + {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:1e7c870afcb7d91fdea2b37c24aeb08f98b6d67257a5cb0a8bc3ac68d0f1a68c"}, + {file = "psutil-6.0.0-cp27-none-win32.whl", hash = "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35"}, + {file = "psutil-6.0.0-cp27-none-win_amd64.whl", hash = "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1"}, + {file = "psutil-6.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0"}, + {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0"}, + {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd"}, + {file = "psutil-6.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132"}, + {file = "psutil-6.0.0-cp36-cp36m-win32.whl", hash = "sha256:fc8c9510cde0146432bbdb433322861ee8c3efbf8589865c8bf8d21cb30c4d14"}, + {file = "psutil-6.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:34859b8d8f423b86e4385ff3665d3f4d94be3cdf48221fbe476e883514fdb71c"}, + {file = "psutil-6.0.0-cp37-abi3-win32.whl", hash = "sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d"}, + {file = "psutil-6.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3"}, + {file = "psutil-6.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0"}, + {file = "psutil-6.0.0.tar.gz", hash = "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2"}, ] 
[package.extras] test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +optional = false +python-versions = "*" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] + +[[package]] +name = "pure-eval" +version = "0.2.3" +description = "Safely evaluate AST nodes without side effects" +optional = false +python-versions = "*" +files = [ + {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, + {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, +] + +[package.extras] +tests = ["pytest"] + [[package]] name = "pybids" -version = "0.16.1" +version = "0.16.5" description = "bids: interface with datasets conforming to BIDS" optional = false python-versions = ">=3.8" files = [ - {file = "pybids-0.16.1-py3-none-any.whl", hash = "sha256:d7522dd2c7caea38bd83335d5584841f5d8b3430afe3621320d2b1ceffdf9075"}, - {file = "pybids-0.16.1.tar.gz", hash = "sha256:1a6ab06d375f3b783e738826e6d220b2f4145419b4b02f4edbcc8cb7c9b2208a"}, + {file = "pybids-0.16.5-py3-none-any.whl", hash = "sha256:7b3d4b8005644895fcff01d565cc163de9e2aa338f3ef238cec8b2289959eef5"}, + {file = "pybids-0.16.5.tar.gz", hash = "sha256:e4c029e426253a1d56c6c5ce13f2c754d9bf2b854615a2ca59a68a6302a34083"}, ] [package.dependencies] @@ -1058,25 +2114,25 @@ scipy = ">=1.5" sqlalchemy = ">=1.3.16" [package.extras] -ci-tests = ["pybids[test]", "pytest-xdist"] +ci-tests = ["pybids[test]"] dev = ["pybids[doc,plotting,test]"] -doc = ["jupytext", "myst-nb", "numpydoc", "sphinx (>=2.2,!=5.1.0)", "sphinx-rtd-theme"] +doc = ["jupytext", "myst-nb", "numpydoc", "sphinx (>=6.2)", 
"sphinx-rtd-theme"] docs = ["pybids[doc]"] model-reports = ["altair", "jinja2"] plotting = ["graphviz"] -test = ["altair", "bsmschema", "coverage[toml]", "pytest (>=3.3)", "pytest-cov"] +test = ["altair", "bsmschema", "coverage[toml]", "pytest (>=3.3)", "pytest-cov", "pytest-xdist"] tests = ["pybids[test]"] tutorial = ["ipykernel", "jinja2", "jupyter-client", "markupsafe", "nbconvert"] [[package]] name = "pycodestyle" -version = "2.11.1" +version = "2.12.0" description = "Python style guide checker" optional = false python-versions = ">=3.8" files = [ - {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, - {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, + {file = "pycodestyle-2.12.0-py2.py3-none-any.whl", hash = "sha256:949a39f6b86c3e1515ba1787c2022131d165a8ad271b11370a8819aa070269e4"}, + {file = "pycodestyle-2.12.0.tar.gz", hash = "sha256:442f950141b4f43df752dd303511ffded3a04c2b6fb7f65980574f0c31e6e79c"}, ] [[package]] @@ -1090,15 +2146,29 @@ files = [ {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, ] +[[package]] +name = "pyparsing" +version = "3.1.2" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.6.8" +files = [ + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + [[package]] name = "pytest" -version = "8.2.2" +version = "8.3.1" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.2.2-py3-none-any.whl", hash = 
"sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"}, - {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"}, + {file = "pytest-8.3.1-py3-none-any.whl", hash = "sha256:e9600ccf4f563976e2c99fa02c7624ab938296551f280835ee6516df8bc4ae8c"}, + {file = "pytest-8.3.1.tar.gz", hash = "sha256:7e8e5c5abd6e93cb1cc151f23e57adc31fcf8cfd2a3ff2da63e23f732de35db6"}, ] [package.dependencies] @@ -1106,7 +2176,7 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=1.5,<2.0" +pluggy = ">=1.5,<2" tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] @@ -1216,46 +2286,112 @@ files = [ ] [[package]] -name = "scipy" -version = "1.11.4" -description = "Fundamental algorithms for scientific computing in Python" +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "scikit-learn" +version = "1.5.0" +description = "A set of python modules for machine learning and data mining" optional = false python-versions = ">=3.9" files = [ - {file = "scipy-1.11.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc9a714581f561af0848e6b69947fda0614915f072dfd14142ed1bfe1b806710"}, - {file = "scipy-1.11.4-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:cf00bd2b1b0211888d4dc75656c0412213a8b25e80d73898083f402b50f47e41"}, - {file = "scipy-1.11.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9999c008ccf00e8fbcce1236f85ade5c569d13144f77a1946bef8863e8f6eb4"}, - {file = "scipy-1.11.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:933baf588daa8dc9a92c20a0be32f56d43faf3d1a60ab11b3f08c356430f6e56"}, - {file = "scipy-1.11.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8fce70f39076a5aa62e92e69a7f62349f9574d8405c0a5de6ed3ef72de07f446"}, - {file = "scipy-1.11.4-cp310-cp310-win_amd64.whl", hash = "sha256:6550466fbeec7453d7465e74d4f4b19f905642c89a7525571ee91dd7adabb5a3"}, - {file = "scipy-1.11.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f313b39a7e94f296025e3cffc2c567618174c0b1dde173960cf23808f9fae4be"}, - {file = "scipy-1.11.4-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:1b7c3dca977f30a739e0409fb001056484661cb2541a01aba0bb0029f7b68db8"}, - {file = 
"scipy-1.11.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00150c5eae7b610c32589dda259eacc7c4f1665aedf25d921907f4d08a951b1c"}, - {file = "scipy-1.11.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:530f9ad26440e85766509dbf78edcfe13ffd0ab7fec2560ee5c36ff74d6269ff"}, - {file = "scipy-1.11.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5e347b14fe01003d3b78e196e84bd3f48ffe4c8a7b8a1afbcb8f5505cb710993"}, - {file = "scipy-1.11.4-cp311-cp311-win_amd64.whl", hash = "sha256:acf8ed278cc03f5aff035e69cb511741e0418681d25fbbb86ca65429c4f4d9cd"}, - {file = "scipy-1.11.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:028eccd22e654b3ea01ee63705681ee79933652b2d8f873e7949898dda6d11b6"}, - {file = "scipy-1.11.4-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:2c6ff6ef9cc27f9b3db93a6f8b38f97387e6e0591600369a297a50a8e96e835d"}, - {file = "scipy-1.11.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b030c6674b9230d37c5c60ab456e2cf12f6784596d15ce8da9365e70896effc4"}, - {file = "scipy-1.11.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad669df80528aeca5f557712102538f4f37e503f0c5b9541655016dd0932ca79"}, - {file = "scipy-1.11.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ce7fff2e23ab2cc81ff452a9444c215c28e6305f396b2ba88343a567feec9660"}, - {file = "scipy-1.11.4-cp312-cp312-win_amd64.whl", hash = "sha256:36750b7733d960d7994888f0d148d31ea3017ac15eef664194b4ef68d36a4a97"}, - {file = "scipy-1.11.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e619aba2df228a9b34718efb023966da781e89dd3d21637b27f2e54db0410d7"}, - {file = "scipy-1.11.4-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:f3cd9e7b3c2c1ec26364856f9fbe78695fe631150f94cd1c22228456404cf1ec"}, - {file = "scipy-1.11.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d10e45a6c50211fe256da61a11c34927c68f277e03138777bdebedd933712fea"}, - {file = 
"scipy-1.11.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91af76a68eeae0064887a48e25c4e616fa519fa0d38602eda7e0f97d65d57937"}, - {file = "scipy-1.11.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6df1468153a31cf55ed5ed39647279beb9cfb5d3f84369453b49e4b8502394fd"}, - {file = "scipy-1.11.4-cp39-cp39-win_amd64.whl", hash = "sha256:ee410e6de8f88fd5cf6eadd73c135020bfbbbdfcd0f6162c36a7638a1ea8cc65"}, - {file = "scipy-1.11.4.tar.gz", hash = "sha256:90a2b78e7f5733b9de748f589f09225013685f9b218275257f8a8168ededaeaa"}, + {file = "scikit_learn-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:12e40ac48555e6b551f0a0a5743cc94cc5a765c9513fe708e01f0aa001da2801"}, + {file = "scikit_learn-1.5.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:f405c4dae288f5f6553b10c4ac9ea7754d5180ec11e296464adb5d6ac68b6ef5"}, + {file = "scikit_learn-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df8ccabbf583315f13160a4bb06037bde99ea7d8211a69787a6b7c5d4ebb6fc3"}, + {file = "scikit_learn-1.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c75ea812cd83b1385bbfa94ae971f0d80adb338a9523f6bbcb5e0b0381151d4"}, + {file = "scikit_learn-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:a90c5da84829a0b9b4bf00daf62754b2be741e66b5946911f5bdfaa869fcedd6"}, + {file = "scikit_learn-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2a65af2d8a6cce4e163a7951a4cfbfa7fceb2d5c013a4b593686c7f16445cf9d"}, + {file = "scikit_learn-1.5.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:4c0c56c3005f2ec1db3787aeaabefa96256580678cec783986836fc64f8ff622"}, + {file = "scikit_learn-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f77547165c00625551e5c250cefa3f03f2fc92c5e18668abd90bfc4be2e0bff"}, + {file = "scikit_learn-1.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:118a8d229a41158c9f90093e46b3737120a165181a1b58c03461447aa4657415"}, + {file = 
"scikit_learn-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:a03b09f9f7f09ffe8c5efffe2e9de1196c696d811be6798ad5eddf323c6f4d40"}, + {file = "scikit_learn-1.5.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:460806030c666addee1f074788b3978329a5bfdc9b7d63e7aad3f6d45c67a210"}, + {file = "scikit_learn-1.5.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:1b94d6440603752b27842eda97f6395f570941857456c606eb1d638efdb38184"}, + {file = "scikit_learn-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d82c2e573f0f2f2f0be897e7a31fcf4e73869247738ab8c3ce7245549af58ab8"}, + {file = "scikit_learn-1.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3a10e1d9e834e84d05e468ec501a356226338778769317ee0b84043c0d8fb06"}, + {file = "scikit_learn-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:855fc5fa8ed9e4f08291203af3d3e5fbdc4737bd617a371559aaa2088166046e"}, + {file = "scikit_learn-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:40fb7d4a9a2db07e6e0cae4dc7bdbb8fada17043bac24104d8165e10e4cff1a2"}, + {file = "scikit_learn-1.5.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:47132440050b1c5beb95f8ba0b2402bbd9057ce96ec0ba86f2f445dd4f34df67"}, + {file = "scikit_learn-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:174beb56e3e881c90424e21f576fa69c4ffcf5174632a79ab4461c4c960315ac"}, + {file = "scikit_learn-1.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261fe334ca48f09ed64b8fae13f9b46cc43ac5f580c4a605cbb0a517456c8f71"}, + {file = "scikit_learn-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:057b991ac64b3e75c9c04b5f9395eaf19a6179244c089afdebaad98264bff37c"}, + {file = "scikit_learn-1.5.0.tar.gz", hash = "sha256:789e3db01c750ed6d496fa2db7d50637857b451e57bcae863bff707c1247bef7"}, +] + +[package.dependencies] +joblib = ">=1.2.0" +numpy = ">=1.19.5" +scipy = ">=1.6.0" +threadpoolctl = ">=3.1.0" + +[package.extras] +benchmark = ["matplotlib (>=3.3.4)", 
"memory_profiler (>=0.57.0)", "pandas (>=1.1.5)"] +build = ["cython (>=3.0.10)", "meson-python (>=0.15.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)"] +docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "polars (>=0.20.23)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)", "sphinx (>=6.0.0)", "sphinx-copybutton (>=0.5.2)", "sphinx-gallery (>=0.15.0)", "sphinx-prompt (>=1.3.0)", "sphinxext-opengraph (>=0.4.2)"] +examples = ["matplotlib (>=3.3.4)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)"] +install = ["joblib (>=1.2.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)", "threadpoolctl (>=3.1.0)"] +maintenance = ["conda-lock (==2.5.6)"] +tests = ["black (>=24.3.0)", "matplotlib (>=3.3.4)", "mypy (>=1.9)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "polars (>=0.20.23)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.2.1)", "scikit-image (>=0.17.2)"] + +[[package]] +name = "scipy" +version = "1.14.0" +description = "Fundamental algorithms for scientific computing in Python" +optional = false +python-versions = ">=3.10" +files = [ + {file = "scipy-1.14.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7e911933d54ead4d557c02402710c2396529540b81dd554fc1ba270eb7308484"}, + {file = "scipy-1.14.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:687af0a35462402dd851726295c1a5ae5f987bd6e9026f52e9505994e2f84ef6"}, + {file = "scipy-1.14.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:07e179dc0205a50721022344fb85074f772eadbda1e1b3eecdc483f8033709b7"}, + {file = "scipy-1.14.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:6a9c9a9b226d9a21e0a208bdb024c3982932e43811b62d202aaf1bb59af264b1"}, + {file = "scipy-1.14.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:076c27284c768b84a45dcf2e914d4000aac537da74236a0d45d82c6fa4b7b3c0"}, + {file = "scipy-1.14.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42470ea0195336df319741e230626b6225a740fd9dce9642ca13e98f667047c0"}, + {file = "scipy-1.14.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:176c6f0d0470a32f1b2efaf40c3d37a24876cebf447498a4cefb947a79c21e9d"}, + {file = "scipy-1.14.0-cp310-cp310-win_amd64.whl", hash = "sha256:ad36af9626d27a4326c8e884917b7ec321d8a1841cd6dacc67d2a9e90c2f0359"}, + {file = "scipy-1.14.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6d056a8709ccda6cf36cdd2eac597d13bc03dba38360f418560a93050c76a16e"}, + {file = "scipy-1.14.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:f0a50da861a7ec4573b7c716b2ebdcdf142b66b756a0d392c236ae568b3a93fb"}, + {file = "scipy-1.14.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:94c164a9e2498e68308e6e148646e486d979f7fcdb8b4cf34b5441894bdb9caf"}, + {file = "scipy-1.14.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:a7d46c3e0aea5c064e734c3eac5cf9eb1f8c4ceee756262f2c7327c4c2691c86"}, + {file = "scipy-1.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eee2989868e274aae26125345584254d97c56194c072ed96cb433f32f692ed8"}, + {file = "scipy-1.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3154691b9f7ed73778d746da2df67a19d046a6c8087c8b385bc4cdb2cfca74"}, + {file = "scipy-1.14.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c40003d880f39c11c1edbae8144e3813904b10514cd3d3d00c277ae996488cdb"}, + {file = "scipy-1.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:5b083c8940028bb7e0b4172acafda6df762da1927b9091f9611b0bcd8676f2bc"}, + {file = "scipy-1.14.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bff2438ea1330e06e53c424893ec0072640dac00f29c6a43a575cbae4c99b2b9"}, + {file = "scipy-1.14.0-cp312-cp312-macosx_12_0_arm64.whl", hash = 
"sha256:bbc0471b5f22c11c389075d091d3885693fd3f5e9a54ce051b46308bc787e5d4"}, + {file = "scipy-1.14.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:64b2ff514a98cf2bb734a9f90d32dc89dc6ad4a4a36a312cd0d6327170339eb0"}, + {file = "scipy-1.14.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:7d3da42fbbbb860211a811782504f38ae7aaec9de8764a9bef6b262de7a2b50f"}, + {file = "scipy-1.14.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d91db2c41dd6c20646af280355d41dfa1ec7eead235642178bd57635a3f82209"}, + {file = "scipy-1.14.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a01cc03bcdc777c9da3cfdcc74b5a75caffb48a6c39c8450a9a05f82c4250a14"}, + {file = "scipy-1.14.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:65df4da3c12a2bb9ad52b86b4dcf46813e869afb006e58be0f516bc370165159"}, + {file = "scipy-1.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:4c4161597c75043f7154238ef419c29a64ac4a7c889d588ea77690ac4d0d9b20"}, + {file = "scipy-1.14.0.tar.gz", hash = "sha256:b5923f48cb840380f9854339176ef21763118a7300a88203ccd0bdd26e58527b"}, ] [package.dependencies] -numpy = ">=1.21.6,<1.28.0" +numpy = ">=1.23.5,<2.3" [package.extras] -dev = ["click", "cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"] -doc = ["jupytext", "matplotlib (>2)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-design (>=0.2.0)"] -test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] +dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodestyle", "pydevtool", "rich-click", "ruff (>=0.0.292)", "types-psutil", "typing_extensions"] +doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.13.1)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0)", 
"sphinx-design (>=0.4.0)"] +test = ["Cython", "array-api-strict", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] [[package]] name = "six" @@ -1281,64 +2417,64 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.30" +version = "2.0.31" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.30-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3b48154678e76445c7ded1896715ce05319f74b1e73cf82d4f8b59b46e9c0ddc"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2753743c2afd061bb95a61a51bbb6a1a11ac1c44292fad898f10c9839a7f75b2"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7bfc726d167f425d4c16269a9a10fe8630ff6d14b683d588044dcef2d0f6be7"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4f61ada6979223013d9ab83a3ed003ded6959eae37d0d685db2c147e9143797"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a365eda439b7a00732638f11072907c1bc8e351c7665e7e5da91b169af794af"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bba002a9447b291548e8d66fd8c96a6a7ed4f2def0bb155f4f0a1309fd2735d5"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-win32.whl", hash = "sha256:0138c5c16be3600923fa2169532205d18891b28afa817cb49b50e08f62198bb8"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-win_amd64.whl", hash = "sha256:99650e9f4cf3ad0d409fed3eec4f071fadd032e9a5edc7270cd646a26446feeb"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:955991a09f0992c68a499791a753523f50f71a6885531568404fa0f231832aa0"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f69e4c756ee2686767eb80f94c0125c8b0a0b87ede03eacc5c8ae3b54b99dc46"}, - {file = 
"SQLAlchemy-2.0.30-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69c9db1ce00e59e8dd09d7bae852a9add716efdc070a3e2068377e6ff0d6fdaa"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1429a4b0f709f19ff3b0cf13675b2b9bfa8a7e79990003207a011c0db880a13"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:efedba7e13aa9a6c8407c48facfdfa108a5a4128e35f4c68f20c3407e4376aa9"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:16863e2b132b761891d6c49f0a0f70030e0bcac4fd208117f6b7e053e68668d0"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-win32.whl", hash = "sha256:2ecabd9ccaa6e914e3dbb2aa46b76dede7eadc8cbf1b8083c94d936bcd5ffb49"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-win_amd64.whl", hash = "sha256:0b3f4c438e37d22b83e640f825ef0f37b95db9aa2d68203f2c9549375d0b2260"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5a79d65395ac5e6b0c2890935bad892eabb911c4aa8e8015067ddb37eea3d56c"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9a5baf9267b752390252889f0c802ea13b52dfee5e369527da229189b8bd592e"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cb5a646930c5123f8461f6468901573f334c2c63c795b9af350063a736d0134"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:296230899df0b77dec4eb799bcea6fbe39a43707ce7bb166519c97b583cfcab3"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c62d401223f468eb4da32627bffc0c78ed516b03bb8a34a58be54d618b74d472"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3b69e934f0f2b677ec111b4d83f92dc1a3210a779f69bf905273192cf4ed433e"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-win32.whl", hash = 
"sha256:77d2edb1f54aff37e3318f611637171e8ec71472f1fdc7348b41dcb226f93d90"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-win_amd64.whl", hash = "sha256:b6c7ec2b1f4969fc19b65b7059ed00497e25f54069407a8701091beb69e591a5"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5a8e3b0a7e09e94be7510d1661339d6b52daf202ed2f5b1f9f48ea34ee6f2d57"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b60203c63e8f984df92035610c5fb76d941254cf5d19751faab7d33b21e5ddc0"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1dc3eabd8c0232ee8387fbe03e0a62220a6f089e278b1f0aaf5e2d6210741ad"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:40ad017c672c00b9b663fcfcd5f0864a0a97828e2ee7ab0c140dc84058d194cf"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e42203d8d20dc704604862977b1470a122e4892791fe3ed165f041e4bf447a1b"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-win32.whl", hash = "sha256:2a4f4da89c74435f2bc61878cd08f3646b699e7d2eba97144030d1be44e27584"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-win_amd64.whl", hash = "sha256:b6bf767d14b77f6a18b6982cbbf29d71bede087edae495d11ab358280f304d8e"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc0c53579650a891f9b83fa3cecd4e00218e071d0ba00c4890f5be0c34887ed3"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:311710f9a2ee235f1403537b10c7687214bb1f2b9ebb52702c5aa4a77f0b3af7"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:408f8b0e2c04677e9c93f40eef3ab22f550fecb3011b187f66a096395ff3d9fd"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37a4b4fb0dd4d2669070fb05b8b8824afd0af57587393015baee1cf9890242d9"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:a943d297126c9230719c27fcbbeab57ecd5d15b0bd6bfd26e91bfcfe64220621"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0a089e218654e740a41388893e090d2e2c22c29028c9d1353feb38638820bbeb"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-win32.whl", hash = "sha256:fa561138a64f949f3e889eb9ab8c58e1504ab351d6cf55259dc4c248eaa19da6"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-win_amd64.whl", hash = "sha256:7d74336c65705b986d12a7e337ba27ab2b9d819993851b140efdf029248e818e"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae8c62fe2480dd61c532ccafdbce9b29dacc126fe8be0d9a927ca3e699b9491a"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2383146973a15435e4717f94c7509982770e3e54974c71f76500a0136f22810b"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8409de825f2c3b62ab15788635ccaec0c881c3f12a8af2b12ae4910a0a9aeef6"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0094c5dc698a5f78d3d1539853e8ecec02516b62b8223c970c86d44e7a80f6c7"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:edc16a50f5e1b7a06a2dcc1f2205b0b961074c123ed17ebda726f376a5ab0953"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f7703c2010355dd28f53deb644a05fc30f796bd8598b43f0ba678878780b6e4c"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-win32.whl", hash = "sha256:1f9a727312ff6ad5248a4367358e2cf7e625e98b1028b1d7ab7b806b7d757513"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-win_amd64.whl", hash = "sha256:a0ef36b28534f2a5771191be6edb44cc2673c7b2edf6deac6562400288664221"}, - {file = "SQLAlchemy-2.0.30-py3-none-any.whl", hash = "sha256:7108d569d3990c71e26a42f60474b4c02c8586c4681af5fd67e51a044fdea86a"}, - {file = "SQLAlchemy-2.0.30.tar.gz", hash = "sha256:2b1708916730f4830bc69d6f49d37f7698b5bd7530aca7f04f785f8849e95255"}, + {file = 
"SQLAlchemy-2.0.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f2a213c1b699d3f5768a7272de720387ae0122f1becf0901ed6eaa1abd1baf6c"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9fea3d0884e82d1e33226935dac990b967bef21315cbcc894605db3441347443"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ad7f221d8a69d32d197e5968d798217a4feebe30144986af71ada8c548e9fa"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2bee229715b6366f86a95d497c347c22ddffa2c7c96143b59a2aa5cc9eebbc"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cd5b94d4819c0c89280b7c6109c7b788a576084bf0a480ae17c227b0bc41e109"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:750900a471d39a7eeba57580b11983030517a1f512c2cb287d5ad0fcf3aebd58"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-win32.whl", hash = "sha256:7bd112be780928c7f493c1a192cd8c5fc2a2a7b52b790bc5a84203fb4381c6be"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-win_amd64.whl", hash = "sha256:5a48ac4d359f058474fadc2115f78a5cdac9988d4f99eae44917f36aa1476327"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f68470edd70c3ac3b6cd5c2a22a8daf18415203ca1b036aaeb9b0fb6f54e8298"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e2c38c2a4c5c634fe6c3c58a789712719fa1bf9b9d6ff5ebfce9a9e5b89c1ca"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd15026f77420eb2b324dcb93551ad9c5f22fab2c150c286ef1dc1160f110203"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2196208432deebdfe3b22185d46b08f00ac9d7b01284e168c212919891289396"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:352b2770097f41bff6029b280c0e03b217c2dcaddc40726f8f53ed58d8a85da4"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:56d51ae825d20d604583f82c9527d285e9e6d14f9a5516463d9705dab20c3740"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-win32.whl", hash = "sha256:6e2622844551945db81c26a02f27d94145b561f9d4b0c39ce7bfd2fda5776dac"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-win_amd64.whl", hash = "sha256:ccaf1b0c90435b6e430f5dd30a5aede4764942a695552eb3a4ab74ed63c5b8d3"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3b74570d99126992d4b0f91fb87c586a574a5872651185de8297c6f90055ae42"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f77c4f042ad493cb8595e2f503c7a4fe44cd7bd59c7582fd6d78d7e7b8ec52c"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd1591329333daf94467e699e11015d9c944f44c94d2091f4ac493ced0119449"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74afabeeff415e35525bf7a4ecdab015f00e06456166a2eba7590e49f8db940e"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b9c01990d9015df2c6f818aa8f4297d42ee71c9502026bb074e713d496e26b67"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:66f63278db425838b3c2b1c596654b31939427016ba030e951b292e32b99553e"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-win32.whl", hash = "sha256:0b0f658414ee4e4b8cbcd4a9bb0fd743c5eeb81fc858ca517217a8013d282c96"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-win_amd64.whl", hash = "sha256:fa4b1af3e619b5b0b435e333f3967612db06351217c58bfb50cee5f003db2a5a"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f43e93057cf52a227eda401251c72b6fbe4756f35fa6bfebb5d73b86881e59b0"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d337bf94052856d1b330d5fcad44582a30c532a2463776e1651bd3294ee7e58b"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c06fb43a51ccdff3b4006aafee9fcf15f63f23c580675f7734245ceb6b6a9e05"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:b6e22630e89f0e8c12332b2b4c282cb01cf4da0d26795b7eae16702a608e7ca1"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:79a40771363c5e9f3a77f0e28b3302801db08040928146e6808b5b7a40749c88"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-win32.whl", hash = "sha256:501ff052229cb79dd4c49c402f6cb03b5a40ae4771efc8bb2bfac9f6c3d3508f"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-win_amd64.whl", hash = "sha256:597fec37c382a5442ffd471f66ce12d07d91b281fd474289356b1a0041bdf31d"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dc6d69f8829712a4fd799d2ac8d79bdeff651c2301b081fd5d3fe697bd5b4ab9"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:23b9fbb2f5dd9e630db70fbe47d963c7779e9c81830869bd7d137c2dc1ad05fb"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a21c97efcbb9f255d5c12a96ae14da873233597dfd00a3a0c4ce5b3e5e79704"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26a6a9837589c42b16693cf7bf836f5d42218f44d198f9343dd71d3164ceeeac"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc251477eae03c20fae8db9c1c23ea2ebc47331bcd73927cdcaecd02af98d3c3"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2fd17e3bb8058359fa61248c52c7b09a97cf3c820e54207a50af529876451808"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-win32.whl", hash = "sha256:c76c81c52e1e08f12f4b6a07af2b96b9b15ea67ccdd40ae17019f1c373faa227"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-win_amd64.whl", hash = 
"sha256:4b600e9a212ed59355813becbcf282cfda5c93678e15c25a0ef896b354423238"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b6cf796d9fcc9b37011d3f9936189b3c8074a02a4ed0c0fbbc126772c31a6d4"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:78fe11dbe37d92667c2c6e74379f75746dc947ee505555a0197cfba9a6d4f1a4"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fc47dc6185a83c8100b37acda27658fe4dbd33b7d5e7324111f6521008ab4fe"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a41514c1a779e2aa9a19f67aaadeb5cbddf0b2b508843fcd7bafdf4c6864005"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:afb6dde6c11ea4525318e279cd93c8734b795ac8bb5dda0eedd9ebaca7fa23f1"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3f9faef422cfbb8fd53716cd14ba95e2ef655400235c3dfad1b5f467ba179c8c"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-win32.whl", hash = "sha256:fc6b14e8602f59c6ba893980bea96571dd0ed83d8ebb9c4479d9ed5425d562e9"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-win_amd64.whl", hash = "sha256:3cb8a66b167b033ec72c3812ffc8441d4e9f5f78f5e31e54dcd4c90a4ca5bebc"}, + {file = "SQLAlchemy-2.0.31-py3-none-any.whl", hash = "sha256:69f3e3c08867a8e4856e92d7afb618b95cdee18e0bc1647b77599722c9a28911"}, + {file = "SQLAlchemy-2.0.31.tar.gz", hash = "sha256:b607489dd4a54de56984a0c7656247504bd5523d9d0ba799aef59d4add009484"}, ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +greenlet = {version = "!=0.4.17", markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or 
platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} typing-extensions = ">=4.6.0" [package.extras] @@ -1366,6 +2502,25 @@ postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] pymysql = ["pymysql"] sqlcipher = ["sqlcipher3_binary"] +[[package]] +name = "stack-data" +version = "0.6.3" +description = "Extract data from python stack frames and tracebacks for informative displays" +optional = false +python-versions = "*" +files = [ + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, +] + +[package.dependencies] +asttokens = ">=2.1.0" +executing = ">=1.2.0" +pure-eval = "*" + +[package.extras] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] + [[package]] name = "tblib" version = "3.0.0" @@ -1377,6 +2532,32 @@ files = [ {file = "tblib-3.0.0.tar.gz", hash = "sha256:93622790a0a29e04f0346458face1e144dc4d32f493714c6c3dff82a4adb77e6"}, ] +[[package]] +name = "tenacity" +version = "8.5.0" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tenacity-8.5.0-py3-none-any.whl", hash = "sha256:b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687"}, + {file = "tenacity-8.5.0.tar.gz", hash = "sha256:8bc6c0c8a09b31e6cad13c47afbed1a567518250a9a171418582ed8d9c20ca78"}, +] + +[package.extras] +doc = ["reno", "sphinx"] +test = ["pytest", "tornado (>=4.5)", "typeguard"] + +[[package]] +name = "threadpoolctl" +version = "3.5.0" +description = "threadpoolctl" +optional = false +python-versions = ">=3.8" +files = [ + {file = "threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467"}, + {file = "threadpoolctl-3.5.0.tar.gz", hash = 
"sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"}, +] + [[package]] name = "tomli" version = "2.0.1" @@ -1390,13 +2571,13 @@ files = [ [[package]] name = "tomlkit" -version = "0.12.5" +version = "0.13.0" description = "Style preserving TOML library" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tomlkit-0.12.5-py3-none-any.whl", hash = "sha256:af914f5a9c59ed9d0762c7b64d3b5d5df007448eb9cd2edc8a46b1eafead172f"}, - {file = "tomlkit-0.12.5.tar.gz", hash = "sha256:eef34fba39834d4d6b73c9ba7f3e4d1c417a4e56f89a7e96e090dd0d24b8fb3c"}, + {file = "tomlkit-0.13.0-py3-none-any.whl", hash = "sha256:7075d3042d03b80f603482d69bf0c8f345c2b30e41699fd8883227f89972b264"}, + {file = "tomlkit-0.13.0.tar.gz", hash = "sha256:08ad192699734149f5b97b45f1f18dad7eb1b6d16bc72ad0c2335772650d7b72"}, ] [[package]] @@ -1412,52 +2593,22 @@ files = [ [[package]] name = "tornado" -version = "6.1" +version = "6.4.1" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
optional = false -python-versions = ">= 3.5" -files = [ - {file = "tornado-6.1-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:d371e811d6b156d82aa5f9a4e08b58debf97c302a35714f6f45e35139c332e32"}, - {file = "tornado-6.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:0d321a39c36e5f2c4ff12b4ed58d41390460f798422c4504e09eb5678e09998c"}, - {file = "tornado-6.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9de9e5188a782be6b1ce866e8a51bc76a0fbaa0e16613823fc38e4fc2556ad05"}, - {file = "tornado-6.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:61b32d06ae8a036a6607805e6720ef00a3c98207038444ba7fd3d169cd998910"}, - {file = "tornado-6.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:3e63498f680547ed24d2c71e6497f24bca791aca2fe116dbc2bd0ac7f191691b"}, - {file = "tornado-6.1-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:6c77c9937962577a6a76917845d06af6ab9197702a42e1346d8ae2e76b5e3675"}, - {file = "tornado-6.1-cp35-cp35m-win32.whl", hash = "sha256:6286efab1ed6e74b7028327365cf7346b1d777d63ab30e21a0f4d5b275fc17d5"}, - {file = "tornado-6.1-cp35-cp35m-win_amd64.whl", hash = "sha256:fa2ba70284fa42c2a5ecb35e322e68823288a4251f9ba9cc77be04ae15eada68"}, - {file = "tornado-6.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0a00ff4561e2929a2c37ce706cb8233b7907e0cdc22eab98888aca5dd3775feb"}, - {file = "tornado-6.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:748290bf9112b581c525e6e6d3820621ff020ed95af6f17fedef416b27ed564c"}, - {file = "tornado-6.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:e385b637ac3acaae8022e7e47dfa7b83d3620e432e3ecb9a3f7f58f150e50921"}, - {file = "tornado-6.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:25ad220258349a12ae87ede08a7b04aca51237721f63b1808d39bdb4b2164558"}, - {file = "tornado-6.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:65d98939f1a2e74b58839f8c4dab3b6b3c1ce84972ae712be02845e65391ac7c"}, - {file = "tornado-6.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = 
"sha256:e519d64089b0876c7b467274468709dadf11e41d65f63bba207e04217f47c085"}, - {file = "tornado-6.1-cp36-cp36m-win32.whl", hash = "sha256:b87936fd2c317b6ee08a5741ea06b9d11a6074ef4cc42e031bc6403f82a32575"}, - {file = "tornado-6.1-cp36-cp36m-win_amd64.whl", hash = "sha256:cc0ee35043162abbf717b7df924597ade8e5395e7b66d18270116f8745ceb795"}, - {file = "tornado-6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7250a3fa399f08ec9cb3f7b1b987955d17e044f1ade821b32e5f435130250d7f"}, - {file = "tornado-6.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:ed3ad863b1b40cd1d4bd21e7498329ccaece75db5a5bf58cd3c9f130843e7102"}, - {file = "tornado-6.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:dcef026f608f678c118779cd6591c8af6e9b4155c44e0d1bc0c87c036fb8c8c4"}, - {file = "tornado-6.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:70dec29e8ac485dbf57481baee40781c63e381bebea080991893cd297742b8fd"}, - {file = "tornado-6.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:d3f7594930c423fd9f5d1a76bee85a2c36fd8b4b16921cae7e965f22575e9c01"}, - {file = "tornado-6.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3447475585bae2e77ecb832fc0300c3695516a47d46cefa0528181a34c5b9d3d"}, - {file = "tornado-6.1-cp37-cp37m-win32.whl", hash = "sha256:e7229e60ac41a1202444497ddde70a48d33909e484f96eb0da9baf8dc68541df"}, - {file = "tornado-6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:cb5ec8eead331e3bb4ce8066cf06d2dfef1bfb1b2a73082dfe8a161301b76e37"}, - {file = "tornado-6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:20241b3cb4f425e971cb0a8e4ffc9b0a861530ae3c52f2b0434e6c1b57e9fd95"}, - {file = "tornado-6.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:c77da1263aa361938476f04c4b6c8916001b90b2c2fdd92d8d535e1af48fba5a"}, - {file = "tornado-6.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:fba85b6cd9c39be262fcd23865652920832b61583de2a2ca907dbd8e8a8c81e5"}, - {file = "tornado-6.1-cp38-cp38-manylinux2010_i686.whl", hash = 
"sha256:1e8225a1070cd8eec59a996c43229fe8f95689cb16e552d130b9793cb570a288"}, - {file = "tornado-6.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d14d30e7f46a0476efb0deb5b61343b1526f73ebb5ed84f23dc794bdb88f9d9f"}, - {file = "tornado-6.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8f959b26f2634a091bb42241c3ed8d3cedb506e7c27b8dd5c7b9f745318ddbb6"}, - {file = "tornado-6.1-cp38-cp38-win32.whl", hash = "sha256:34ca2dac9e4d7afb0bed4677512e36a52f09caa6fded70b4e3e1c89dbd92c326"}, - {file = "tornado-6.1-cp38-cp38-win_amd64.whl", hash = "sha256:6196a5c39286cc37c024cd78834fb9345e464525d8991c21e908cc046d1cc02c"}, - {file = "tornado-6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0ba29bafd8e7e22920567ce0d232c26d4d47c8b5cf4ed7b562b5db39fa199c5"}, - {file = "tornado-6.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:33892118b165401f291070100d6d09359ca74addda679b60390b09f8ef325ffe"}, - {file = "tornado-6.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7da13da6f985aab7f6f28debab00c67ff9cbacd588e8477034c0652ac141feea"}, - {file = "tornado-6.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:e0791ac58d91ac58f694d8d2957884df8e4e2f6687cdf367ef7eb7497f79eaa2"}, - {file = "tornado-6.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:66324e4e1beede9ac79e60f88de548da58b1f8ab4b2f1354d8375774f997e6c0"}, - {file = "tornado-6.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:a48900ecea1cbb71b8c71c620dee15b62f85f7c14189bdeee54966fbd9a0c5bd"}, - {file = "tornado-6.1-cp39-cp39-win32.whl", hash = "sha256:d3d20ea5782ba63ed13bc2b8c291a053c8d807a8fa927d941bd718468f7b950c"}, - {file = "tornado-6.1-cp39-cp39-win_amd64.whl", hash = "sha256:548430be2740e327b3fe0201abe471f314741efcb0067ec4f2d7dcfb4825f3e4"}, - {file = "tornado-6.1.tar.gz", hash = "sha256:33c6e81d7bd55b468d2e793517c909b139960b6c790a60b7991b9b6b76fb9791"}, +python-versions = ">=3.8" +files = [ + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = 
"sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"}, + {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"}, + {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"}, + {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, ] [[package]] @@ -1471,15 +2622,26 @@ files = [ {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = 
"sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + [[package]] name = "urllib3" -version = "2.2.1" +version = "2.2.2" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, - {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, ] [package.extras] @@ -1490,13 +2652,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.26.2" +version = "20.26.3" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.26.2-py3-none-any.whl", hash = "sha256:a624db5e94f01ad993d476b9ee5346fdf7b9de43ccaee0e0197012dc838a0e9b"}, - {file = "virtualenv-20.26.2.tar.gz", hash = "sha256:82bf0f4eebbb78d36ddaee0283d43fe5736b53880b8a8cdcd37390a07ac3741c"}, + {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, + {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, ] [package.dependencies] @@ -1508,6 +2670,17 @@ platformdirs = ">=3.9.1,<5" docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky 
(>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] +[[package]] +name = "wcwidth" +version = "0.2.13" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, +] + [[package]] name = "wrapt" version = "1.16.0" @@ -1587,6 +2760,17 @@ files = [ {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, ] +[[package]] +name = "xyzservices" +version = "2024.6.0" +description = "Source of XYZ tiles providers" +optional = false +python-versions = ">=3.8" +files = [ + {file = "xyzservices-2024.6.0-py3-none-any.whl", hash = "sha256:fecb2508f0f2b71c819aecf5df2c03cef001c56a4b49302e640f3b34710d25e4"}, + {file = "xyzservices-2024.6.0.tar.gz", hash = "sha256:58c1bdab4257d2551b9ef91cd48571f77b7c4d2bc45bf5e3c05ac97b3a4d7282"}, +] + [[package]] name = "zict" version = "3.0.0" @@ -1598,7 +2782,22 @@ files = [ {file = "zict-3.0.0.tar.gz", hash = "sha256:e321e263b6a97aafc0790c3cfb3c04656b7066e6738c37fffcca95d803c9fba5"}, ] +[[package]] +name = "zipp" +version = "3.19.2" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, + {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, +] + +[package.extras] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", 
"rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + [metadata] lock-version = "2.0" -python-versions = "~3.10" -content-hash = "a4f985eb9db0765c116b5eda0de8aff2ed8b61b44f33e0a7de00d9f652398079" +python-versions = ">=3.10, <4.0" +content-hash = "76f5571674cc44fe7ccbbaf7f23875f86b8be5cb431ecd5da7d2b013c2c6af41" diff --git a/pyproject.toml b/pyproject.toml index 324b427..a5d76be 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,8 +4,8 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "launchcontainers" -version = "0.3.16" -description = "Launch Containers Package" +version = "0.3.18" +description = "A Python based soft to launch neuroimaging containers on clusters" authors = [ "Garikoitz Lerma-Usabiaga", "Leandro Lecca", "Mengxing Liu", @@ -18,26 +18,48 @@ maintainers = ["Garikoitz Lerma-Usabiaga", "Yongning Lei"] readme = "README.md" repository = "https://github.com/garikoitz/launchcontainers" license = "MIT" -keywords = ["HPC", "MRI", "RTP2","DWI","dMRI"] -classifiers = ["Topic :: Software Development :: Libraries :: Python Modules"] +keywords = ["HPC", "MRI","nilearn", "RTP2","DWI","dMRI"] +classifiers = [ + "Topic :: Software Development :: Libraries :: Python Modules", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + ] packages = [{include = "launchcontainers", from = "src"}] [tool.poetry.scripts] launchcontainers = "launchcontainers.launch:main" [tool.poetry.dependencies] -python = "^3.10" +# bokeh +bokeh = ">=2.4.2" +# python after Oct 4, 2021 +python = "^3.10" +# bids no limit bids = "*" -pybids = "0.16.1" -distributed = "2022.7.0" -dask = "2022.7.0" -dask_jobqueue = "0.8.1" -nibabel = 
"5.1.0" -numpy = "1.23.5" -pandas = "1.5.3" -scipy = "1.11.4" +# ipython after Oct 30, 2021 +ipython = ">=7.29.0" +# pybids after May 1, 2023 +pybids = "^0.16.1" +# distributed belongs to dask, it is after Jan, 2024 +distributed = ">=2024.1.0" +dask = ">=2024.1.0" +# dask-jobqueue after Feb 22, 2024 +dask-jobqueue = ">=0.8.5" +# After April 3, 2023 +nibabel = "^5.1.0" +# After Jan 2, 2024 +numpy = ">=1.26.3" +# After Jan 19, 2023 +pandas = ">=1.5.3" +# After Nov 18, 2023 +scipy = "^1.11.4" +# After July 18, 2023 PyYAML = "6.0.1" coverage-conditional-plugin = "^0.7.0" +gdist = "==2.1.0" +scikit-learn = ">=1.5.0" +nilearn = ">=0.10.4" [tool.poetry.dev-dependencies] nox-poetry = "*" @@ -51,4 +73,5 @@ flake8-bugbear = "*" flake8-broken-line = "*" flake8-comprehensions = "*" # Formatting -black = {version = "^23.1a1", allow-prereleases = true} +black = { version = "^23.1a1", allow-prereleases = true} +black = { version = "^23.1a1", allow-prereleases = true} \ No newline at end of file diff --git a/src/launchcontainers/__init__.py b/src/launchcontainers/__init__.py old mode 100755 new mode 100644 index e2dee03..e69de29 --- a/src/launchcontainers/__init__.py +++ b/src/launchcontainers/__init__.py @@ -1,16 +0,0 @@ - - -""" -MIT License - -Copyright (c) 2020-2023 Garikoitz Lerma-Usabiaga -Copyright (c) 2020-2022 Mengxing Liu -Copyright (c) 2022-2023 Leandro Lecca -Copyright (c) 2022-2023 Yongning Lei -Copyright (c) 2023 David Linhardt -Copyright (c) 2023 Iñigo Tellaetxe - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in 
all copies or substantial portions of the Software. -""" \ No newline at end of file diff --git a/src/launchcontainers/configs/config_queue_job.yaml b/src/launchcontainers/configs/config_queue_job.yaml deleted file mode 100755 index 564dc74..0000000 --- a/src/launchcontainers/configs/config_queue_job.yaml +++ /dev/null @@ -1,179 +0,0 @@ -# jobqueue: -# oar: -# name: dask-worker - -# # Dask worker options -# cores: null # Total number of cores per job -# memory: null # Total amount of memory per job -# processes: 1 # Number of Python processes per job - -# interface: null # Network interface to use like eth0 or ib0 -# death-timeout: 60 # Number of seconds to wait if a worker can not find a scheduler -# local-directory: null # Location of fast local storage like /scratch or $TMPDIR - -# # OAR resource manager options -# shebang: "#!/usr/bin/env bash" -# queue: null -# project: null -# walltime: '00:30:00' -# extra: [] -# env-extra: [] -# resource-spec: null -# job-extra: [] -# log-directory: null - -# pbs: -# name: dask-worker - -# # Dask worker options -# cores: null # Total number of cores per job -# memory: null # Total amount of memory per job -# processes: 1 # Number of Python processes per job - -# interface: null # Network interface to use like eth0 or ib0 -# death-timeout: 60 # Number of seconds to wait if a worker can not find a scheduler -# local-directory: null # Location of fast local storage like /scratch or $TMPDIR - -# # PBS resource manager options -# shebang: "#!/usr/bin/env bash" -# queue: null -# project: null -# walltime: '00:30:00' -# extra: [] -# env-extra: [] -# resource-spec: null -# job-extra: [] -# log-directory: null - -# sge: -# name: dask-worker - -# # Dask worker options -# cores: null # Total number of cores per job -# memory: null # Total amount of memory per job -# processes: 1 # Number of Python processes per job - -# interface: null # Network interface to use like eth0 or ib0 -# death-timeout: 60 # Number of seconds to wait if a 
worker can not find a scheduler -# local-directory: null # Location of fast local storage like /scratch or $TMPDIR - -# # SGE resource manager options -# shebang: "#!/usr/bin/env bash" -# queue: null -# project: null -# walltime: '00:30:00' -# extra: [] -# env-extra: [] -# job-extra: [] -# log-directory: null - -# resource-spec: null - -# slurm: -# name: dask-worker - -# # Dask worker options -# cores: null # Total number of cores per job -# memory: null # Total amount of memory per job -# processes: 1 # Number of Python processes per job - -# interface: null # Network interface to use like eth0 or ib0 -# death-timeout: 60 # Number of seconds to wait if a worker can not find a scheduler -# local-directory: null # Location of fast local storage like /scratch or $TMPDIR - -# # SLURM resource manager options -# shebang: "#!/usr/bin/env bash" -# queue: null -# project: null -# walltime: '00:30:00' -# extra: [] -# env-extra: [] -# job-cpu: null -# job-mem: null -# job-extra: [] -# log-directory: null - -# moab: -# name: dask-worker - -# # Dask worker options -# cores: null # Total number of cores per job -# memory: null # Total amount of memory per job -# processes: 1 # Number of Python processes per job - -# interface: null # Network interface to use like eth0 or ib0 -# death-timeout: 60 # Number of seconds to wait if a worker can not find a scheduler -# local-directory: null # Location of fast local storage like /scratch or $TMPDIR - -# # PBS resource manager options -# shebang: "#!/usr/bin/env bash" -# queue: null -# project: null -# walltime: '00:30:00' -# extra: [] -# env-extra: [] -# resource-spec: null -# job-extra: [] -# log-directory: null - -# lsf: -# name: dask-worker - -# # Dask worker options -# cores: null # Total number of cores per job -# memory: null # Total amount of memory per job -# processes: 1 # Number of Python processes per job - -# interface: null # Network interface to use like eth0 or ib0 -# death-timeout: 60 # Number of seconds to wait if a 
worker can not find a scheduler -# local-directory: null # Location of fast local storage like /scratch or $TMPDIR - -# # LSF resource manager options -# shebang: "#!/usr/bin/env bash" -# queue: null -# project: null -# walltime: '00:30' -# extra: [] -# env-extra: [] -# ncpus: null -# mem: null -# job-extra: [] -# log-directory: null -# lsf-units: null -# use-stdin: null - -# htcondor: -# name: dask-worker - -# # Dask worker options -# cores: null # Total number of cores per job -# memory: null # Total amount of memory per job -# processes: 1 # Number of Python processes per job - -# interface: null # Network interface to use like eth0 or ib0 -# death-timeout: 60 # Number of seconds to wait if a worker can not find a scheduler -# local-directory: null # Location of fast local storage like /scratch or $TMPDIR - -# # HTCondor Resource Manager options -# disk: null # Total amount of disk per job -# extra: [] -# env-extra: [] -# job-extra: {} # Extra submit attributes -# log-directory: null -# shebang: "#!/usr/bin/env condor_submit" - -# local: -# name: dask-worker -# # Dask worker options -# cores: null # Total number of cores per job -# memory: null # Total amount of memory per job -# processes: 1 # Number of Python processes per job - -# interface: null # Network interface to use like eth0 or ib0 -# death-timeout: 60 # Number of seconds to wait if a worker can not find a scheduler -# local-directory: null # Location of fast local storage like /scratch or $TMPDIR - -# extra: [] -# env-extra: [] -# job-extra: [] -# log-directory: null \ No newline at end of file diff --git a/src/launchcontainers/configs/container_specific_example_configs.zip b/src/launchcontainers/configs/container_specific_example_configs.zip index a1861fe..1231ae8 100644 Binary files a/src/launchcontainers/configs/container_specific_example_configs.zip and b/src/launchcontainers/configs/container_specific_example_configs.zip differ diff --git a/src/launchcontainers/configs/example_debug.json 
b/src/launchcontainers/configs/example_debug.json deleted file mode 100755 index c16ecf8..0000000 --- a/src/launchcontainers/configs/example_debug.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - // Use IntelliSense to learn about possible attributes. - // Hover to view descriptions of existing attributes. - // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 - "version": "0.2.0", - "configurations": [ - - { - "name": "Python Debugger: Current File with Arguments", - "type": "debugpy", - "request": "launch", - "program": "launch.py", - "console": "integratedTerminal", - "args": "-lcc /fileserver/project/proj_gari/training/scripts/config.yaml -ssl /fileserver/project/proj_gari/training/scripts/subSesList.txt -cc /fileserver/project/proj_gari/training/scripts/rtppreproc_1.2.0-3.0.3_config.json" - } - ] -} \ No newline at end of file diff --git a/src/launchcontainers/configs/l1_glm.yaml b/src/launchcontainers/configs/l1_glm.yaml new file mode 100644 index 0000000..13ad0b0 --- /dev/null +++ b/src/launchcontainers/configs/l1_glm.yaml @@ -0,0 +1,166 @@ + +experiment: + task: fLoc + # Runs to process + run_nums: ["01", "02", "03", "04", "05", "06"] + # dummy scan of your experiment + dummy_scans: 6 +model: + # fMRI time-series space: volumetric or surface based + # valid options: EPI, MNI, fsnative, fsaverage. 
Currently only valid for fsnative and fsaverage + space: fsnative + + # if is surface based, choose which hemisphere, input is a list, by default it would be both hemisphere because it is fast + hemis: ['lh','rh'] + + # TODO: whether mask the image to conduct the GLM only on some part to increase power + mask_EPI: False + + # Valid option: fslabel, bimap.nii + # if mask EPI is true, then you need to choose mask method: either binary mask xx.nii or freesurfer label + mask_method: fslabel + + # The things between ?h.xx.label + # if space is freesurfer space, give the label name, TODO: if space is individual T1, + fslabel_name: votc + + # full path or simply the name of this file, + # if it is not full path, then you need to put it under the default place /basedir/bidsdir/derivatives/l1_glm/analysis-xxx/ + # if mask_methods is bimap.nii this option will be used + # please, put this nii file under the analysis folder of the l1_glm, + maskfile_nii_path: /path/to/nii + + # From the fMRIPrep command, align slice time correction to start of TR + slice_time_ref: 0.5 #0.5 or 0 + + # HRF model to use + hrf_model: spm + + # Do not high_pass since we use fMRIPrep's cosine regressors + drift_model: None # Do not high_pass since we use fMRIPrep's cosine regressors + + # Do not high_pass since we use fMRIPrep's cosine regressors + drift_order: 0 # Do not high_pass since we use fMRIPrep's cosine regressors + + # Do not high_pass since we use fMRIPrep's cosine regressors + high_pass: None # Do not high_pass since we use fMRIPrep's cosine regressors + + # Motion regressors to use + motion_regressors: [ + "framewise_displacement", + "rot_x", + "rot_y", + "rot_z", + "trans_x", + "trans_y", + "trans_z", + ] + + # currently don't know how to use the following three + use_acompcor: True + use_non_steady_state: True + use_consine_regressors: True + +contrast_groups: + All: + - adult + - child + - body + - limb + - ES_word + - ES_FF + - ES_CB + - ES_CS + - ES_SC + AllnoLimbs: + - adult + - 
child + - ES_word + - ES_FF + - ES_CB + - ES_CS + - ES_SC + AllnoWord: + - adult + - child + - body + - limb + - ES_FF + - ES_CB + - ES_CS + - ES_SC + AllnoWordnoLEX: + - adult + - child + - body + - limb + - ES_CB + - ES_SC + AllnoFace: + - body + - limb + - ES_word + - ES_FF + - ES_CB + - ES_CS + - ES_SC + LEXPER: + - ES_FF + - ES_CB + - ES_CS + - ES_SC + PER: + - ES_CB + - ES_SC + LEX: + - ES_CS + - ES_FF + Faces: + - adult + - child + Word: + - ES_word + Limbs: + - body + - limb + CB: + - ES_CB + SC: + - ES_SC + CS: + - ES_CS + FF: + - ES_FF + Adult: + - adult + Child: + - child + Limb: + - limb + Body: + - body +contrasts: + - AllvsNull + - PERvsNull + - LEXvsNull + - PERvsLEX + - WordvsLEX + - WordvsPER + - WordvsLEXPER + - WordvsAllnoWordnoLEX + - WordvsAllnoWord + - LEXvsAllnoWordnoLEX + - SCvsCB + - CSvsFF + - FacesvsNull + - FacesvsLEX + - FacesvsPER + - FacesvsLEXPER + - FacesvsAllnoFace + - AdultvsChild + - LimbsvsNull + - LimbsvsLEX + - LimbsvsPER + - LimbsvsLEXPER + - LimbsvsAllnoLimbs + - BodyvsLimb \ No newline at end of file diff --git a/src/launchcontainers/configs/lc_config_0.4.yaml b/src/launchcontainers/configs/lc_config_0.4.yaml new file mode 100755 index 0000000..264d95d --- /dev/null +++ b/src/launchcontainers/configs/lc_config_0.4.yaml @@ -0,0 +1,241 @@ +# Input guide: +# replace the part of this template config.yaml with your own dataset +# Input value type: +# str: string, python will read if as string, type a space after : and input the string directly +# bool: boolean value, True of False, needs to be capitalize first letter +# int: integer, similar to string, type a space after : +# None: python None type, Usually used for optional arguement, don't put anything after column, not even "" + +general: + # general setting of launchcontainer soft + # Base directory of project + basedir: /Users/tiger/Desktop/VOTCLOC + # Name of bids directory, 1 level under basedir, must contain dataset_description + bidsdir_name: BIDS + # Directory contains 
singularity images + # if it is a python project, no need + containerdir: /bcbl/home/public/Gari/singularity_images + # Name of the container + # VALID OPTIONS: freesurferator, rtp2-preproc, rtp2-pipeline, anatrois, rtppreproc, rtp-pipeline, l1_glm, fmriprep + container: l1_glm + # Name of analysis folder + analysis_name: test_prepare_glm + # Place the computing will be held. + # VALID OPTIONS: local, BCBL, DIPC. + host: local + # Whether force to overwrite + force: True + # Verbosity of command-line console. If true, only print information at level: CRITICAL (based on python logging package) + print_command_only: False + # Log directory to store launchcontainers logging + # VALID OPTIONS: analysis_dir or full path you want + log_dir: analysis_dir + # Name of launchcontainers log file + log_filename: m1_test_0.4.10_l1_glm + +container_specific: + anatrois: + # Version identifier for container + version: 4.6.1-7.3.2 + # Pre-run freesurfer or not? + pre_fs: True + # Directory name of your pre-run freesurfer, this directory should be under /basedir/bidsdir/derivatives + prefs_dir_name: anatrois_4.6.1-7.3.2 + # Analysis name of pre-run freesurfer + prefs_analysis_name: "6prefs_from_fmriprep" + # A super identifier to find the pattern no need to change + prefs_zipname: '^anatrois_S.*\.zip$' + # optional + # Freesurfer annotefiles + annotfile: + # MNI roi zip file + mniroizip: + + rtppreproc: + # Version identifier for container + version: 1.2.0-3.0.3 + # anatrois or freesurferator dir, used to find the brainmask + precontainer_anat: anatrois_4.6.1-7.3.2 + # Analysis name + anat_analysis_name: fMRIprep_brainmask + # optional + # if your encoding direction DWI is multishell sequence + multishell: True + # If reverse phase encoding is applied in the sequence + # It checks if there is a reverse phase encoding acquisition + # Old dcm2nixx will not create empty bvec and bval files if there was an acquisition with just b0-s + rpe: True + + rtp-pipeline: + # Version identifier 
for container + version: 4.5.2-3.0.3 + # anatrois or freesurferator dir, used to find the brainmask and fs/ROIs + precontainer_anat: anatrois_4.6.1-7.3.2 + # Analysis name + anat_analysis_name: fulltract_anatrerun + # rtppreproc or rtp2-preproc dir, used to find the dwi.nii.gz, bvec and bval + precontainer_preproc: rtppreproc_1.2.0-3.0.3 + # Analysis name + preproc_analysis_name: 6sub_wrongbvec + + freesurferator: + # Version identifier for container + version: 0.2.0-7.4.1rc19 + # Pre-run freesurfer or not? + pre_fs: True + # Directory name of your pre-run freesurfer, this directory should be under /basedir/bidsdir/derivatives + prefs_dir_name: freesurferator_0.2.0-7.4.1rc19 + # Analysis name of pre-run freesurfer + prefs_analysis_name: control_points_02 + # A super identifier to find the pattern + prefs_zipname: '^freesurferator_S.*\.zip$' + # If you want to use the control points created in the previous analysis (control.dat), set this True: + control_points: False + # If you created control points, you'll have an unzipped folder in the output analysis. 
Fill prefs_unzipname + # with the name of the unzipped folder to let launchcontainers create a symbolic link to the control.dat + prefs_unzipname: 'S.*$' + # optional + # Freesurfer annotefiles + annotfile: + # MNI roi zip file + mniroizip: + + rtp2-preproc: + # Version identifier for container + version: 0.1.0_3.0.4rc31 + # anatrois or freesurferator dir, used to find the brainmask + precontainer_anat: freesurferator_0.2.0-7.4.1rc19 + # Analysis name + anat_analysis_name: control_points_02 + # optional + # if your encoding direction DWI is multishell sequence + multishell: True + # If reverse phase encoding is applied in the sequence + # It checks if there is a reverse phase encoding acquisition + # if not, launchcontainers will create mock files + rpe: True + # Full Path to qunatitative MRI maps, must be nifti format + qmap_nifti: /home/tlei/Desktop/FG.nii.gz + + rtp2-pipeline: + # Version identifier for container + version: 0.1.0_3.0.4rc20 + # anatrois or freesurferator dir, used to find the brainmask and fs/ROIs + precontainer_anat: freesurferator_0.2.0-7.4.1rc19 + # Analysis name + anat_analysis_name: control_points_02 + # rtppreproc or rtp2-preproc dir, used to find the dwi.nii.gz, bvec and bval + precontainer_preproc: rtp2-preproc_0.1.0_3.0.4rc31 + # Analysis name + preproc_analysis_name: control_points_02 + # optional + # Path to tractparams files, needs to be a .csv + tractparams: /home/tlei/tlei/LMC_DWI_course/scripts/tractparams_short_course.csv + # Path to brain.nii.gz of freesurfer If use fsmask or define manually, this option is set in case you need + fsmask: /home/tlei/Desktop/FG.nii.gz + # zip file for rtp2-pipeline + qmap_zip: /home/tlei/Desktop/annnnooott.zip + + l1_glm: + # Version identifier for container + version: default + # fmriprep folder name under derivatives + fmriprep_dir_name: fmriprep + # fmriprep analysis name (The input), used to get the preprocessed fMRI time-series + # output name is in section general:analysis_name + 
fmriprep_ana_name: rerun_nordic_fmap + # you want to use another freesurfer or not, usually we use the things under fmriprep sourcedata/freesurfer + pre_fs: True + # The dir name of freesurfer folder you want to use + pre_fs_full_path: freesurfer + # The directory of onset folders + onsetdir: /Users/tiger/Desktop/onset + # The folder structure of onset times, Kepa SPM or fLoc or BIDS, if BIDS, do nothing. Else, needs to do something in the prepare mode + onset_format: fLoc + # this one will work in prepare mode, if false, prepare mode will do nothing but keeping the data provenance + # if True, it will do the time-series smoothing using freesurfer/ or nilearn + smooth_time_series: False + # a list of FWHM kernel for doing the smoothing, this will only be used for the prepare mode + # usually I will do 2 and 4 + smooth_kernel: [2,4] + # if use the smoothed time-series. If True, will call the time_series_smooth_kernel + use_smoothed: False + # specify which smoothed time series you will use + time_series_smooth_kernel: 2 + +host_options: + # Default BCBL + BCBL: + # for SGE, it is always false + use_module: False + apptainer: apptainer/latest + maxwall: 10 + manager: sge + name: "anatrois" + # Dask worker options + # Total number of cores per job (it was core for BCBL) + cores: 6 + # Total amount of memory per job (it was mem for BCBL) + memory: 32G + # Number of Python processes per job + processes: 1 + # Network interface to use like eth0 or ib0 + interface: lo + # Number of seconds to wait if a worker can not find a scheduler + death-timeout: 100 + # Location of fast local storage like /scratch or $TMPDIR + local-directory: null + # SGE resource manager options + # It was que in BCBL + queue: long.q + project: null + walltime: 25:30:00' + extra: [] + env-extra: [] + job-extra: [] + resource-spec: null + bind_options: ['/bcbl', '/tmp','/scratch'] + + # Default DIPC + DIPC: + # Total amount of memory per job + memory: 32G + # SLURM queue + queue: regular + # Total 
number of cores per job + cores: 24 + # Walltime for the job + walltime: '22:00:00' + # for SLURM, it is always false + use_module: False + apptainer: Singularity/3.5.3-GCC-8.3.0 + manager: slurm + system: scratch + name: "anatrois" + tmpdir: /scratch/llecca/tmp + bind_options: ['/scratch'] + + # Local host options + local: + # Dask manager type + manager: local + # if the local machine use module load this option will give you different version of job-queue cmd + use_module: False + apptainer: apptainer/latest + # Copy the example list: for BCBL we need ['/bcbl', '/tmp', '/export']; for okazaki we need ['/fileserver', '/tmp'] + bind_options: ['/bcbl', '/tmp', '/export'] + # This can only be serial or parallel, any other options will make it fail. + launch_mode: parallel + # total cores you ask from your PC + n_cores: 7 + processes: False # default True + # Use Processes (processes=True): + # If your tasks are CPU-bound and you need to avoid the GIL. + # If tasks are memory-intensive and you want memory isolation to prevent memory leaks from affecting other tasks. + # Use Threads (processes=False): + # If your tasks are I/O-bound and can benefit from sharing memory between threads. + # If you have tasks that involve a lot of shared state or require low overhead in terms of process management. 
+ # Memory limit per worker + memory_limit: 8GiB + # If you used dask to launch pipelines, set it to 2, if you used dask to launch l1_glm, set it to an appropriate number, 4 or 6 + threads_per_worker: 2 diff --git a/src/launchcontainers/configs/example_lc_config.yaml b/src/launchcontainers/configs/lc_config_030.yaml similarity index 94% rename from src/launchcontainers/configs/example_lc_config.yaml rename to src/launchcontainers/configs/lc_config_030.yaml index 9190558..59b007d 100644 --- a/src/launchcontainers/configs/example_lc_config.yaml +++ b/src/launchcontainers/configs/lc_config_030.yaml @@ -27,6 +27,7 @@ general: print_command_only: False # Log directory to store launchcontainers logging # VALID OPTIONS: analysis_dir or full path you want + # This log dir will be used for dask_log and launchcontainer log log_dir: analysis_dir # Name of launchcontainers log file log_filename: lc_log @@ -57,8 +58,8 @@ container_specific: # Analysis name anat_analysis_name: fMRIprep_brainmask # optional - # if your encoding direction DWI is multishell sequence - multishell: True + # if your encoding direction DWI is multishell sequence and they are in seperate files + separated_shell_files: False # If reverse phase encoding is applied in the sequence # It checks if there is a reverse phase encoding acquisition # Old dcm2nixx will not create empty bvec and bval files if there was an acquisition with just b0-s @@ -106,8 +107,8 @@ container_specific: # Analysis name anat_analysis_name: control_points_02 # optional - # if your encoding direction DWI is multishell sequence - multishell: True + # if your encoding direction DWI is multishell sequence and they are in seperate files + separated_shell_files: False # If reverse phase encoding is applied in the sequence # It checks if there is a reverse phase encoding acquisition # if not, launchcontainers will create mock files @@ -188,7 +189,7 @@ host_options: # Copy the example list: for BCBL we need ['/bcbl', '/tmp', '/export']; for 
okazaki we need ['/fileserver', '/tmp'] bind_options: ['/bcbl', '/tmp', '/scratch', '/export'] manager: 'local' - # This can only be serial or parallel, any other options will make it fail. + # This can only be serial, parallel, or dask_worker any other options will make it fail. launch_mode: 'serial' # Arguments below only affect to parallel launch mode njobs: 5 diff --git a/src/launchcontainers/configs/sub_ses_list.txt b/src/launchcontainers/configs/sub_ses_list.txt new file mode 100644 index 0000000..17b1acd --- /dev/null +++ b/src/launchcontainers/configs/sub_ses_list.txt @@ -0,0 +1,8 @@ +sub,ses,RUN,anat,dwi,func +05,day1VA,False,True,True,True +05,day1VB,False,True,True,True +05,day2VA,False,True,True,True +05,day2VB,False,True,True,True +05,day3PF,True,True,True,True +05,day5BCBL,True,True,True,True +05,day6BCBL,True,True,True,True diff --git a/src/launchcontainers/prepare_inputs/dask_scheduler_config.py b/src/launchcontainers/dask_scheduler_config.py similarity index 80% rename from src/launchcontainers/prepare_inputs/dask_scheduler_config.py rename to src/launchcontainers/dask_scheduler_config.py index f562f47..48edecd 100755 --- a/src/launchcontainers/prepare_inputs/dask_scheduler_config.py +++ b/src/launchcontainers/dask_scheduler_config.py @@ -20,14 +20,13 @@ logger = logging.getLogger("Launchcontainers") -def initiate_cluster(jobqueue_config, n_job, logdir): +def initiate_cluster(jobqueue_config, optimal_n_workers, logdir): ''' Parameters ---------- jobqueue_config : dictionary read the jobquene_yaml from the yaml file - n_job : not clear what should it be - basically it's a quene specific thing, needs to check if it's dask specific. 
+ optimal_n_workers : number of workers you give to the dask cluster Returns ------- @@ -37,12 +36,9 @@ def initiate_cluster(jobqueue_config, n_job, logdir): ''' config.set(distributed__comm__timeouts__tcp="90s") config.set(distributed__comm__timeouts__connect="90s") - config.set(scheduler="single-threaded") config.set({"distributed.scheduler.allowed-failures": 50}) config.set(admin__tick__limit="3h") - #config.set({"distributed.worker.use-file-locking": False}) - - if jobqueue_config["manager"] in ["sge","slurm"] and not os.path.exists(logdir): + if not os.path.exists(logdir): os.makedirs(logdir) if "sge" in jobqueue_config["manager"]: @@ -68,7 +64,7 @@ def initiate_cluster(jobqueue_config, n_job, logdir): # shebang=jobqueue_config["shebang"], # python=None, # config_name=None, - # n_workers=n_job, + # n_workers=optimal_n_workers, # silence_logs=None, # asynchronous=None, # security=None, @@ -78,7 +74,7 @@ def initiate_cluster(jobqueue_config, n_job, logdir): # resource_spec=jobqueue_config["resource-spec"], walltime=jobqueue_config["walltime"])#, #job_extra_directives=job_extra_directives) - cluster_by_config.scale(jobs=n_job) + cluster_by_config.scale(jobs=optimal_n_workers) elif "slurm" in jobqueue_config["manager"]: envextra = [f"module load {jobqueue_config['apptainer']} ",\ @@ -94,18 +90,20 @@ def initiate_cluster(jobqueue_config, n_job, logdir): death_timeout = 300,#jobqueue_config["death-timeout"], walltime=jobqueue_config["walltime"], job_extra_directives = ["--export=ALL"]) - cluster_by_config.scale(jobs=n_job) + cluster_by_config.scale(jobs=optimal_n_workers) elif "local" in jobqueue_config["manager"]: - logger.debug("defining local cluster") - cluster_by_config = LocalCluster( - processes = False, - n_workers = n_job, - threads_per_worker = jobqueue_config["threads_per_worker"], - memory_limit = jobqueue_config["memory_limit"], - ) - + launch_mode=jobqueue_config['launch_mode'] + if launch_mode=="parallel": + logger.debug("defining local cluster in 
parallel mode") + cluster_by_config = LocalCluster( + processes = jobqueue_config["processes"], + n_workers = optimal_n_workers, + threads_per_worker = jobqueue_config["threads_per_worker"], + memory_limit = jobqueue_config["memory_limit"], + ) + else: - logger.warning( + logger.error( "dask configuration wasn't detected, " "if you are using a cluster please look at " "the jobqueue YAML example, modify it so it works in your cluster " @@ -114,14 +112,11 @@ def initiate_cluster(jobqueue_config, n_job, logdir): "You can find a jobqueue YAML example in the pySPFM/jobqueue.yaml file." ) cluster_by_config = None - # print(f"----------------This is the self report of function initiate_cluster()\n, the cluster was defined as the {jobqueue_config['manager']}cluster \n") - # print(f"----------------------------The cluster job_scipt is {cluster_by_config.job_script()} \n") - # print(f"----check for job scale, the number of jobs is {n_job}") - # print(f"-----under of initiate_cluster() report the cluster is {cluster_by_config}") return cluster_by_config -def dask_scheduler(jobqueue_config, n_job, logdir): +def dask_scheduler(jobqueue_config, optimal_n_workers, logdir): + if jobqueue_config is None: logger.warning( "dask configuration wasn't detected, " @@ -133,8 +128,14 @@ def dask_scheduler(jobqueue_config, n_job, logdir): ) cluster = None else: - cluster = initiate_cluster(jobqueue_config, n_job, logdir) - + if not "local" in jobqueue_config["manager"]: + cluster = initiate_cluster(jobqueue_config, optimal_n_workers, logdir) + + elif "local" in jobqueue_config["manager"]: + launch_mode=jobqueue_config['launch_mode'] + if not launch_mode=="serial": + cluster = initiate_cluster(jobqueue_config, optimal_n_workers, logdir) + client = None if cluster is None else Client(cluster) return client, cluster diff --git a/src/launchcontainers/generate_command.py b/src/launchcontainers/generate_command.py new file mode 100644 index 0000000..fb51613 --- /dev/null +++ 
b/src/launchcontainers/generate_command.py @@ -0,0 +1,213 @@ +import os.path as op +import logging + +logger = logging.getLogger("Launchcontainers") + +def dwi_command( + lc_config, sub, ses, ananlysis_dir +): + """Puts together the command to send to the container. + + Args: + lc_config (str): _description_ + sub (str): _description_ + ses (str): _description_ + ananlysis_dir (str): _description_ + + Raises: + ValueError: Raised in presence of a faulty config.yaml file, or when the formed command is not recognized. + + Returns: + _type_: _description_ + """ + + container = lc_config["general"]["container"] + host = lc_config["general"]["host"] + containerdir = lc_config["general"]["containerdir"] + + # Information relevant to the host and container + jobqueue_config = lc_config["host_options"][host] + version = lc_config["container_specific"][container]["version"] + use_module = jobqueue_config["use_module"] + bind_options = jobqueue_config["bind_options"] + + # Location of the Singularity Image File (.sif) + container_name = op.join(containerdir, f"{container}_{version}.sif") + + # Define the directory and the file name to output the log of each subject + logdir = op.join(ananlysis_dir, "sub-" + sub, "ses-" + ses, "output", "log") + logfilename = f"{logdir}/t-{container}-sub-{sub}_ses-{ses}" + + subject_derivatives_path = op.join(ananlysis_dir, f"sub-{sub}", f"ses-{ses}") + + # Define the cmd goes before the main command + bind_cmd = "" + for bind in bind_options: + bind_cmd += f"--bind {bind}:{bind} " + + env_cmd = "" + if host == "local": + if use_module == True: + env_cmd = f"module load {jobqueue_config['apptainer']} && " + + # Define the main command + if container in ["anatrois", "rtppreproc", "rtp-pipeline"]: + logger.info("\n" + "start to generate the DWI PIPELINE command") + logger.debug( + f"\n the sub is {sub} \n the ses is {ses} \n the analysis dir is {ananlysis_dir}" + ) + cmd = ( + f"{env_cmd}singularity run -e --no-home {bind_cmd}" + f"--bind 
{subject_derivatives_path}/input:/flywheel/v0/input:ro " + f"--bind {subject_derivatives_path}/output:/flywheel/v0/output " + f"--bind {subject_derivatives_path}/output/log/config.json:/flywheel/v0/config.json " + f"{container_name} 1>> {logfilename}.o 2>> {logfilename}.e " + ) + + if container == "freesurferator": + logger.info("\n" + "FREESURFERATOR command") + logger.debug( + f"\n the sub is {sub} \n the ses is {ses} \n the analysis dir is {ananlysis_dir}" + ) + cmd = ( + f"{env_cmd}apptainer run --containall --pwd /flywheel/v0 {bind_cmd}" + f"--bind {subject_derivatives_path}/input:/flywheel/v0/input:ro " + f"--bind {subject_derivatives_path}/output:/flywheel/v0/output " + f"--bind {subject_derivatives_path}/work:/flywheel/v0/work " + f"--bind {subject_derivatives_path}/output/log/config.json:/flywheel/v0/config.json " + f"--env PATH=/opt/freesurfer/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/opt/freesurfer/fsfast/bin:/opt/freesurfer/tktools:/opt/freesurfer/mni/bin:/sbin:/bin:/opt/ants/bin " + f"--env LANG=C.UTF-8 " + f"--env GPG_KEY=E3FF2839C048B25C084DEBE9B26995E310250568 " + f"--env PYTHON_VERSION=3.9.15 " + f"--env PYTHON_PIP_VERSION=22.0.4 " + f"--env PYTHON_SETUPTOOLS_VERSION=58.1.0 " + f"--env PYTHON_GET_PIP_URL=https://github.com/pypa/get-pip/raw/66030fa03382b4914d4c4d0896961a0bdeeeb274/public/get-pip.py " + f"--env PYTHON_GET_PIP_SHA256=1e501cf004eac1b7eb1f97266d28f995ae835d30250bec7f8850562703067dc6 " + f"--env FLYWHEEL=/flywheel/v0 " + f"--env ANTSPATH=/opt/ants/bin/ " + f"--env FREESURFER_HOME=/opt/freesurfer " + f"--env FREESURFER=/opt/freesurfer " + f"--env DISPLAY=:50.0 " + f"--env FS_LICENSE=/flywheel/v0/work/license.txt " + f"--env OS=Linux " + f"--env FS_OVERRIDE=0 " + f"--env FSF_OUTPUT_FORMAT=nii.gz " + f"--env MNI_DIR=/opt/freesurfer/mni " + f"--env LOCAL_DIR=/opt/freesurfer/local " + f"--env FSFAST_HOME=/opt/freesurfer/fsfast " + f"--env MINC_BIN_DIR=/opt/freesurfer/mni/bin " + f"--env 
MINC_LIB_DIR=/opt/freesurfer/mni/lib " + f"--env MNI_DATAPATH=/opt/freesurfer/mni/data " + f"--env FMRI_ANALYSIS_DIR=/opt/freesurfer/fsfast " + f"--env PERL5LIB=/opt/freesurfer/mni/lib/perl5/5.8.5 " + f"--env MNI_PERL5LIB=/opt/freesurfer/mni/lib/perl5/5.8.5 " + f"--env XAPPLRESDIR=/opt/freesurfer/MCRv97/X11/app-defaults " + f"--env MCR_CACHE_ROOT=/flywheel/v0/output " + f"--env MCR_CACHE_DIR=/flywheel/v0/output/.mcrCache9.7 " + f"--env FSL_OUTPUT_FORMAT=nii.gz " + f"--env ANTS_VERSION=v2.4.2 " + f"--env QT_QPA_PLATFORM=xcb " + f"--env PWD=/flywheel/v0 " + f"{container_name} " + f"-c python run.py 1> {logfilename}.o 2> {logfilename}.e " + ) + + if container == "rtp2-preproc": + logger.info("\n" + "rtp2-preprc command") + logger.debug( + f"\n the sub is {sub} \n the ses is {ses} \n the analysis dir is {ananlysis_dir}" + ) + + cmd = ( + f"{env_cmd}apptainer run --containall --pwd /flywheel/v0 {bind_cmd}" + f"--bind {subject_derivatives_path}/input:/flywheel/v0/input:ro " + f"--bind {subject_derivatives_path}/output:/flywheel/v0/output " + # f"--bind {subject_derivatives_path}/work:/flywheel/v0/work " + f"--bind {subject_derivatives_path}/output/log/config.json:/flywheel/v0/config.json " + f"--env FLYWHEEL=/flywheel/v0 " + f"--env LD_LIBRARY_PATH=/opt/fsl/lib: " + f"--env FSLWISH=/opt/fsl/bin/fslwish " + f"--env FSLTCLSH=/opt/fsl/bin/fsltclsh " + f"--env FSLMULTIFILEQUIT=TRUE " + f"--env FSLOUTPUTTYPE=NIFTI_GZ " + f"--env FSLDIR=/opt/fsl " + f"--env FREESURFER_HOME=/opt/freesurfer " + f"--env ARTHOME=/opt/art " + f"--env ANTSPATH=/opt/ants/bin " + f"--env PYTHON_GET_PIP_SHA256=1e501cf004eac1b7eb1f97266d28f995ae835d30250bec7f8850562703067dc6 " + f"--env PYTHON_GET_PIP_URL=https://github.com/pypa/get-pip/raw/66030fa03382b4914d4c4d0896961a0bdeeeb274/public/get-pip.py " + f"--env PYTHON_PIP_VERSION=22.0.4 " + f"--env PYTHON_VERSION=3.9.15 " + f"--env GPG_KEY=E3FF2839C048B25C084DEBE9B26995E310250568 " + f"--env LANG=C.UTF-8 " + f"--env 
PATH=/opt/mrtrix3/bin:/opt/ants/bin:/opt/art/bin:/opt/fsl/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin " + f"--env PYTHON_SETUPTOOLS_VERSION=58.1.0 " + f"--env DISPLAY=:50.0 " + f"--env QT_QPA_PLATFORM=xcb " + f"--env FS_LICENSE=/opt/freesurfer/license.txt " + f"--env PWD=/flywheel/v0 " + f"{container_name} " + f"-c python run.py 1> {logfilename}.o 2> {logfilename}.e " + ) + + if container == "rtp2-pipeline": + logger.info("\n" + "rtp2-pipeline command") + logger.debug( + f"\n the sub is {sub} \n the ses is {ses} \n the analysis dir is {ananlysis_dir}" + ) + + cmd = ( + f"{env_cmd}apptainer run --containall --pwd /flywheel/v0 {bind_cmd}" + f"--bind {subject_derivatives_path}/input:/flywheel/v0/input:ro " + f"--bind {subject_derivatives_path}/output:/flywheel/v0/output " + # f"--bind {subject_derivatives_path}/work:/flywheel/v0/work " + f"--bind {subject_derivatives_path}/output/log/config.json:/flywheel/v0/config.json " + f"--env PATH=/opt/mrtrix3/bin:/opt/ants/bin:/opt/art/bin:/opt/fsl/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin " + f"--env LANG=C.UTF-8 " + f"--env GPG_KEY=E3FF2839C048B25C084DEBE9B26995E310250568 " + f"--env PYTHON_VERSION=3.9.15 " + f"--env PYTHON_PIP_VERSION=22.0.4 " + f"--env PYTHON_SETUPTOOLS_VERSION=58.1.0 " + f"--env PYTHON_GET_PIP_URL=https://github.com/pypa/get-pip/raw/66030fa03382b4914d4c4d0896961a0bdeeeb274/public/get-pip.py " + f"--env PYTHON_GET_PIP_SHA256=1e501cf004eac1b7eb1f97266d28f995ae835d30250bec7f8850562703067dc6 " + f"--env ANTSPATH=/opt/ants/bin " + f"--env ARTHOME=/opt/art " + f"--env FREESURFER_HOME=/opt/freesurfer " + f"--env FSLDIR=/opt/fsl " + f"--env FSLOUTPUTTYPE=NIFTI_GZ " + f"--env FSLMULTIFILEQUIT=TRUE " + f"--env FSLTCLSH=/opt/fsl/bin/fsltclsh " + f"--env FSLWISH=/opt/fsl/bin/fslwish " + f"--env LD_LIBRARY_PATH=/opt/mcr/v99/runtime/glnxa64:/opt/mcr/v99/bin/glnxa64:/opt/mcr/v99/sys/os/glnxa64:/opt/mcr/v99/extern/bin/glnxa64:/opt/fsl/lib: " + 
f"--env FLYWHEEL=/flywheel/v0 " + f"--env TEMPLATES=/templates " + f"--env XAPPLRESDIR=/opt/mcr/v99/X11/app-defaults " + f"--env MCR_CACHE_FOLDER_NAME=/flywheel/v0/output/.mcrCache9.9 " + f"--env MCR_CACHE_ROOT=/flywheel/v0/output " + f"--env MRTRIX_TMPFILE_DIR=/flywheel/v0/output/tmp " + f"--env PWD=/flywheel/v0 " + f"{container_name} " + f"-c python run.py 1> {logfilename}.o 2> {logfilename}.e " + ) + + # If after all configuration, we do not have command, raise an error + if cmd is None: + logger.error( + "\n" + + f"the DWI PIPELINE command is not assigned, please check your config.yaml[general][host] session\n" + ) + raise ValueError("cmd is not defined, aborting") + + + return cmd +def py_command(host): + #env_cmd = "conda init && conda activate votcloc &&" + + # for packaging, I don't think we need this, but for testing from repo, we need BCBL python path + if host == "BCBL": + env_cmd= 'export PYTHONPATH=/bcbl/home/home_n-z/tlei/soft/MRIworkflow/Package/src:$PYTHONPATH && ' + if host == "DIPC": + env_cmd='' + if host == "local": + env_cmd='' + return env_cmd \ No newline at end of file diff --git a/src/launchcontainers/launch.py b/src/launchcontainers/launch.py index 0aa653f..f5a457c 100755 --- a/src/launchcontainers/launch.py +++ b/src/launchcontainers/launch.py @@ -15,340 +15,444 @@ """ import os import os.path as op -import subprocess as sp -from subprocess import Popen -import numpy as np import logging -import math - -# modules in lc - +import dask from bids import BIDSLayout -from dask.distributed import progress +import subprocess -# for package mode, the import needs to import launchcontainer module -from launchcontainers.prepare_inputs import dask_scheduler_config as dsq -from launchcontainers.prepare_inputs import prepare as prepare -from launchcontainers.prepare_inputs import utils as do +#package import +from launchcontainers import utils as do +from launchcontainers import generate_command as gen_cmd +from launchcontainers import 
dask_scheduler_config as config_dask +from launchcontainers.prepare_inputs import prepare +from launchcontainers.py_pipeline import l1_glm -# for testing mode through , we can use relative import -# from prepare_inputs import dask_scheduler_config as dsq -# from prepare_inputs import prepare as prepare -# from prepare_inputs import utils as do +# repo testing import +import utils as do +import generate_command as gen_cmd +from prepare_inputs import prepare +from py_pipeline import l1_glm +import dask_scheduler_config as config_dask -logger = logging.getLogger("Launchcontainers") +logger = logging.getLogger("Launchcontainers") -# %% launchcontainers -def generate_cmd( - lc_config, sub, ses, ananlysis_dir, lst_container_specific_configs, run_lc +def prepare_dask_futures( + analysis_dir, + lc_config, + sub_ses_list, + dict_store_cs_configs ): - """Puts together the command to send to the container. + """ + This function have 2 function + 1. prepare the command and print it + 2. append the command into a list for dask to gather them and launch them Args: - lc_config (str): _description_ - sub (str): _description_ - ses (str): _description_ - ananlysis_dir (str): _description_ - lst_container_specific_configs (list): _description_ - run_lc (str): _description_ - - Raises: - ValueError: Raised in presence of a faulty config.yaml file, or when the formed command is not recognized. 
- - Returns: - _type_: _description_ + analysis_dir (str): _description_ + lc_config (str): path to launchcontainer config.yaml file + sub_ses_list (_type_): parsed CSV containing the subject list to be analyzed, and the analysis options + parser_namespace (argparse.Namespace): command line arguments """ + logger.info("\n" + "#####################################################\n") - # Relevant directories - # All other relevant directories stem from this one - basedir = lc_config["general"]["basedir"] - - homedir = os.path.join(basedir, "singularity_home") - container = lc_config["general"]["container"] + # Get the host and jobqueue config info from the config.yaml file + container=lc_config["general"]["container"] + containerdir=lc_config["general"]["containerdir"] host = lc_config["general"]["host"] - containerdir = lc_config["general"]["containerdir"] - - # Information relevant to the host and container jobqueue_config = lc_config["host_options"][host] - version = lc_config["container_specific"][container]["version"] - use_module = jobqueue_config["use_module"] - bind_options = jobqueue_config["bind_options"] + logger.debug(f"\n This is the job_queue config {jobqueue_config}") + daskworker_logdir = os.path.join(analysis_dir, "daskworker_log") + launch_mode=jobqueue_config['launch_mode'] + # Count how many jobs we need to launch from sub_ses_list + n_jobs = sub_ses_list.shape[0] + # n_worker should be constrained by n_cores you have in total and core_per_worker + # add a check here + total_core_avail=jobqueue_config["n_cores"] + threads_per_worker=jobqueue_config["threads_per_worker"] + # Calculate the optimal number of workers + optimal_n_workers = min(total_core_avail // threads_per_worker, n_jobs) + + # Echo the command + # if in local and you want to do it serially + # you don't need set up dask LocalCluster or dask Client, you just set up the scheduler to synchronous and use dask.compute() to compute the job + # import dask 
dask.config.set(scheduler='synchronous') # overwrite default with single-threaded scheduler + # if in local and you want to do it parallel, you need to consider total cores and also you number of jobs + # if in SGE or SLURM, you will never do it in serial, so you will use job-queue to set up your job scripts + logger.critical( + f"\n Launchcontainers.py was run in PREPARATION mode (without option --run_lc)\n" + f"Please check that: \n" + f" (1) launchcontainers.py prepared the input data properly\n" + f" (2) the command created for each subject is properly formed\n" + f" (you can copy the command for one subject and launch it " + f"on the prompt before you launch multiple subjects\n" + f" (3) Once the check is done, launch the jobs by adding --run_lc to the first command you executed.\n" + ) + launch_mode=jobqueue_config['launch_mode'] + if "local" in jobqueue_config["manager"] and launch_mode=="serial": + dask.config.set(scheduler="single-threaded") + else: + # If the host is not local, print the job script to be launched in the cluster. 
+ client, cluster = config_dask.dask_scheduler(jobqueue_config, optimal_n_workers, daskworker_logdir) + current_scheduler = dask.config.get('scheduler') + + + logger.info(f"The scheduler after in the launch is {dask.config.get('scheduler')} ") + + if host != "local": + logger.critical( + f"The cluster job script for this command is:\n" + f"{cluster.job_script()}" + ) + client.close() + cluster.close() + logger.info(f"Client is {client} \n Cluster is {cluster}") + + elif host == "local": + if launch_mode == "parallel": + logger.critical( + f"The cluster job script for this command is:\n" + f"{cluster}" + ) + client.close() + cluster.close() + else: + logger.critical( + f"Your launch_mode is {launch_mode}, it will not controlled by dask but go ahead" + ) - # Location of the Singularity Image File (.sif) - container_name = os.path.join(containerdir, f"{container}_{version}.sif") - # Define the directory and the file name to output the log of each subject - logdir = os.path.join(ananlysis_dir, "sub-" + sub, "ses-" + ses, "output", "log") - logfilename = f"{logdir}/t-{container}-sub-{sub}_ses-{ses}" + + # Iterate over the provided subject list + + if container in [ + "anatrois", + "rtppreproc", + "rtp-pipeline", + "freesurferator", + "rtp2-preproc", + "rtp2-pipeline" + ]: + future_dict={} + future_dict['optimal_n_workers']=optimal_n_workers + future_dict['container']=container + future_dict['logdir']=logdir + lc_configs = [] + subs = [] + sess = [] + analysis_dirs = [] + commands = [] + for row in sub_ses_list.itertuples(index=True, name="Pandas"): + sub = row.sub + ses = row.ses - path_to_sub_derivatives = os.path.join(ananlysis_dir, f"sub-{sub}", f"ses-{ses}") + # Append config, subject, session, and path info in corresponding lists + lc_configs.append(lc_config) + subs.append(sub) + sess.append(ses) + analysis_dirs.append(analysis_dir) - bind_cmd = "" - for bind in bind_options: - bind_cmd += f"--bind {bind}:{bind} " + # This cmd is only for print the command + 
command = gen_cmd.dwi_command( + lc_config, + sub, + ses, + analysis_dir + ) + commands.append(command) + + logger.critical( + f"\nCOMMAND for subject-{sub}, and session-{ses}:\n" + f"{command}\n\n" + ) + + future_dict['lc_configs']=lc_configs + future_dict['subs']=subs + future_dict['sess']=sess + future_dict['analysis_dirs']=analysis_dirs + future_dict['commands']=commands + + elif container in ['l1_glm']: + + commands=[] + future_dict={} + future_dict['optimal_n_workers']=optimal_n_workers + future_dict['container']=container + future_dict['logdir']=logdir + env_cmd=gen_cmd.py_command(host) + import pkg_resources + + py_pipeline_dir = pkg_resources.resource_filename('launchcontainers', 'py_pipeline') + + for row in sub_ses_list.itertuples(index=True, name="Pandas"): + sub = row.sub + ses = row.ses + + command= f"{env_cmd}python {py_pipeline_dir}/l1_glm.py --subject {sub} --session {ses} --lc_config {dict_store_cs_configs['lc_yaml_path']} --l1_glm_yaml {dict_store_cs_configs['config_path']} " + commands.append(command) + logger.critical( + f"\nCOMMAND for subject-{sub}, and session-{ses}:\n" + f"{command}\n\n" + ) + future_dict['commands']=commands + return future_dict +def run_command(cmd, cmd_id): + """ + Run a Singularity (Apptainer) command using subprocess and log stdout and stderr. + Args: + cmd (str): The Singularity command to run. + cmd_id (int): Unique identifier for the command to distinguish log entries. + Returns: + tuple: stdout and stderr of the command. 
+ """ + logger.info(f"Executing command {cmd_id}: {cmd}") + process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) - env_cmd = "" - if host == "local": - if use_module == True: - env_cmd = f"module load {jobqueue_config['apptainer']} &&" + stdout, stderr = process.communicate() - if container in ["anatrois", "rtppreproc", "rtp-pipeline"]: - logger.info("\n" + "start to generate the DWI PIPELINE command") - logger.debug( - f"\n the sub is {sub} \n the ses is {ses} \n the analysis dir is {ananlysis_dir}" - ) + if stdout: + logger.info(f"Command {cmd_id} stdout: {stdout.strip()}") + if stderr: + logger.error(f"Command {cmd_id} stderr: {stderr.strip()}") - cmd = ( - f"{env_cmd} singularity run -e --no-home {bind_cmd}" - f"--bind {path_to_sub_derivatives}/input:/flywheel/v0/input:ro " - f"--bind {path_to_sub_derivatives}/output:/flywheel/v0/output " - f"--bind {path_to_sub_derivatives}/output/log/config.json:/flywheel/v0/config.json " - f"{container_name} 1>> {logfilename}.o 2>> {logfilename}.e " + return stdout.strip(), stderr.strip() +def run_dask( + jobqueue_config, + future_dict + ): + + optimal_n_workers=future_dict['optimal_n_workers'] + commands=future_dict['commands'] + launch_mode=jobqueue_config['launch_mode'] + logdir=future_dict['logdir'] + if "local" in jobqueue_config["manager"] and launch_mode=="serial" : + dask.config.set(scheduler="single-threaded") + tasks=[dask.delayed(run_command)(cmd,i) for i, cmd in enumerate(commands)] + results=dask.compute(*tasks) + # Print the results + print("Results:", results) + for stdout, stderr in results: + if stdout: + logger.info(f"Container stdout: {stdout}") + if stderr: + logger.error(f"Container stderr: {stderr}") + logger.critical(f"launchcontainer is running in local serial mode") + else: + # If the host is not local, print the job script to be launched in the cluster. 
+ client, cluster = config_dask.dask_scheduler(jobqueue_config, optimal_n_workers, logdir) + logger.info( + "---this is the cluster and client\n" + f"{client} \n cluster: {cluster} \n" ) - if container == "freesurferator": - logger.info("\n" + "FREESURFERATOR command") - logger.debug( - f"\n the sub is {sub} \n the ses is {ses} \n the analysis dir is {ananlysis_dir}" - ) + # Compose the command to run in the cluster + futures = client.map(run_command,commands, range(len(commands))) + logger.info("Dask dashboard is available at:", cluster.dashboard_link) - cmd = ( - f"{env_cmd} apptainer run --containall --pwd /flywheel/v0 {bind_cmd}" - f"--bind {path_to_sub_derivatives}/input:/flywheel/v0/input:ro " - f"--bind {path_to_sub_derivatives}/output:/flywheel/v0/output " - f"--bind {path_to_sub_derivatives}/work:/flywheel/v0/work " - f"--bind {path_to_sub_derivatives}/output/log/config.json:/flywheel/v0/config.json " - f"--env PATH=/opt/freesurfer/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/opt/freesurfer/fsfast/bin:/opt/freesurfer/tktools:/opt/freesurfer/mni/bin:/sbin:/bin:/opt/ants/bin " - f"--env LANG=C.UTF-8 " - f"--env GPG_KEY=E3FF2839C048B25C084DEBE9B26995E310250568 " - f"--env PYTHON_VERSION=3.9.15 " - f"--env PYTHON_PIP_VERSION=22.0.4 " - f"--env PYTHON_SETUPTOOLS_VERSION=58.1.0 " - f"--env PYTHON_GET_PIP_URL=https://github.com/pypa/get-pip/raw/66030fa03382b4914d4c4d0896961a0bdeeeb274/public/get-pip.py " - f"--env PYTHON_GET_PIP_SHA256=1e501cf004eac1b7eb1f97266d28f995ae835d30250bec7f8850562703067dc6 " - f"--env FLYWHEEL=/flywheel/v0 " - f"--env ANTSPATH=/opt/ants/bin/ " - f"--env FREESURFER_HOME=/opt/freesurfer " - f"--env FREESURFER=/opt/freesurfer " - f"--env DISPLAY=:50.0 " - f"--env FS_LICENSE=/flywheel/v0/work/license.txt " - f"--env OS=Linux " - f"--env FS_OVERRIDE=0 " - f"--env FSF_OUTPUT_FORMAT=nii.gz " - f"--env MNI_DIR=/opt/freesurfer/mni " - f"--env LOCAL_DIR=/opt/freesurfer/local " - f"--env 
FSFAST_HOME=/opt/freesurfer/fsfast " - f"--env MINC_BIN_DIR=/opt/freesurfer/mni/bin " - f"--env MINC_LIB_DIR=/opt/freesurfer/mni/lib " - f"--env MNI_DATAPATH=/opt/freesurfer/mni/data " - f"--env FMRI_ANALYSIS_DIR=/opt/freesurfer/fsfast " - f"--env PERL5LIB=/opt/freesurfer/mni/lib/perl5/5.8.5 " - f"--env MNI_PERL5LIB=/opt/freesurfer/mni/lib/perl5/5.8.5 " - f"--env XAPPLRESDIR=/opt/freesurfer/MCRv97/X11/app-defaults " - f"--env MCR_CACHE_ROOT=/flywheel/v0/output " - f"--env MCR_CACHE_DIR=/flywheel/v0/output/.mcrCache9.7 " - f"--env FSL_OUTPUT_FORMAT=nii.gz " - f"--env ANTS_VERSION=v2.4.2 " - f"--env QT_QPA_PLATFORM=xcb " - f"--env PWD=/flywheel/v0 " - f"{container_name} " - f"-c python run.py 1> {logfilename}.o 2> {logfilename}.e " - ) + # Wait for all jobs to complete + results = client.gather(futures) + # Print job results + for result in results: + print(result) + for stdout, stderr in results: + if stdout: + logger.info(f"Container stdout: {stdout}") + if stderr: + logger.error(f"Container stderr: {stderr}") + client.close() + cluster.close() - if container == "rtp2-preproc": - logger.info("\n" + "rtp2-preprc command") - logger.debug( - f"\n the sub is {sub} \n the ses is {ses} \n the analysis dir is {ananlysis_dir}" - ) + logger.critical("\n" + "launchcontainer finished, all the jobs are done") - cmd = ( - f"{env_cmd} apptainer run --containall --pwd /flywheel/v0 {bind_cmd}" - f"--bind {path_to_sub_derivatives}/input:/flywheel/v0/input:ro " - f"--bind {path_to_sub_derivatives}/output:/flywheel/v0/output " - # f"--bind {path_to_sub_derivatives}/work:/flywheel/v0/work " - f"--bind {path_to_sub_derivatives}/output/log/config.json:/flywheel/v0/config.json " - f"--env FLYWHEEL=/flywheel/v0 " - f"--env LD_LIBRARY_PATH=/opt/fsl/lib: " - f"--env FSLWISH=/opt/fsl/bin/fslwish " - f"--env FSLTCLSH=/opt/fsl/bin/fsltclsh " - f"--env FSLMULTIFILEQUIT=TRUE " - f"--env FSLOUTPUTTYPE=NIFTI_GZ " - f"--env FSLDIR=/opt/fsl " - f"--env FREESURFER_HOME=/opt/freesurfer " - f"--env 
ARTHOME=/opt/art " - f"--env ANTSPATH=/opt/ants/bin " - f"--env PYTHON_GET_PIP_SHA256=1e501cf004eac1b7eb1f97266d28f995ae835d30250bec7f8850562703067dc6 " - f"--env PYTHON_GET_PIP_URL=https://github.com/pypa/get-pip/raw/66030fa03382b4914d4c4d0896961a0bdeeeb274/public/get-pip.py " - f"--env PYTHON_PIP_VERSION=22.0.4 " - f"--env PYTHON_VERSION=3.9.15 " - f"--env GPG_KEY=E3FF2839C048B25C084DEBE9B26995E310250568 " - f"--env LANG=C.UTF-8 " - f"--env PATH=/opt/mrtrix3/bin:/opt/ants/bin:/opt/art/bin:/opt/fsl/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin " - f"--env PYTHON_SETUPTOOLS_VERSION=58.1.0 " - f"--env DISPLAY=:50.0 " - f"--env QT_QPA_PLATFORM=xcb " - f"--env FS_LICENSE=/opt/freesurfer/license.txt " - f"--env PWD=/flywheel/v0 " - f"{container_name} " - f"-c python run.py 1> {logfilename}.o 2> {logfilename}.e " - ) + +# %% main() +def main(): + parser_namespace,parse_dict = do.get_parser() + copy_configs=parser_namespace.copy_configs + # Check if download_configs argument is provided + if copy_configs: + # Ensure the directory exists + if not os.path.exists(copy_configs): + os.makedirs(copy_configs) + launchcontainers_version = do.copy_configs(copy_configs) + # # Use the mocked version function for testing + # launchcontainers_version = do.get_mocked_launchcontainers_version() + + # if launchcontainers_version is None: + # raise ValueError("Unable to determine launchcontainers version.") + # do.download_configs(launchcontainers_version, download_configs) + else: + # Proceed with normal main functionality + print("Executing main functionality with arguments") + # Your main function logic here + # e.g., launch_container(args.other_arg) + # read ymal and setup the launchcontainer program + parser_namespace,_ = do.get_parser() + lc_config_path = parser_namespace.lc_config + lc_config = do.read_yaml(lc_config_path) + run_lc = parser_namespace.run_lc + verbose = parser_namespace.verbose + debug = parser_namespace.debug + # Get general 
information from the config.yaml file + basedir=lc_config["general"]["basedir"] + bidsdir_name=lc_config["general"]["bidsdir_name"] + container=lc_config["general"]["container"] + analysis_name=lc_config["general"]["analysis_name"] + host=lc_config["general"]["host"] + print_command_only=lc_config["general"]["print_command_only"] + log_dir=lc_config["general"]["log_dir"] + log_filename=lc_config["general"]["log_filename"] - if container == "rtp2-pipeline": - logger.info("\n" + "rtp2-pipeline command") - logger.debug( - f"\n the sub is {sub} \n the ses is {ses} \n the analysis dir is {ananlysis_dir}" - ) + version = lc_config["container_specific"][container]["version"] + jobqueue_config = lc_config["host_options"][host] + # get stuff from subseslist for future jobs scheduling + sub_ses_list_path = parser_namespace.sub_ses_list + sub_ses_list,num_of_true_run = do.read_df(sub_ses_list_path) - cmd = ( - f"{env_cmd} apptainer run --containall --pwd /flywheel/v0 {bind_cmd}" - f"--bind {path_to_sub_derivatives}/input:/flywheel/v0/input:ro " - f"--bind {path_to_sub_derivatives}/output:/flywheel/v0/output " - # f"--bind {path_to_sub_derivatives}/work:/flywheel/v0/work " - f"--bind {path_to_sub_derivatives}/output/log/config.json:/flywheel/v0/config.json " - f"--env PATH=/opt/mrtrix3/bin:/opt/ants/bin:/opt/art/bin:/opt/fsl/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin " - f"--env LANG=C.UTF-8 " - f"--env GPG_KEY=E3FF2839C048B25C084DEBE9B26995E310250568 " - f"--env PYTHON_VERSION=3.9.15 " - f"--env PYTHON_PIP_VERSION=22.0.4 " - f"--env PYTHON_SETUPTOOLS_VERSION=58.1.0 " - f"--env PYTHON_GET_PIP_URL=https://github.com/pypa/get-pip/raw/66030fa03382b4914d4c4d0896961a0bdeeeb274/public/get-pip.py " - f"--env PYTHON_GET_PIP_SHA256=1e501cf004eac1b7eb1f97266d28f995ae835d30250bec7f8850562703067dc6 " - f"--env ANTSPATH=/opt/ants/bin " - f"--env ARTHOME=/opt/art " - f"--env FREESURFER_HOME=/opt/freesurfer " - f"--env FSLDIR=/opt/fsl " - f"--env 
FSLOUTPUTTYPE=NIFTI_GZ " - f"--env FSLMULTIFILEQUIT=TRUE " - f"--env FSLTCLSH=/opt/fsl/bin/fsltclsh " - f"--env FSLWISH=/opt/fsl/bin/fslwish " - f"--env LD_LIBRARY_PATH=/opt/mcr/v99/runtime/glnxa64:/opt/mcr/v99/bin/glnxa64:/opt/mcr/v99/sys/os/glnxa64:/opt/mcr/v99/extern/bin/glnxa64:/opt/fsl/lib: " - f"--env FLYWHEEL=/flywheel/v0 " - f"--env TEMPLATES=/templates " - f"--env XAPPLRESDIR=/opt/mcr/v99/X11/app-defaults " - f"--env MCR_CACHE_FOLDER_NAME=/flywheel/v0/output/.mcrCache9.9 " - f"--env MCR_CACHE_ROOT=/flywheel/v0/output " - f"--env MRTRIX_TMPFILE_DIR=/flywheel/v0/output/tmp " - f"--env PWD=/flywheel/v0 " - f"{container_name} " - f"-c python run.py 1> {logfilename}.o 2> {logfilename}.e " - ) + if log_dir=="analysis_dir": + log_dir=op.join(basedir,bidsdir_name,'derivatives',f'{container}_{version}',f"analysis-{analysis_name}") + + do.setup_logger(print_command_only,verbose, debug, log_dir, log_filename) - # Check which container we are using, and define the command accordingly - if container == "fmriprep": - logger.info("\n" + f"start to generate the FMRIPREP command") - - nthreads = lc_config["container_specific"][container]["nthreads"] - mem = lc_config["container_specific"][container]["mem"] - fs_license = lc_config["container_specific"][container]["fs_license"] - containerdir = lc_config["general"]["containerdir"] - container_path = os.path.join( - containerdir, - f"{container}_{lc_config['container_specific'][container]['version']}.sif", - ) - precommand = f"mkdir -p {homedir}; " f"unset PYTHONPATH; " - if "local" == host: - cmd = ( - precommand + f"singularity run " - f"-H {homedir} " - f"-B {basedir}:/base -B {fs_license}:/license " - f"--cleanenv {container_path} " - f"-w {ananlysis_dir} " - f"/base/BIDS {ananlysis_dir} participant " - f"--participant-label sub-{sub} " - f"--skip-bids-validation " - f"--output-spaces func fsnative fsaverage T1w MNI152NLin2009cAsym " - f"--dummy-scans 0 " - f"--use-syn-sdc " - f"--fs-license-file /license/license.txt " - 
f"--nthreads {nthreads} " - f"--omp-nthreads {nthreads} " - f"--stop-on-first-crash " - f"--mem_mb {(mem*1000)-5000} " + # logger the settings + if host == "local": + launch_mode = lc_config["host_options"]["local"]["launch_mode"] + valid_options = ["serial", "parallel","dask_worker"] + if launch_mode in valid_options: + host_str = ( + f"{host}, \n and commands will be launched in {launch_mode} mode \n" + f"Serial is safe but it will take longer. " + f"If you launch in parallel be aware that some of the " + f"processes might be killed if the limit (usually memory) " + f"of the machine is reached. " ) - if host in ["BCBL", "DIPC"]: - cmd = ( - precommand + f"singularity run " - f"-H {homedir} " - f"-B {basedir}:/base -B {fs_license}:/license " - f"--cleanenv {container_path} " - f"-w {ananlysis_dir} " - f"/base/BIDS {ananlysis_dir} participant " - f"--participant-label sub-{sub} " - f"--skip-bids-validation " - f"--output-spaces func fsnative fsaverage T1w MNI152NLin2009cAsym " - f"--dummy-scans 0 " - f"--use-syn-sdc " - f"--fs-license-file /license/license.txt " - f"--nthreads {nthreads} " - f"--omp-nthreads {nthreads} " - f"--stop-on-first-crash " - f"--mem_mb {(mem*1000)-5000} " + else: + do.die( + f"local:launch_mode {launch_mode} was passed, valid options are {valid_options}" ) + else: + host_str=f" host is {host}" + logger.critical( + "\n" + + "#####################################################\n" + + f"Successfully read the config file {lc_config_path} \n" + + f"SubsesList is read, there are {num_of_true_run} jobs needed to be launched" + + f'Basedir is: {lc_config["general"]["basedir"]} \n' + + f'Container is: {container}_{lc_config["container_specific"][container]["version"]} \n' + + f"Host is: {host_str} \n" + + f'analysis folder is: {lc_config["general"]["analysis_name"]} \n' + + f"##################################################### \n" + ) - if container in ["prfprepare", "prfreport", "prfanalyze-vista"]: - config_name = 
lc_config["container_specific"][container]["config_name"] - homedir = os.path.join(basedir, "singularity_home") - container_path = os.path.join( - containerdir, - f"{container}_{lc_config['container_specific'][container]['version']}.sif", - ) - if host in ["BCBL", "DIPC"]: - cmd = ( - "unset PYTHONPATH; " - f"singularity run " - f"-H {homedir} " - f"-B {basedir}/derivatives/fmriprep:/flywheel/v0/input " - f"-B {ananlysis_dir}:/flywheel/v0/output " - f"-B {basedir}/BIDS:/flywheel/v0/BIDS " - f"-B {ananlysis_dir}/{config_name}.json:/flywheel/v0/config.json " - f"-B {basedir}/license/license.txt:/opt/freesurfer/.license " - f"--cleanenv {container_path} " - ) - elif host == "local": - cmd = ( - "unset PYTHONPATH; " - f"singularity run " - f"-H {homedir} " - f"-B {basedir}/derivatives/fmriprep:/flywheel/v0/input " - f"-B {ananlysis_dir}:/flywheel/v0/output " - f"-B {basedir}/BIDS:/flywheel/v0/BIDS " - f"-B {ananlysis_dir}/{config_name}.json:/flywheel/v0/config.json " - f"-B {basedir}/license/license.txt:/opt/freesurfer/.license " - f"--cleanenv {container_path} " - ) - # If after all configuration, we do not have command, raise an error - if cmd is None: - logger.error( - "\n" - + f"the DWI PIPELINE command is not assigned, please check your config.yaml[general][host] session\n" + + + # Prepare file and launch containers + # First of all prepare the analysis folder: it create you the analysis folder automatically so that you are not messing up with different analysis + analysis_dir, dict_store_cs_configs = ( + prepare.prepare_analysis_folder(parser_namespace, lc_config) + ) + container_configs_under_analysis_folder=dict_store_cs_configs['config_path'] + + logger.info("Reading the BIDS layout...") + layout = BIDSLayout(os.path.join(basedir, bidsdir_name)) + logger.info("finished reading the BIDS layout.") + + # Prepare mode + # if DWI Pipeline (preproc, pipeline) + if container in [ + "anatrois", + "rtppreproc", + "rtp-pipeline", + "freesurferator", + "rtp2-preproc", + 
"rtp2-pipeline" + ]: + logger.debug(f"{container} is in the list") + sub_ses_list= sub_ses_list[(sub_ses_list['dwi'] == "True") & (sub_ses_list['RUN'] == "True")] + prepare.prepare_dwi_input( + parser_namespace, analysis_dir, lc_config, sub_ses_list, layout, dict_store_cs_configs ) - raise ValueError("cmd is not defined, aborting") + + future_dict= prepare_dask_futures( + analysis_dir, + lc_config, + sub_ses_list, + dict_store_cs_configs + ) + elif container in ["l1_glm"]: + sub_ses_list= sub_ses_list[(sub_ses_list['func'] == "True") & (sub_ses_list['RUN'] == "True")] + # do I need a function in prepare? + future_dict= prepare_dask_futures( + analysis_dir, + lc_config, + sub_ses_list, + dict_store_cs_configs + ) + elif container in ["fmriprep"]: + sub_ses_list= sub_ses_list["only have the subs, because it will not get the sessions"] + else: + logger.error(f"{container} is not in the list") + raise KeyError("The container name you input is not supported, can't do prepare or launch jobs") + - # GLU: I don't think this is right, run is done below, I will make it work just for local but not in here, - # it is good that this function just creates the cmd, I would keep it like that if run_lc: - return(sp.run(cmd, shell = True)) - else: - return cmd - # sp.run(cmd, shell=True) - #return cmd + run_dask(jobqueue_config,future_dict) + + +if __name__ == "__main__": + main() +# -*- coding: utf-8 -*- +""" +MIT License + +Copyright (c) 2020-2024 Garikoitz Lerma-Usabiaga +Copyright (c) 2020-2022 Mengxing Liu +Copyright (c) 2022-2024 Leandro Lecca +Copyright (c) 2022-2024 Yongning Lei +Copyright (c) 2023 David Linhardt +Copyright (c) 2023 Iñigo Tellaetxe + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, 
import os
import os.path as op
import logging
import subprocess

import dask
from bids import BIDSLayout

# Package-mode imports; fall back to the flat repo layout when running
# straight from a source checkout.  (The original imported both styles
# unconditionally, so the repo-layout imports always shadowed the installed
# package and raised ImportError outside the repo checkout.)
try:
    from launchcontainers import utils as do
    from launchcontainers import generate_command as gen_cmd
    from launchcontainers import dask_scheduler_config as config_dask
    from launchcontainers.prepare_inputs import prepare
    from launchcontainers.py_pipeline import l1_glm
except ImportError:
    import utils as do
    import generate_command as gen_cmd
    import dask_scheduler_config as config_dask
    from prepare_inputs import prepare
    from py_pipeline import l1_glm

logger = logging.getLogger("Launchcontainers")

# Containers handled by the DWI pipeline code path.
DWI_CONTAINERS = [
    "anatrois",
    "rtppreproc",
    "rtp-pipeline",
    "freesurferator",
    "rtp2-preproc",
    "rtp2-pipeline",
]


def prepare_dask_futures(
    analysis_dir,
    lc_config,
    sub_ses_list,
    dict_store_cs_configs
):
    """
    Build the per-subject commands and the bookkeeping dask needs to run them.

    This function has two jobs:
      1. generate and print the command for every subject/session row, and
      2. collect those commands (plus scheduler metadata) into a dict that
         ``launch_dask_futures`` later consumes.

    Args:
        analysis_dir (str): analysis folder for this container/analysis.
        lc_config (dict): parsed launchcontainers config.yaml.
        sub_ses_list (pandas.DataFrame): rows already filtered to RUN == True.
        dict_store_cs_configs (dict): paths of the copied config files
            (keys 'config_path' and 'lc_yaml_path').

    Returns:
        dict: with keys 'optimal_n_workers', 'container', 'logdir',
            'commands' and, for DWI containers, the parallel per-subject
            lists ('lc_configs', 'subs', 'sess', 'analysis_dirs').

    Raises:
        ValueError: if the container is not supported.
    """
    logger.info("\n" + "#####################################################\n")
    # Get the host and jobqueue config info from the config.yaml file
    container = lc_config["general"]["container"]
    host = lc_config["general"]["host"]
    jobqueue_config = lc_config["host_options"][host]
    logger.debug(f"\n This is the job_queue config {jobqueue_config}")
    logdir = os.path.join(analysis_dir, "daskworker_log")
    launch_mode = jobqueue_config["launch_mode"]

    # One job per remaining subject/session row.
    n_jobs = sub_ses_list.shape[0]
    # Worker count is capped both by the cores available and by the number
    # of jobs: more workers than jobs would only sit idle.
    total_core_avail = jobqueue_config["n_cores"]
    threads_per_worker = jobqueue_config["threads_per_worker"]
    optimal_n_workers = min(total_core_avail // threads_per_worker, n_jobs)

    logger.critical(
        f"\n Launchcontainers.py was run in PREPARATION mode (without option --run_lc)\n"
        f"Please check that: \n"
        f"    (1) launchcontainers.py prepared the input data properly\n"
        f"    (2) the command created for each subject is properly formed\n"
        f"         (you can copy the command for one subject and launch it "
        f"on the prompt before you launch multiple subjects\n"
        f"    (3) Once the check is done, launch the jobs by adding --run_lc to the first command you executed.\n"
    )

    if "local" in jobqueue_config["manager"] and launch_mode == "serial":
        # Serial local runs need no cluster: the synchronous scheduler
        # computes every delayed task in the calling process.
        dask.config.set(scheduler="single-threaded")
    else:
        # Spin the cluster up only so the user can inspect the job script
        # that will be submitted; it is re-created at launch time.
        client, cluster = config_dask.dask_scheduler(
            jobqueue_config, optimal_n_workers, logdir
        )

    logger.info(f"The scheduler after in the launch is {dask.config.get('scheduler')} ")

    if host != "local":
        logger.critical(
            f"The cluster job script for this command is:\n"
            f"{cluster.job_script()}"
        )
        # Log the objects *before* closing them so the message is meaningful.
        logger.info(f"Client is {client} \n Cluster is {cluster}")
        client.close()
        cluster.close()
    elif launch_mode == "parallel":
        logger.critical(
            f"The cluster job script for this command is:\n"
            f"{cluster}"
        )
        client.close()
        cluster.close()
    else:
        logger.critical(
            f"Your launch_mode is {launch_mode}, it will not controlled by dask but go ahead"
        )

    future_dict = {
        "optimal_n_workers": optimal_n_workers,
        "container": container,
        "logdir": logdir,
    }

    if container in DWI_CONTAINERS:
        lc_configs = []
        subs = []
        sess = []
        analysis_dirs = []
        commands = []
        for row in sub_ses_list.itertuples(index=True, name="Pandas"):
            sub = row.sub
            ses = row.ses
            # Parallel lists: one entry per job, consumed together later.
            lc_configs.append(lc_config)
            subs.append(sub)
            sess.append(ses)
            analysis_dirs.append(analysis_dir)
            # Built here so it can be echoed to the user; the same command
            # string is what gets executed at launch time.
            command = gen_cmd.dwi_command(lc_config, sub, ses, analysis_dir)
            commands.append(command)
            logger.critical(
                f"\nCOMMAND for subject-{sub}, and session-{ses}:\n"
                f"{command}\n\n"
            )
        future_dict["lc_configs"] = lc_configs
        future_dict["subs"] = subs
        future_dict["sess"] = sess
        future_dict["analysis_dirs"] = analysis_dirs
        future_dict["commands"] = commands

    elif container == "l1_glm":
        import pkg_resources  # TODO(review): migrate to importlib.resources

        py_pipeline_dir = pkg_resources.resource_filename(
            "launchcontainers", "py_pipeline"
        )
        env_cmd = gen_cmd.py_command(host)
        commands = []
        for row in sub_ses_list.itertuples(index=True, name="Pandas"):
            sub = row.sub
            ses = row.ses
            command = (
                f"{env_cmd}python {py_pipeline_dir}/l1_glm.py "
                f"--subject {sub} --session {ses} "
                f"--lc_config {dict_store_cs_configs['lc_yaml_path']} "
                f"--l1_glm_yaml {dict_store_cs_configs['config_path']} "
            )
            commands.append(command)
            logger.critical(
                f"\nCOMMAND for subject-{sub}, and session-{ses}:\n"
                f"{command}\n\n"
            )
        future_dict["commands"] = commands

    else:
        # Previously an unsupported container fell through both branches and
        # crashed with a NameError on `return future_dict`; fail loudly with
        # a clear message instead.
        raise ValueError(
            f"Container '{container}' is not supported by prepare_dask_futures"
        )

    return future_dict


def sp_run_cmd(cmd, cmd_id):
    """
    Run a shell command with subprocess and log its stdout and stderr.

    Args:
        cmd (str): the command line to run (executed through the shell).
        cmd_id (int): unique identifier to tell log entries apart.

    Returns:
        tuple: stripped stdout and stderr strings of the command.
    """
    logger.info(f"Executing command {cmd_id}: {cmd}")
    process = subprocess.Popen(
        cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
    )
    stdout, stderr = process.communicate()

    if stdout:
        logger.info(f"Command {cmd_id} stdout: {stdout.strip()}")
    if stderr:
        logger.error(f"Command {cmd_id} stderr: {stderr.strip()}")

    return stdout.strip(), stderr.strip()


def launch_dask_futures(
    jobqueue_config,
    future_dict
):
    """
    Execute the prepared commands, either serially in-process or through a
    dask cluster (local workers, SGE, or SLURM).

    Args:
        jobqueue_config (dict): the host_options section for the active host.
        future_dict (dict): output of ``prepare_dask_futures``.
    """
    optimal_n_workers = future_dict["optimal_n_workers"]
    commands = future_dict["commands"]
    logdir = future_dict["logdir"]
    launch_mode = jobqueue_config["launch_mode"]

    if "local" in jobqueue_config["manager"] and launch_mode == "serial":
        dask.config.set(scheduler="single-threaded")
        tasks = [dask.delayed(sp_run_cmd)(cmd, i) for i, cmd in enumerate(commands)]
        results = dask.compute(*tasks)
        # Print the results
        print("Results:", results)
        for stdout, stderr in results:
            if stdout:
                logger.info(f"Container stdout: {stdout}")
            if stderr:
                logger.error(f"Container stderr: {stderr}")
        logger.critical(f"launchcontainer is running in local serial mode")
    else:
        # Non-serial runs go through a real dask cluster/client.
        client, cluster = config_dask.dask_scheduler(
            jobqueue_config, optimal_n_workers, logdir
        )
        logger.info(
            "---this is the cluster and client\n" + f"{client} \n cluster: {cluster} \n"
        )

        # Fan the commands out; each future runs one subject's command.
        futures = client.map(sp_run_cmd, commands, range(len(commands)))
        # BUG FIX: logger.info() is not print(); an extra positional arg is a
        # %-style format argument, and the original message had no
        # placeholder, which made logging raise a formatting error.
        logger.info("Dask dashboard is available at: %s", cluster.dashboard_link)

        # Wait for all jobs to complete
        results = client.gather(futures)
        # Print job results
        for result in results:
            print(result)
        for stdout, stderr in results:
            if stdout:
                logger.info(f"Container stdout: {stdout}")
            if stderr:
                logger.error(f"Container stderr: {stderr}")
        client.close()
        cluster.close()
    logger.critical("\n" + "launchcontainer finished, all the jobs are done")
do.generate_subseslist(sub_list,ses_list) + print("######Your template sub_ses_list.txt has been created under the CWD!######") + return + # Check if download_configs argument is provided + if download_configs: + # Ensure the directory exists + if not os.path.exists(download_configs): + os.makedirs(download_configs) + + # Use the mocked version function for testing + launchcontainers_version = do.get_mocked_launchcontainers_version() + + if launchcontainers_version is None: + raise ValueError("Unable to determine launchcontainers version.") + do.download_configs(launchcontainers_version, download_configs) + print("\n######Your example configs has been copied to your indicated directory created under the CWD!######") + return copy_configs=parser_namespace.copy_configs # Check if download_configs argument is provided if copy_configs: @@ -583,50 +718,43 @@ def main(): # Your main function logic here # e.g., launch_container(args.other_arg) # read ymal and setup the launchcontainer program - + # main function with PREPARE and RUN mode + if (not gen_subseslist) or (not download_configs): + print("**********Executing main functionality with arguments*********") + # read ymal and setup the launchcontainer program lc_config_path = parser_namespace.lc_config lc_config = do.read_yaml(lc_config_path) - run_lc = parser_namespace.run_lc verbose = parser_namespace.verbose debug = parser_namespace.debug - - # Get general information from the config.yaml file basedir=lc_config["general"]["basedir"] bidsdir_name=lc_config["general"]["bidsdir_name"] - containerdir=lc_config["general"]["containerdir"] container=lc_config["general"]["container"] analysis_name=lc_config["general"]["analysis_name"] host=lc_config["general"]["host"] - force=lc_config["general"]["force"] print_command_only=lc_config["general"]["print_command_only"] log_dir=lc_config["general"]["log_dir"] log_filename=lc_config["general"]["log_filename"] version = lc_config["container_specific"][container]["version"] + 
jobqueue_config = lc_config["host_options"][host] # get stuff from subseslist for future jobs scheduling sub_ses_list_path = parser_namespace.sub_ses_list sub_ses_list,num_of_true_run = do.read_df(sub_ses_list_path) - - + if log_dir=="analysis_dir": log_dir=op.join(basedir,bidsdir_name,'derivatives',f'{container}_{version}',f"analysis-{analysis_name}") do.setup_logger(print_command_only,verbose, debug, log_dir, log_filename) # logger the settings - if host == "local": - njobs = lc_config["host_options"][host]["njobs"] - if njobs == "" or njobs is None: - njobs = 2 launch_mode = lc_config["host_options"]["local"]["launch_mode"] valid_options = ["serial", "parallel","dask_worker"] if launch_mode in valid_options: host_str = ( f"{host}, and commands will be launched in {launch_mode} mode " - f"every {njobs} jobs. " f"Serial is safe but it will take longer. " f"If you launch in parallel be aware that some of the " f"processes might be killed if the limit (usually memory) " @@ -648,19 +776,21 @@ def main(): + f'analysis folder is: {lc_config["general"]["analysis_name"]} \n' + f"##################################################### \n" ) - - logger.info("Reading the BIDS layout...") + # Prepare file and launch containers # First of all prepare the analysis folder: it create you the analysis folder automatically so that you are not messing up with different analysis - ananlysis_dir, dict_store_cs_configs = ( + analysis_dir, dict_store_cs_configs = ( prepare.prepare_analysis_folder(parser_namespace, lc_config) ) - + container_configs_under_analysis_folder=dict_store_cs_configs['config_path'] + + logger.info("Reading the BIDS layout...") layout = BIDSLayout(os.path.join(basedir, bidsdir_name)) logger.info("finished reading the BIDS layout.") - path_to_analysis_container_specific_config=dict_store_cs_configs['config_path'] + # Prepare mode + # if DWI Pipeline (preproc, pipeline) if container in [ "anatrois", "rtppreproc", @@ -668,25 +798,38 @@ def main(): "freesurferator", 
"rtp2-preproc", "rtp2-pipeline" - ]: # TODO: define list in another module for reusability accross modules and functions + ]: logger.debug(f"{container} is in the list") + sub_ses_list= sub_ses_list[(sub_ses_list['dwi'] == "True") & (sub_ses_list['RUN'] == "True")] prepare.prepare_dwi_input( - parser_namespace, ananlysis_dir, lc_config, sub_ses_list, layout, dict_store_cs_configs + parser_namespace, analysis_dir, lc_config, sub_ses_list, layout, dict_store_cs_configs ) - else: - logger.error(f"{container} is not in the list") - - - # Run mode - launchcontainer( - ananlysis_dir, + + future_dict= prepare_dask_futures( + analysis_dir, lc_config, sub_ses_list, - parser_namespace, - path_to_analysis_container_specific_config + dict_store_cs_configs ) + elif container in ["l1_glm"]: + sub_ses_list= sub_ses_list[(sub_ses_list['func'] == "True") & (sub_ses_list['RUN'] == "True")] + prepare.prepare_fmri_input( + parser_namespace, analysis_dir, lc_config, sub_ses_list, dict_store_cs_configs + ) + future_dict= prepare_dask_futures( + analysis_dir, + lc_config, + sub_ses_list, + dict_store_cs_configs + ) + elif container in ["fmriprep"]: + sub_ses_list= sub_ses_list["only have the subs, because it will not get the sessions"] + else: + logger.error(f"{container} is not in the list") + raise KeyError("The container name you input is not supported, can't do prepare or launch jobs") -# #%% + if run_lc: + launch_dask_futures(jobqueue_config,future_dict) if __name__ == "__main__": main() diff --git a/src/launchcontainers/prepare_inputs/__init__.py b/src/launchcontainers/prepare_inputs/__init__.py old mode 100755 new mode 100644 index 9c4832b..e69de29 --- a/src/launchcontainers/prepare_inputs/__init__.py +++ b/src/launchcontainers/prepare_inputs/__init__.py @@ -1,15 +0,0 @@ - -""" -MIT License - -Copyright (c) 2020-2023 Garikoitz Lerma-Usabiaga -Copyright (c) 2020-2022 Mengxing Liu -Copyright (c) 2022-2023 Leandro Lecca -Copyright (c) 2022-2023 Yongning Lei -Copyright (c) 2023 David 
Linhardt -Copyright (c) 2023 Iñigo Tellaetxe - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. -""" \ No newline at end of file diff --git a/src/launchcontainers/prepare_inputs/prepare.py b/src/launchcontainers/prepare_inputs/prepare.py index e28a94b..8fa114d 100755 --- a/src/launchcontainers/prepare_inputs/prepare.py +++ b/src/launchcontainers/prepare_inputs/prepare.py @@ -17,7 +17,6 @@ import os import os.path as op import json -import zipfile # for package mode, the import needs to import launchcontainer module from launchcontainers.prepare_inputs import utils as do @@ -26,7 +25,9 @@ # for testing mode using repo # from prepare_inputs import utils as do # from prepare_inputs import prepare_dwi as dwipre - +import utils as do +from prepare_inputs import prepare_dwi as dwipre +from prepare_inputs import prepare_fmri as fmripre logger = logging.getLogger("Launchcontainers") def prepare_analysis_folder(parser_namespace, lc_config): @@ -46,10 +47,20 @@ def prepare_analysis_folder(parser_namespace, lc_config): force = lc_config["general"]["force"] analysis_name= lc_config['general']['analysis_name'] run_lc = parser_namespace.run_lc - force= force or run_lc - version = lc_config["container_specific"][container]["version"] + force= force and not run_lc bidsdir_name = lc_config['general']['bidsdir_name'] - container_folder = op.join(basedir, bidsdir_name,'derivatives',f'{container}_{version}') + if container in [ + "anatrois", + "rtppreproc", + "rtp-pipeline", + 
"freesurferator", + "rtp2-preproc", + "rtp2-pipeline" + ]: + version = lc_config["container_specific"][container]["version"] + container_folder = op.join(basedir, bidsdir_name,'derivatives',f'{container}_{version}') + else: + container_folder= op.join(basedir, bidsdir_name,'derivatives',f'{container}') if not op.isdir(container_folder): os.makedirs(container_folder) @@ -59,7 +70,16 @@ def prepare_analysis_folder(parser_namespace, lc_config): ) if not op.isdir(analysis_dir): os.makedirs(analysis_dir) - + + # create log dir for dask + host = lc_config["general"]["host"] + jobqueue_config = lc_config["host_options"][host] + daskworer_logdir = os.path.join(analysis_dir, "daskworker_log") + if jobqueue_config["manager"] in ["sge","slurm"] and not os.path.exists(daskworer_logdir): + os.makedirs(daskworer_logdir) + if jobqueue_config["manager"] in ["local"]: + if (jobqueue_config["launch_mode"]=='dask_worker'): + os.makedirs(daskworer_logdir) ############################################################################################ ############################Copy the configs################################################ ############################################################################################ @@ -79,6 +99,7 @@ def prepare_analysis_folder(parser_namespace, lc_config): dict_store_cs_configs={} dict_store_cs_configs['config_path']=container_configs_under_analysis_folder + dict_store_cs_configs['lc_yaml_path']=lc_config_under_analysis_folder def process_optional_input(container,file_path, analysis_dir, option=None): if os.path.isfile(file_path): @@ -249,7 +270,7 @@ def get_config_dict(container,lc_config,dict_store_cs_configs): rtp2_json_dict= dict_store_cs_configs[container] - if container in ["freesurferator", "anatrois"]: + if container == "freesurferator": config_json_extra={'anat': {'location': { 'path': '/flywheel/v0/input/anat/T1.nii.gz', @@ -266,8 +287,7 @@ def get_config_dict(container,lc_config,dict_store_cs_configs): }, 'base': 'file' } - if 
'anat' in config_json_extra.keys() and 'pre_fs' in config_json_extra.keys(): - del config_json_extra['anat'] + else: config_json_extra={} for key in rtp2_json_dict.keys(): @@ -317,7 +337,7 @@ def prepare_dwi_input(parser_namespace, analysis_dir, lc_config, df_subSes, layo container = lc_config["general"]["container"] force = lc_config["general"]["force"] run_lc = parser_namespace.run_lc - force= force or run_lc + force= force and not run_lc version = lc_config["container_specific"][container]["version"] logger.info("\n"+ @@ -343,53 +363,119 @@ def prepare_dwi_input(parser_namespace, analysis_dir, lc_config, df_subSes, layo for row in df_subSes.itertuples(index=True, name="Pandas"): sub = row.sub ses = row.ses - RUN = row.RUN - dwi = row.dwi - - logger.info(f'dwi is {dwi}') + logger.info("\n" +"The current run is: \n" +f"{sub}_{ses}_{container}_{version}\n") - if RUN == "True" and dwi == "True": - - tmpdir = op.join( - analysis_dir, - "sub-" + sub, - "ses-" + ses, - "output", "tmp" - ) - logdir = op.join( - analysis_dir, - "sub-" + sub, - "ses-" + ses, - "output", "log" + + tmpdir = op.join( + analysis_dir, + "sub-" + sub, + "ses-" + ses, + "output", "tmp" + ) + container_logdir = op.join( + analysis_dir, + "sub-" + sub, + "ses-" + ses, + "output", "log" + ) + + if not op.isdir(tmpdir): + os.makedirs(tmpdir) + if not op.isdir(container_logdir): + os.makedirs(container_logdir) + + do.copy_file(parser_namespace.lc_config, op.join(container_logdir,'lc_config.yaml'), force) + config_file_path=dict_store_cs_configs['config_path'] + do.copy_file(config_file_path, op.join(container_logdir,'config.json'), force) + + + if container in ["rtppreproc" ,"rtp2-preproc"]: + dwipre.rtppreproc(dict_store_cs_configs, analysis_dir, lc_config, sub, ses, layout,run_lc) + elif container in ["rtp-pipeline", "rtp2-pipeline"]: + dwipre.rtppipeline(dict_store_cs_configs, analysis_dir,lc_config, sub, ses, layout,run_lc) + elif container in ["anatrois","freesurferator"]: + 
def prepare_fmri_input(parser_namespace, analysis_dir, lc_config, df_subSes, dict_store_cs_configs):
    """
    Prepare the inputs for the surface-based fMRI (l1_glm) pipeline.

    For every subject/session row this creates the output tmp/log folders,
    snapshots the configs into the log folder, and — when an onset directory
    is configured — moves the onset files into BIDS and validates them
    against the GLM yaml.

    Parameters
    ----------
    parser_namespace : argparse.Namespace
        Parsed command-line arguments (lc_config path, run_lc flag).
    analysis_dir : str
        The analysis folder for this container/analysis.
    lc_config : dict
        Parsed launchcontainers config.yaml.
    df_subSes : pandas.DataFrame
        Subject/session rows to prepare.
    dict_store_cs_configs : dict
        Paths of the copied container-specific configs.

    Returns
    -------
    None.

    """
    logger.info("\n"+
                "#####################################################\n"
                +"Preparing for surface based fMRI \n")

    container = lc_config["general"]["container"]
    run_lc = parser_namespace.run_lc
    # --force is deliberately ignored once --run_lc is given.
    force = lc_config["general"]["force"] and not run_lc
    l1_glm_yaml = do.read_yaml(dict_store_cs_configs['config_path'])

    logger.info("\n"+
                "#####################################################\n"
                +f"Prepare for {container}\n")

    has_onsetdir = bool(lc_config["container_specific"]["l1_glm"]["onsetdir"])

    for row in df_subSes.itertuples(index=True, name="Pandas"):
        sub, ses = row.sub, row.ses

        ses_output = op.join(analysis_dir, "sub-" + sub, "ses-" + ses, "output")
        # Make sure both working sub-folders exist before anything is copied.
        for leaf in ("tmp", "log"):
            leaf_dir = op.join(ses_output, leaf)
            if not op.isdir(leaf_dir):
                os.makedirs(leaf_dir)
        logdir = op.join(ses_output, "log")

        # Snapshot both configs next to the outputs for provenance.
        do.copy_file(parser_namespace.lc_config, op.join(logdir, 'lc_config.yaml'), force)
        do.copy_file(dict_store_cs_configs['config_path'], op.join(logdir, 'config.json'), force)

        if not has_onsetdir:
            logger.info("There is no input onsetdir, not preparing the onset files")
            continue

        onset_correct = fmripre.move_onset_files_to_bids(lc_config, l1_glm_yaml, sub, ses)
        if not onset_correct:
            logger.error("Not all the onset files are with correct trial name")
            raise ValueError("Please check your onset files")

    logger.info("\n"+
                "#####################################################\n")
    return
launchcontainers.prepare_inputs import utils as do from launchcontainers.prepare_inputs.utils import read_df @@ -249,6 +251,11 @@ def anatrois(dict_store_cs_configs, analysis_dir,lc_config, sub, ses, layout,run ) if not op.exists(dstDir_work): os.makedirs(dstDir_work) + # create corresponding folder + if op.exists(dstDir_input) and force: + shutil.rmtree(dstDir_input) + if op.exists(dstDir_output) and force: + shutil.rmtree(dstDir_output) if not op.exists(dstDir_input): os.makedirs(dstDir_input) @@ -401,7 +408,7 @@ def rtppreproc(dict_store_cs_configs, analysis_dir, lc_config, sub, ses, layout, anat_analysis_name = lc_config["container_specific"][container]["anat_analysis_name"] rpe = lc_config["container_specific"][container]["rpe"] - multishell=lc_config["container_specific"][container]["multishell"] + separated_shell_files=lc_config["container_specific"][container]["separated_shell_files"] # define input output folder for this container dstDir_input = op.join( @@ -416,7 +423,13 @@ def rtppreproc(dict_store_cs_configs, analysis_dir, lc_config, sub, ses, layout, "ses-" + ses, "output", ) - + # create corresponding folder + if op.exists(dstDir_input) and force: + shutil.rmtree(dstDir_input) + logger.info("Remove input dir under analysis because you set force to True") + if op.exists(dstDir_output) and force: + shutil.rmtree(dstDir_output) + logger.info("Remove output dir under analysis because you set force to True") if not op.exists(dstDir_input): os.makedirs(dstDir_input) if not op.exists(dstDir_output): @@ -461,7 +474,7 @@ def rtppreproc(dict_store_cs_configs, analysis_dir, lc_config, sub, ses, layout, if int(precontainer_anat.split('.')[1])>5: src_path_FSMASK = op.join(precontainer_anat_dir, "brain.nii.gz") # 3 dwi file that needs to be preprocessed, under BIDS/sub/ses/dwi - if not multishell: + if not separated_shell_files: # the bval src_path_BVAL = layout.get(subject= sub, session=ses, extension='bval',suffix= 'dwi', direction=PE_direction, 
import os
import os.path as op
import shutil
import logging
import pandas as pd


logger = logging.getLogger("Launchcontainers")


def format_onset_file():
    # Placeholder: reformat a raw onset file into BIDS events format
    # (not implemented yet).
    return


def move_onset_files_to_bids(lc_config, l1_glm_yaml, sub, ses):
    """
    Copy the onset (*_event.tsv) files into the BIDS func folder as
    *_events.tsv and validate each copied file against the GLM yaml.

    Args:
        lc_config (dict): launchcontainers config; provides basedir,
            bidsdir_name and the l1_glm onsetdir.
        l1_glm_yaml (dict): first-level GLM config; provides the task name
            and the run numbers.
        sub (str): subject label (without the 'sub-' prefix).
        ses (str): session label (without the 'ses-' prefix).

    Returns:
        bool: True when every copied onset file only uses trial names
            declared in the yaml's contrast_groups.
    """
    basedir = lc_config["general"]["basedir"]
    bidsdir_name = lc_config["general"]["bidsdir_name"]
    onsetdir_name = lc_config["container_specific"]["l1_glm"]["onsetdir"]
    onsetdir = op.join(basedir, onsetdir_name)
    bidsdir = op.join(basedir, bidsdir_name)

    task = l1_glm_yaml["experiment"]["task"]
    runs = l1_glm_yaml["experiment"]["run_nums"]
    onset_corrects = []
    for runnum in runs:
        # Source files use the singular '_event.tsv'; BIDS requires '_events.tsv'.
        src_fname = f'sub-{sub}_ses-{ses}_task-{task}_run-{runnum}_event.tsv'
        target_fname = f'sub-{sub}_ses-{ses}_task-{task}_run-{runnum}_events.tsv'
        src_onset = op.join(onsetdir, src_fname)
        target_path = op.join(bidsdir, f'sub-{sub}', f'ses-{ses}', 'func')
        if not op.exists(target_path):
            os.makedirs(target_path)

        target = op.join(target_path, target_fname)
        if os.path.exists(target):
            os.remove(target)
        try:
            shutil.copy(src_onset, target)
        except OSError as err:
            # Previously a bare `except: continue` silently swallowed *any*
            # exception (including KeyboardInterrupt). Only missing or
            # unreadable source files are expected here; skip them with a
            # warning so the user can see which runs had no onset file.
            logger.warning(f"Could not copy onset file {src_onset}: {err}")
            continue
        onset_correct = check_onset_field_with_glm_yaml(l1_glm_yaml, target)
        onset_corrects.append(onset_correct)

    # NOTE(review): if *no* file could be copied, onset_corrects is empty and
    # all([]) is True, so the caller sees success — confirm this is intended.
    return all(onset_corrects)


def check_onset_field_with_glm_yaml(l1_glm_yaml, targ_onset_path):
    """
    Verify that every trial_type used in an events.tsv is declared in the
    yaml's contrast groups.

    Args:
        l1_glm_yaml (dict): GLM config with a 'contrast_groups' mapping of
            group name -> list of condition names.
        targ_onset_path (str): path to the tab-separated events file.

    Returns:
        bool: True when all trial names (ignoring 'baseline') appear among
            the yaml's contrast-group conditions.

    Raises:
        ValueError: if the events file lacks a 'trial_type' column.
    """
    # Get the contrast groups
    contrast_groups = l1_glm_yaml['contrast_groups']

    # Collect every unique condition name declared across all groups.
    yaml_uniq_vals = set()
    for key, values in contrast_groups.items():
        yaml_uniq_vals.update(values)

    # Convert set to sorted list (deterministic for debugging/logging).
    yaml_uniq_vals = sorted(yaml_uniq_vals)

    onset = pd.read_csv(targ_onset_path, sep='\t')

    # Check if 'trial_type' column exists
    if 'trial_type' in onset.columns:
        # Get unique values from 'trial_type' column
        onset_uniq_vals = onset['trial_type'].unique()
    else:
        logger.error("The column 'trial_type' does not exist in the file.")
        raise ValueError("onset file column is not correct")

    # 'baseline' is implicit and never modeled as a contrast condition.
    onset_uniq_vals_filtered = onset_uniq_vals[onset_uniq_vals != "baseline"]
    all_in_yaml = all(item in yaml_uniq_vals for item in onset_uniq_vals_filtered)
    if all_in_yaml:
        logger.debug("This onset is no problem")
    else:
        # (fixed typo: was "trail name")
        logger.error(f'{targ_onset_path} have incorrect trial name')

    return all_in_yaml


def smooth_time_series(subject, session, l1_glm_yaml, lc_config):
    """
    Build the FreeSurfer mris_fwhm command that smooths the fMRIPrep surface
    time series.  Currently a stub: it resolves the fMRIPrep paths but
    always returns None.
    """
    # get the variables for input and output files
    basedir = lc_config['general']['basedir']
    bidsdir_name = lc_config['general']['bidsdir_name']
    fmriprep_dir_name = lc_config['container_specific']['fmri_glm']['fmriprep_dir_name']
    fmriprep_ana_name = lc_config['container_specific']['fmri_glm']['fmriprep_ana_name']
    fmriprep_dir = op.join(basedir, bidsdir_name, 'derivatives', fmriprep_dir_name, fmriprep_ana_name)

    task = l1_glm_yaml["experiment"]["task"]
    # TODO: set up the FreeSurfer environment, then build something like:
    # gii_in  = f"{fmriprep_dir}/sub-{subject}/ses-{session}/func/sub-{subject}_ses-{session}_task-{task}_run-{run}_hemi-{hemi}_space-{space}_bold.func.gii"
    # gii_out = f"{fmriprep_dir}/sub-{subject}/ses-{session}/func/sub-{subject}_ses-{session}_task-{task}_run-{run}_hemi-{hemi}_space-{space}_desc-smoothed0${time_series_smooth_kernel}_bold.func.gii"
    # cmd = f"mris_fwhm --i {gii_in} --o {gii_out} --so --fwhm {time_series_smooth_kernel} --subject sub-{subject} --hemi {hemi} "
    cmd = None
    return cmd
take fMRIPrep output time series +# 2. Can do time series smoothing for you +# 3. Can do parallel processing and takes little time + +# input: +# config.yaml specifying input folder and whether do smooth or not +# subseslist.txt specifying the subject, session, that will be processed +# l1_analysis.json specifying the contrast you will use +# task: +# run_number: +# freesurfer label: +# output stats: +# TODO: there are other analysis specific stuff like HRF, and how many regressor to include, but now will just go with the one I am using + +# sturcture of the script +''' +def prepare_onset(): +def do_smooth(): + Take input from config.yaml to get the orig time series + Then call freesurfer smooth + + input: from config.yaml to get folder, from config.json to get filename + output: add time_series_smooth_kernel in the filename +def do_glm(): + +def main(): + take input from yaml and json and pass to do_smooth() and do_glm + + take input from subseslist to setup dask + + use dask to generate job script and call do_glm + + future=client.map(do_glm ) + +QA: set time in the code, and monitor the time of each step +Logger: get logger and store the output into a txt +''' +import os.path as op +from os import makedirs +from nilearn.surface import load_surf_data +import numpy as np +from scipy import stats +from nilearn.glm.first_level import ( + make_first_level_design_matrix, + first_level_from_bids, +) +import yaml +from yaml.loader import SafeLoader +from nilearn.glm.first_level.first_level import run_glm +from nilearn.glm.contrasts import compute_contrast +import pandas as pd +import nibabel as nib +import logging +import argparse +from argparse import RawDescriptionHelpFormatter +import sys +sys.path.append(op.abspath(op.join(op.dirname(__file__), '../../'))) + +def read_yaml(path_to_config_file): + """ + Input: + the path to the config file + + Returns + a dictionary that contains all the config info + + """ + with open(path_to_config_file, "r") as v: + config = 
yaml.load(v, Loader=SafeLoader) + + return config + + +logger = logging.getLogger("Launchcontainers") + +def mask_nii(mask_method, mask, source_nii): + masked_nii=None + return masked_nii + +def save_statmap_to_gifti(data, outname): + """Save a statmap to a gifti file. + data: nilearn contrast model output, e.g., contrast.effect_size() + outname: output file name + """ + gii_to_save = nib.gifti.gifti.GiftiImage() + gii_to_save.add_gifti_data_array( + nib.gifti.gifti.GiftiDataArray(data=data, datatype="NIFTI_TYPE_FLOAT32") + ) + nib.save(gii_to_save, outname) + +def run_l1_glm(subject, session, lc_config, l1_glm_yaml): + # (subject, session, fp_ana_name, output_name, slice_time_ref=0.5, use_smoothed=False, time_series_smooth_kernel=None) + #subject= subject_sessions['BIDS_sub'] + #session= subject_sessions['BIDS_ses'] + # use_smooth: either False 0 or True 01 02 03 04 05 010 + #### + ##### + #for debug + # subject='05' + # session='day2VA' + # slice_time_ref=0.5 + # fp_ana_name='analysis-okazaki_ST05' + # output_name='analysis-testGetL1' + # use_smoothed=False + #### + basedir=lc_config['general']['basedir'] + bidsdir_name=lc_config['general']['bidsdir_name'] + bids = op.join(basedir,bidsdir_name) # op to BIDS root + container=lc_config['general']['container'] + version=lc_config['container_specific'][container]['version'] + analysis_name=lc_config['general']['analysis_name'] + + fmriprep_dir_name=lc_config['container_specific'][container]['fmriprep_dir_name'] + fmriprep_ana_name=lc_config['container_specific'][container]['fmriprep_ana_name'] + fmriprep_dir = op.join( + "derivatives", fmriprep_dir_name , f'analysis-{fmriprep_ana_name}' + ) # BIDS-relative path to fMRIPrep + # get the freesurfer dir + # default freesurer dir is the same as fmriprep, but it could be different + pre_fs=lc_config['container_specific'][container]['pre_fs'] + if not pre_fs: + fs_dir= op.join(fmriprep_dir,'sourcedata','freesurfer') + if pre_fs: + 
pre_fs_full_path=lc_config['container_specific'][container]['pre_fs_full_path'] + fs_dir= pre_fs_full_path + + use_smoothed=lc_config['container_specific'][container]['use_smoothed'] + time_series_smooth_kernel=lc_config['container_specific'][container]['time_series_smooth_kernel'] + + task = l1_glm_yaml['experiment']['task'] # Task name + run_nums = l1_glm_yaml['experiment']['run_nums'] # Runs to process + dummy_scans = l1_glm_yaml['experiment']['dummy_scans'] # dummy scans at the beginning of the functional acquisition + + space = l1_glm_yaml['model']['space'] # BOLD projected on subject's freesurfer surface + hemis = l1_glm_yaml['model']['hemis'] #, "R"] # L for left, R for right + logger.info(f"input hemis are {hemis}") + mask_EPI =l1_glm_yaml['model']['mask_EPI'] + + if mask_EPI: + mask_method =l1_glm_yaml['model']['mask_method'] + if mask_method=="fslabel": + fslabel_name =l1_glm_yaml['model']['fslabel_name'] + label_dir=op.join(fs_dir, f'sub-{subject}','label') + elif mask_method=='bimap.nii': + maskfile_nii_path =l1_glm_yaml['model']['maskfile_nii_path'] + # the default location is the folder + if len(maskfile_nii_path.split('/'))==1: + maskfile_nii_path=op.join(bids, "derivatives",container,analysis_name,maskfile_nii_path) + + + fslabel_name=l1_glm_yaml['model']['fslabel_name'] + slice_time_ref = l1_glm_yaml['model']['slice_time_ref'] + hrf_model= l1_glm_yaml['model']['hrf_model'] + drift_model= l1_glm_yaml['model']['drift_model'] + drift_order= l1_glm_yaml['model']['drift_order'] + high_pass= l1_glm_yaml['model']['high_pass'] + motion_regressors = l1_glm_yaml['model']['motion_regressors'] + use_acompcor=l1_glm_yaml['model']['use_acompcor'] + use_non_steady_state=l1_glm_yaml['model']['use_non_steady_state'] + use_consine_regressors=l1_glm_yaml['model']['use_consine_regressors'] + + ### Define output directory + analysis_name=lc_config['general']['analysis_name'] + outdir = op.join(bids, "derivatives",f'{container}',f'analysis-{analysis_name}', 
f'sub-{subject}',f'ses-{session}') + + if not op.exists(outdir): + raise FileNotFoundError("There is no analysis folder") + + ### Loop across hemispheres + for hemi in hemis: + print("Processing hemi", hemi) + if hemi == "lh": + hm = "L" + if hemi == "rh": + hm= "R" + ### Final output dictionary for GLM contrast results (to be combined across runslater) + contrast_objs = {} + gii_allrun=[] + frame_time_allrun=[] + events_allrun=[] + confounds_allrun=[] + store_l1=[] + ### Loop over runs + for idx, run_num in enumerate(run_nums): + print("Processing run", run_num) + + ### Load GIFTI data and z-score it + run = ( + "run-" + run_num + ) # Run string in filename (define as empty string "" if no run label) + func_name = ( + f"sub-{subject}_ses-{session}_task-{task}_{run}_hemi-{hm}_space-{space}_bold.func.gii" + ) + # If you smoothed data beforehand, make sure to point this to your smoothed file name! + print(f"smooth is {use_smoothed}") + if use_smoothed: + func_name = func_name.replace("_bold", f"_desc-smoothed{time_series_smooth_kernel}_bold") + + nii_path = op.join(bids, fmriprep_dir, f'sub-{subject}', f"ses-{session}" ,"func", func_name) + gii_data = load_surf_data(nii_path) + + # remove the dummy scans of all runs and then concat them + gii_data_float=np.vstack(gii_data[:,:]).astype(float) + gii_remove_dummy=gii_data_float[:,dummy_scans::] + gii_data_std = stats.zscore(gii_remove_dummy, axis=1) + + + # # freesurfer label file + # label_path=(f'{label_dir}/lh.votcnov1v2.label') + # mask_votc= load_surf_data(label_path) + + + # ### Get shape of data + n_vertices = np.shape(gii_data_std)[0] + n_scans = np.shape(gii_data_std)[1] + + if mask_EPI: + if mask_method=='fslabel': + label_full_path=f'{label_dir}/{hemi}.{fslabel_name}.label' + label=load_surf_data(label_full_path) + mask=np.zeros((n_vertices,1)) + mask[label]=1 + elif mask_method=='bimap.nii': + # to do hehe + pass + # mask the gii according to the yaml + if mask_EPI: + gii_data_std_masked=gii_data_std*mask + 
gii_data_float_masked=gii_data_float*mask + # I dont know how to use this, it seems a binary mask + # gii_data_std_masked=nilearn.masking.apply_mask(gii_data_std, mask_votc, dtype='f', smoothing_fwhm=None, ensure_finite=True) + gii_allrun.append(gii_data_std_masked) + else: + gii_allrun.append(gii_data_std) + + ### Use the volumetric data just to get the events and confounds file + img_filters = [("desc", "preproc")] + # specify session + img_filters.append(("ses", session)) + # If multiple runs are present, then add the run number to filter to specify + if len(run) > 0: + img_filters.append(("run", run_num)) + l1 = first_level_from_bids( + bids, + task, + space_label="T1w", + sub_labels=[subject], + slice_time_ref=slice_time_ref, + hrf_model=hrf_model, + drift_model=drift_model, # Do not high_pass since we use fMRIPrep's cosine regressors + drift_order=drift_order, # Do not high_pass since we use fMRIPrep's cosine regressors + high_pass=high_pass, # Do not high_pass since we use fMRIPrep's cosine regressors + img_filters=img_filters, + derivatives_folder=fmriprep_dir, + ) + + ### Extract information from the prepared model + t_r = l1[0][0].t_r + events = l1[2][0][0] # Dataframe of events information + print(l1) + confounds = l1[3][0][0] # Dataframe of confounds + + # get rid of rest so that the setting would be the same as spm + events_nobaseline=events[events.loc[:,'trial_type']!='rest'] + events_nobaseline.loc[:,'onset']=events_nobaseline['onset']+idx*(n_scans)*t_r + + events_allrun.append(events_nobaseline) + store_l1.append(l1) + ### From the confounds file, extract only those of interest + # Start with the motion and acompcor regressors + motion_keys = motion_regressors + # Get ACompCor components (all to explain 50% variance) + + a_compcor_keys = [key for key in confounds.keys() if "a_comp_cor" in key] + + # Now add non-steady-state volumes + non_steady_state_keys = [key for key in confounds.keys() if "non_steady" in key] + + # Add cosine regressors which 
act to high-pass filter data at 1/128 Hz + cosine_keys = [key for key in confounds.keys() if "cosine" in key] + + # Pull out the confounds we want to keep + confound_keys_keep = ( + motion_keys + a_compcor_keys + cosine_keys + non_steady_state_keys + ) + confounds_keep = confounds[confound_keys_keep] + + # Set first value of FD column to the column mean + confounds_keep["framewise_displacement"][0] = np.nanmean( + confounds_keep["framewise_displacement"] + ) + confounds_keep=confounds_keep.iloc[6:] + confounds_allrun.append(confounds_keep) + ### Create the design matrix + # Start by getting times of scans + frame_times = t_r * ((np.arange(n_scans) + slice_time_ref)+idx*n_scans) + # Now use Nilearn to create the design matrix from the events files + frame_time_allrun.append(frame_times) + + conc_gii_data_std=np.concatenate(gii_allrun, axis=1) + concat_frame_times=np.concatenate(frame_time_allrun, axis=0) + concat_events=pd.concat(events_allrun, axis=0) + concat_confounds=pd.concat(confounds_allrun, axis=0) + nonan_confounds=concat_confounds.dropna(axis=1, how='any') + + design_matrix = make_first_level_design_matrix( + concat_frame_times, + events=concat_events, + hrf_model=hrf_model, # convolve with SPM's canonical HRF function + drift_model=None, # we use fMRIPrep's cosine regressors + add_regs=nonan_confounds, + ) + + + # set the design matrix's NaN value to 0? + + # z-score the design matrix to standardize it + design_matrix_std = stats.zscore(design_matrix, axis=0) + # add constant in to standardized design matrix since you cannot z-score a constant + design_matrix_std["constant"] = np.ones(len(design_matrix_std)).astype(int) + + ### Run the GLM + # Y std or not? 
+ Y = np.transpose(conc_gii_data_std) + X = np.asarray(design_matrix_std) + labels, estimates = run_glm(Y, X, n_jobs=-1) + + ### Define the contrasts + contrast_matrix = np.eye(design_matrix.shape[1]) + basic_contrasts = dict( + [ + (column, contrast_matrix[i]) + for i, column in enumerate(design_matrix.columns) + ] + ) + print(basic_contrasts) + ''' + contrasts_old = { + "AllvsNull": ( + basic_contrasts["adult"] + + basic_contrasts["child"] + + basic_contrasts["body"] + + basic_contrasts["limb"] + + basic_contrasts["JP_word"] + + basic_contrasts["JP_FF"] + + basic_contrasts["JP_CB"] + + basic_contrasts["JP_CS"] + + basic_contrasts["JP_SC"] + ), + "PERvsNull": ( + basic_contrasts["JP_CB"] + + basic_contrasts["JP_SC"] + ), + "LEXvsNull": ( + basic_contrasts["JP_CS"] + + basic_contrasts["JP_FF"] + ), + "PERvsLEX": ( + basic_contrasts["JP_CB"] / 2 + + basic_contrasts["JP_SC"] / 2 + - basic_contrasts["JP_CS"] / 2 + - basic_contrasts["JP_FF"] / 2 + ), + "WordvsLEX": ( + basic_contrasts["JP_word"] + - basic_contrasts["JP_CS"] / 2 + - basic_contrasts["JP_FF"] / 2 + ), + "WordvsPER": ( + basic_contrasts["JP_word"] + - basic_contrasts["JP_CB"] / 2 + - basic_contrasts["JP_SC"] / 2 + ), + "WordvsLEXPER": ( + basic_contrasts["JP_word"] + - basic_contrasts["JP_CS"] / 4 + - basic_contrasts["JP_FF"] / 4 + - basic_contrasts["JP_CB"] / 4 + - basic_contrasts["JP_SC"] / 4 + ), + "WordvsAllnoWordnoLEX": ( + basic_contrasts["JP_word"] + - basic_contrasts["JP_CB"] / 6 + - basic_contrasts["JP_SC"] / 6 + - basic_contrasts["body"] / 6 + - basic_contrasts["limb"] / 6 + - basic_contrasts["adult"] / 6 + - basic_contrasts["child"] / 6 + ), + + "WordvsAllnoWord": ( + basic_contrasts["JP_word"] + - basic_contrasts["JP_CS"] / 8 + - basic_contrasts["JP_FF"] / 8 + - basic_contrasts["JP_CB"] / 8 + - basic_contrasts["JP_SC"] / 8 + - basic_contrasts["body"] / 8 + - basic_contrasts["limb"] / 8 + - basic_contrasts["adult"] / 8 + - basic_contrasts["child"] / 8 + ), + "LEXvsAllnoWordnoLEX": ( + 
basic_contrasts["JP_CS"] / 2 + + basic_contrasts["JP_FF"] / 2 + - basic_contrasts["JP_CB"] / 6 + - basic_contrasts["JP_SC"] / 6 + - basic_contrasts["body"] / 6 + - basic_contrasts["limb"] / 6 + - basic_contrasts["adult"] / 6 + - basic_contrasts["child"] / 6 + ), + "SCvsCB": ( + basic_contrasts["JP_SC"] + - basic_contrasts["JP_CB"] + + ), + "CSvsFF": ( + basic_contrasts["JP_CS"] + - basic_contrasts["JP_FF"] + + ), + "FacesvsNull": ( + basic_contrasts["adult"] + + basic_contrasts["child"] + ), + "FacesvsLEX": ( + basic_contrasts["adult"] / 2 + + basic_contrasts["child"] / 2 + - basic_contrasts["JP_CS"] / 2 + - basic_contrasts["JP_FF"] / 2 + ), + "FacesvsPER": ( + basic_contrasts["adult"] / 2 + + basic_contrasts["child"] / 2 + - basic_contrasts["JP_CB"] / 2 + - basic_contrasts["JP_SC"] / 2 + ), + "FacesvsLEXPER": ( + basic_contrasts["adult"] / 2 + + basic_contrasts["child"] / 2 + - basic_contrasts["JP_CB"] / 4 + - basic_contrasts["JP_SC"] / 4 + - basic_contrasts["JP_CS"] / 4 + - basic_contrasts["JP_FF"] / 4 + ), + "FacesvsAllnoFace": ( + basic_contrasts["adult"] / 2 + + basic_contrasts["child"] / 2 + - basic_contrasts["JP_CB"] / 7 + - basic_contrasts["JP_SC"] / 7 + - basic_contrasts["JP_CS"] / 7 + - basic_contrasts["JP_FF"] / 7 + - basic_contrasts["body"] / 7 + - basic_contrasts["limb"] / 7 + - basic_contrasts["JP_word"] / 7 + ), + "AdultvsChild": ( + basic_contrasts["adult"] + - basic_contrasts["child"] + ), + + "LimbsvsNull": ( + basic_contrasts["body"] + + basic_contrasts["limb"] + ), + "LimbsvsLEX": ( + basic_contrasts["body"] / 2 + + basic_contrasts["limb"] / 2 + - basic_contrasts["JP_CS"] / 2 + - basic_contrasts["JP_FF"] / 2 + ), + "LimbsvsPER": ( + basic_contrasts["body"] / 2 + + basic_contrasts["limb"] / 2 + - basic_contrasts["JP_CB"] / 2 + - basic_contrasts["JP_SC"] / 2 + ), + "LimbsvsLEXPER": ( + basic_contrasts["body"] / 2 + + basic_contrasts["limb"] / 2 + - basic_contrasts["JP_CB"] / 4 + - basic_contrasts["JP_SC"] / 4 + - basic_contrasts["JP_CS"] / 4 + - 
basic_contrasts["JP_FF"] / 4 + ), + "LimbsvsAllnoLimbs": ( + basic_contrasts["body"] / 2 + + basic_contrasts["limb"] / 2 + - basic_contrasts["JP_CB"] / 7 + - basic_contrasts["JP_SC"] / 7 + - basic_contrasts["JP_CS"] / 7 + - basic_contrasts["JP_FF"] / 7 + - basic_contrasts["adult"] / 7 + - basic_contrasts["child"] / 7 + - basic_contrasts["JP_word"] / 7 + ), + "BodyvsLimb": ( + basic_contrasts["body"] + - basic_contrasts["limb"] + ) + + } + ''' + # Extract contrasts + contrasts_yaml = l1_glm_yaml['contrasts'] + contrast_groups=l1_glm_yaml['contrast_groups'] + contrasts = {} + for contrast in contrasts_yaml: + contrast_vector = np.zeros_like(next(iter(basic_contrasts.values())), dtype=np.float64) + plus=contrast.split('vs')[0] + minus=contrast.split('vs')[1] + try: + if minus == "Null": + for part in contrast_groups[plus]: + contrast_vector += basic_contrasts[part] + contrasts[contrast]=contrast_vector + else: + con1=contrast_groups[plus] + factor1=len(con1) + con2=contrast_groups[minus] + factor2=len(con2) + for part in con1: + contrast_vector += basic_contrasts[part]/factor1 + for part in con2: + contrast_vector -= basic_contrasts[part]/factor2 + contrasts[contrast]=contrast_vector + except KeyError as e : + logger.error(f"{contrast} contrast have key error, check your yaml") + ### Compute the contrasts + for index, (contrast_id, contrast_val) in enumerate(contrasts.items()): + # Add a label to the output dictionary if not present + if contrast_id not in contrast_objs: + contrast_objs[contrast_id] = [] + + # Define a name template for output statistical maps (stat-X is replaced later on) + outname_base_run = f"sub-{subject}_ses-{session}_task-{task}_hemi-{hemi}_space-{space}_contrast-{contrast_id}_stat-X_statmap.func.gii" + if use_smoothed: + outname_base_run = outname_base_run.replace( + "_statmap", f"_desc-smoothed{time_series_smooth_kernel}_statmap" + ) + outname_base_run = op.join(outdir, outname_base_run) # Place in output directory + + # compute 
contrast-related statistics + contrast = compute_contrast( + labels, estimates, contrast_val, contrast_type="t" + ) + # add contrast to the output dictionary + contrast_objs[contrast_id].append(contrast) + + # do the run-specific processing + betas = contrast.effect_size() + z_score = contrast.z_score() + t_value = contrast.stat() + p_value = contrast.p_value() + variance = contrast.effect_variance() + + # Save the value maps as GIFTIs + # Effect size + outname = outname_base_run.replace("stat-X", "stat-effect") + save_statmap_to_gifti(betas, outname) + + # z-score + outname = outname_base_run.replace("stat-X", "stat-z") + save_statmap_to_gifti(z_score, outname) + + # t-value + outname = outname_base_run.replace("stat-X", "stat-t") + save_statmap_to_gifti(t_value, outname) + + # p-value + outname = outname_base_run.replace("stat-X", "stat-p") + save_statmap_to_gifti(p_value, outname) + + # variance + outname = outname_base_run.replace("stat-X", "stat-variance") + save_statmap_to_gifti(variance, outname) + + return f"run_glm ingg for {subject} {session}" + +def main(): + parser = argparse.ArgumentParser(description='Run l1_glm with input arguments through the function l1_glm') + parser.add_argument('--subject', required=True, type=str, help='Subject for l1_glm') + parser.add_argument('--session', required=True, type=str, help='Session for l1_glm') + parser.add_argument('--lc_config', required=True, type=str, help='LC config yaml for l1_glm') + parser.add_argument('--l1_glm_yaml', required=True, type=str, help='L1 GLM YAML file for l1_glm') + + args = parser.parse_args() + lc_config=read_yaml(args.lc_config) + l1_glm_yaml=read_yaml(args.l1_glm_yaml) + # Call the function with the provided arguments + run_l1_glm(args.subject, args.session, lc_config, l1_glm_yaml) + +if __name__ == "__main__": + main() diff --git a/src/launchcontainers/pyfunc/__init__.py b/src/launchcontainers/pyfunc/__init__.py new file mode 100644 index 0000000..e69de29 diff --git 
a/src/launchcontainers/pyfunc/l1_glm.py b/src/launchcontainers/pyfunc/l1_glm.py new file mode 100644 index 0000000..b04edd4 --- /dev/null +++ b/src/launchcontainers/pyfunc/l1_glm.py @@ -0,0 +1,601 @@ +# Python script for doing first lvl fMRI analysis +# features 3: +# 1. take fMRIPrep output time series +# 2. Can do time series smoothing for you +# 3. Can do parallel processing and takes little time + +# input: +# config.yaml specifying input folder and whether do smooth or not +# subseslist.txt specifying the subject, session, that will be processed +# l1_analysis.json specifying the contrast you will use +# task: +# run_number: +# freesurfer label: +# output stats: +# TODO: there are other analysis specific stuff like HRF, and how many regressor to include, but now will just go with the one I am using + +# sturcture of the script +''' +def prepare_onset(): +def do_smooth(): + Take input from config.yaml to get the orig time series + Then call freesurfer smooth + + input: from config.yaml to get folder, from config.json to get filename + output: add time_series_smooth_kernel in the filename +def do_glm(): + +def main(): + take input from yaml and json and pass to do_smooth() and do_glm + + take input from subseslist to setup dask + + use dask to generate job script and call do_glm + + future=client.map(do_glm ) + +QA: set time in the code, and monitor the time of each step +Logger: get logger and store the output into a txt +''' +import os.path as op +from os import makedirs +from nilearn.surface import load_surf_data +import numpy as np +from scipy import stats +from nilearn.glm.first_level import ( + make_first_level_design_matrix, + first_level_from_bids, +) + +from nilearn.glm.first_level.first_level import run_glm +from nilearn.glm.contrasts import compute_contrast +import pandas as pd +import nibabel as nib +import logging +import argparse +from argparse import RawDescriptionHelpFormatter +import sys +sys.path.append(op.abspath(op.join(op.dirname(__file__), 
'../../'))) +# Now you can import utils +import utils as do +logger = logging.getLogger("Launchcontainers") +def prepare_onset(subject, session, layout): + + return + +def smooth_time_series(subject, session, l1_glm_yaml, lc_config): + # get the variables for input and output files + basedir=lc_config['general']['basedir'] + bidsdir_name=lc_config['general']['bidsdir_name'] + fmriprep_dir_name=lc_config['container_specific']['fmri_glm']['fmriprep_dir_name'] + fmriprep_ana_name=lc_config['container_specific']['fmri_glm']['fmriprep_ana_name'] + fmriprep_dir=op.join(basedir,bidsdir_name,'derivatives',fmriprep_dir_name,fmriprep_ana_name) + + task=l1_glm_yaml["glm_"] + # set up freesurfer environment + + # generate the cmd + #gii_in=f"{fmriprep_dir}/sub-{subject}/ses-{session}/func/sub-{subject}_ses-{session}_task-{task}_run-{run}_hemi-{hemi}_space-{space}_bold.func.gii" + #gii_out=f"{fmriprep_dir}/sub-{subject}/ses-{session}/func/sub-{subject}_ses-{session}_task-{task}_run-{run}_hemi-{hemi}_space-{space}_desc-smoothed0${time_series_smooth_kernel}_bold.func.gii" + + #cmd=f"mris_fwhm --i {gii_in} --o {gii_out} --so --fwhm {time_series_smooth_kernel} --subject sub-{subject} --hemi {hemi} " + cmd=None + return cmd + +def mask_nii(mask_method, mask, source_nii): + masked_nii=None + return masked_nii +def save_statmap_to_gifti(data, outname): + """Save a statmap to a gifti file. 
+ data: nilearn contrast model output, e.g., contrast.effect_size() + outname: output file name + """ + gii_to_save = nib.gifti.gifti.GiftiImage() + gii_to_save.add_gifti_data_array( + nib.gifti.gifti.GiftiDataArray(data=data, datatype="NIFTI_TYPE_FLOAT32") + ) + nib.save(gii_to_save, outname) +def run_l1_glm(subject, session, lc_config, l1_glm_yaml): + # (subject, session, fp_ana_name, output_name, slice_time_ref=0.5, use_smoothed=False, time_series_smooth_kernel=None) + #subject= subject_sessions['BIDS_sub'] + #session= subject_sessions['BIDS_ses'] + # use_smooth: either False 0 or True 01 02 03 04 05 010 + #### + ##### + #for debug + # subject='05' + # session='day2VA' + # slice_time_ref=0.5 + # fp_ana_name='analysis-okazaki_ST05' + # output_name='analysis-testGetL1' + # use_smoothed=False + #### + basedir=lc_config['general']['basedir'] + bidsdir_name=lc_config['general']['bidsdir_name'] + bids = op.join(basedir,bidsdir_name) # op to BIDS root + container=lc_config['general']['container'] + version=lc_config['container_specific'][container]['version'] + + fmriprep_dir_name=lc_config['container_specific'][container]['fmriprep_dir_name'] + fmriprep_ana_name=lc_config['container_specific'][container]['fmriprep_ana_name'] + fmriprep_dir = op.join( + "derivatives", fmriprep_dir_name , f'analysis-{fmriprep_ana_name}' + ) # BIDS-relative path to fMRIPrep + # get the freesurfer dir + # default freesurer dir is the same as fmriprep, but it could be different + pre_fs=lc_config['container_specific'][container]['pre_fs'] + if not pre_fs: + fs_dir= op.join(fmriprep_dir,'sourcedata','freesurfer') + if pre_fs: + pre_fs_full_path=lc_config['container_specific'][container]['pre_fs_full_path'] + fs_dir= pre_fs_full_path + + use_smoothed=lc_config['container_specific'][container]['use_smoothed'] + time_series_smooth_kernel=lc_config['container_specific'][container]['time_series_smooth_kernel'] + + task = l1_glm_yaml['experiment']['task'] # Task name + run_nums = 
l1_glm_yaml['experiment']['run_nums'] # Runs to process + dummy_scans = l1_glm_yaml['experiment']['dummy_scans'] # dummy scans at the beginning of the functional acquisition + + space = l1_glm_yaml['model']['space'] # BOLD projected on subject's freesurfer surface + hemis = l1_glm_yaml['model']['hemis'] #, "R"] # L for left, R for right + logger.info(f"input hemis are {hemis}") + mask_EPI =l1_glm_yaml['model']['mask_EPI'] + if mask_EPI: + mask_method =l1_glm_yaml['model']['mask_method'] + if mask_method=="fslabel": + fslabel_name =l1_glm_yaml['model']['fslabel_name'] + label_dir=op.join(fs_dir, f'sub-{subject}','label') + elif mask_method=='bimap.nii': + bimap_nii_path =l1_glm_yaml['model']['bimap_nii_path'] + # the default location is the folder + if len(bimap_nii_path.split('/'))==1: + bimap_nii_path=op.join(bids, "derivatives",container,analysis_name,bimap_nii_path) + + + fslabel_name=l1_glm_yaml['model']['fslabel_name'] + slice_time_ref = l1_glm_yaml['model']['slice_time_ref'] + hrf_model= l1_glm_yaml['model']['hrf_model'] + drift_model= l1_glm_yaml['model']['drift_model'] + drift_order= l1_glm_yaml['model']['drift_order'] + high_pass= l1_glm_yaml['model']['high_pass'] + motion_regressors = l1_glm_yaml['model']['motion_regressors'] + use_acompcor=l1_glm_yaml['model']['use_acompcor'] + use_non_steady_state=l1_glm_yaml['model']['use_non_steady_state'] + use_consine_regressors=l1_glm_yaml['model']['use_consine_regressors'] + + ### Define output directory + analysis_name=lc_config['general']['analysis_name'] + outdir = op.join(bids, "derivatives",f'{container}_{version}',f'analysis-{analysis_name}', f'sub-{subject}',f'ses-{session}') + + if not op.exists(outdir): + makedirs(outdir) + + + + ### Loop across hemispheres + for hemi in hemis: + print("Processing hemi", hemi) + if hemi == "lh": + hm = "L" + if hemi == "rh": + hm= "R" + ### Final output dictionary for GLM contrast results (to be combined across runslater) + contrast_objs = {} + gii_allrun=[] + 
frame_time_allrun=[] + events_allrun=[] + confounds_allrun=[] + store_l1=[] + ### Loop over runs + for idx, run_num in enumerate(run_nums): + print("Processing run", run_num) + + ### Load GIFTI data and z-score it + run = ( + "run-" + run_num + ) # Run string in filename (define as empty string "" if no run label) + func_name = ( + f"sub-{subject}_ses-{session}_task-{task}_{run}_hemi-{hm}_space-{space}_bold.func.gii" + ) + # If you smoothed data beforehand, make sure to point this to your smoothed file name! + print(f"smooth is {use_smoothed}") + if use_smoothed: + func_name = func_name.replace("_bold", f"_desc-smoothed{time_series_smooth_kernel}_bold") + + nii_path = op.join(bids, fmriprep_dir, f'sub-{subject}', f"ses-{session}" ,"func", func_name) + gii_data = load_surf_data(nii_path) + + # remove the dummy scans of all runs and then concat them + gii_data_float=np.vstack(gii_data[:,:]).astype(float) + gii_remove_dummy=gii_data_float[:,dummy_scans::] + gii_data_std = stats.zscore(gii_remove_dummy, axis=1) + + + # # freesurfer label file + # label_path=(f'{label_dir}/lh.votcnov1v2.label') + # mask_votc= load_surf_data(label_path) + + + # ### Get shape of data + n_vertices = np.shape(gii_data_std)[0] + n_scans = np.shape(gii_data_std)[1] + + if mask_EPI: + if mask_method=='fslabel': + label_full_path=f'{label_dir}/{hemi}.{fslabel_name}.label' + label=load_surf_data(label_full_path) + mask=np.zeros((n_vertices,1)) + mask[label]=1 + elif mask_method=='bimap.nii': + # to do hehe + pass + # mask the gii according to the yaml + if mask_EPI: + gii_data_std_masked=gii_data_std*mask + gii_data_float_masked=gii_data_float*mask + # I dont know how to use this, it seems a binary mask + # gii_data_std_masked=nilearn.masking.apply_mask(gii_data_std, mask_votc, dtype='f', smoothing_fwhm=None, ensure_finite=True) + gii_allrun.append(gii_data_std_masked) + else: + gii_allrun.append(gii_data_std) + + ### Use the volumetric data just to get the events and confounds file + 
img_filters = [("desc", "preproc")] + # specify session + img_filters.append(("ses", session)) + # If multiple runs are present, then add the run number to filter to specify + if len(run) > 0: + img_filters.append(("run", run_num)) + l1 = first_level_from_bids( + bids, + task, + space_label="T1w", + sub_labels=[subject], + slice_time_ref=slice_time_ref, + hrf_model=hrf_model, + drift_model=drift_model, # Do not high_pass since we use fMRIPrep's cosine regressors + drift_order=drift_order, # Do not high_pass since we use fMRIPrep's cosine regressors + high_pass=high_pass, # Do not high_pass since we use fMRIPrep's cosine regressors + img_filters=img_filters, + derivatives_folder=fmriprep_dir, + ) + + ### Extract information from the prepared model + t_r = l1[0][0].t_r + events = l1[2][0][0] # Dataframe of events information + print(l1) + confounds = l1[3][0][0] # Dataframe of confounds + + # get rid of rest so that the setting would be the same as spm + events_nobaseline=events[events.loc[:,'trial_type']!='rest'] + events_nobaseline.loc[:,'onset']=events_nobaseline['onset']+idx*(n_scans)*t_r + + events_allrun.append(events_nobaseline) + store_l1.append(l1) + ### From the confounds file, extract only those of interest + # Start with the motion and acompcor regressors + motion_keys = motion_regressors + # Get ACompCor components (all to explain 50% variance) + + a_compcor_keys = [key for key in confounds.keys() if "a_comp_cor" in key] + + # Now add non-steady-state volumes + non_steady_state_keys = [key for key in confounds.keys() if "non_steady" in key] + + # Add cosine regressors which act to high-pass filter data at 1/128 Hz + cosine_keys = [key for key in confounds.keys() if "cosine" in key] + + # Pull out the confounds we want to keep + confound_keys_keep = ( + motion_keys + a_compcor_keys + cosine_keys + non_steady_state_keys + ) + confounds_keep = confounds[confound_keys_keep] + + # Set first value of FD column to the column mean + 
confounds_keep["framewise_displacement"][0] = np.nanmean( + confounds_keep["framewise_displacement"] + ) + confounds_keep=confounds_keep.iloc[6:] + confounds_allrun.append(confounds_keep) + ### Create the design matrix + # Start by getting times of scans + frame_times = t_r * ((np.arange(n_scans) + slice_time_ref)+idx*n_scans) + # Now use Nilearn to create the design matrix from the events files + frame_time_allrun.append(frame_times) + + conc_gii_data_std=np.concatenate(gii_allrun, axis=1) + concat_frame_times=np.concatenate(frame_time_allrun, axis=0) + concat_events=pd.concat(events_allrun, axis=0) + concat_confounds=pd.concat(confounds_allrun, axis=0) + nonan_confounds=concat_confounds.dropna(axis=1, how='any') + + design_matrix = make_first_level_design_matrix( + concat_frame_times, + events=concat_events, + hrf_model=hrf_model, # convolve with SPM's canonical HRF function + drift_model=None, # we use fMRIPrep's cosine regressors + add_regs=nonan_confounds, + ) + + + # set the design matrix's NaN value to 0? + + # z-score the design matrix to standardize it + design_matrix_std = stats.zscore(design_matrix, axis=0) + # add constant in to standardized design matrix since you cannot z-score a constant + design_matrix_std["constant"] = np.ones(len(design_matrix_std)).astype(int) + + ### Run the GLM + # Y std or not? 
+ Y = np.transpose(conc_gii_data_std) + X = np.asarray(design_matrix_std) + labels, estimates = run_glm(Y, X, n_jobs=-1) + + ### Define the contrasts + contrast_matrix = np.eye(design_matrix.shape[1]) + basic_contrasts = dict( + [ + (column, contrast_matrix[i]) + for i, column in enumerate(design_matrix.columns) + ] + ) + print(basic_contrasts) + ''' + contrasts_old = { + "AllvsNull": ( + basic_contrasts["adult"] + + basic_contrasts["child"] + + basic_contrasts["body"] + + basic_contrasts["limb"] + + basic_contrasts["JP_word"] + + basic_contrasts["JP_FF"] + + basic_contrasts["JP_CB"] + + basic_contrasts["JP_CS"] + + basic_contrasts["JP_SC"] + ), + "PERvsNull": ( + basic_contrasts["JP_CB"] + + basic_contrasts["JP_SC"] + ), + "LEXvsNull": ( + basic_contrasts["JP_CS"] + + basic_contrasts["JP_FF"] + ), + "PERvsLEX": ( + basic_contrasts["JP_CB"] / 2 + + basic_contrasts["JP_SC"] / 2 + - basic_contrasts["JP_CS"] / 2 + - basic_contrasts["JP_FF"] / 2 + ), + "WordvsLEX": ( + basic_contrasts["JP_word"] + - basic_contrasts["JP_CS"] / 2 + - basic_contrasts["JP_FF"] / 2 + ), + "WordvsPER": ( + basic_contrasts["JP_word"] + - basic_contrasts["JP_CB"] / 2 + - basic_contrasts["JP_SC"] / 2 + ), + "WordvsLEXPER": ( + basic_contrasts["JP_word"] + - basic_contrasts["JP_CS"] / 4 + - basic_contrasts["JP_FF"] / 4 + - basic_contrasts["JP_CB"] / 4 + - basic_contrasts["JP_SC"] / 4 + ), + "WordvsAllnoWordnoLEX": ( + basic_contrasts["JP_word"] + - basic_contrasts["JP_CB"] / 6 + - basic_contrasts["JP_SC"] / 6 + - basic_contrasts["body"] / 6 + - basic_contrasts["limb"] / 6 + - basic_contrasts["adult"] / 6 + - basic_contrasts["child"] / 6 + ), + + "WordvsAllnoWord": ( + basic_contrasts["JP_word"] + - basic_contrasts["JP_CS"] / 8 + - basic_contrasts["JP_FF"] / 8 + - basic_contrasts["JP_CB"] / 8 + - basic_contrasts["JP_SC"] / 8 + - basic_contrasts["body"] / 8 + - basic_contrasts["limb"] / 8 + - basic_contrasts["adult"] / 8 + - basic_contrasts["child"] / 8 + ), + "LEXvsAllnoWordnoLEX": ( + 
basic_contrasts["JP_CS"] / 2 + + basic_contrasts["JP_FF"] / 2 + - basic_contrasts["JP_CB"] / 6 + - basic_contrasts["JP_SC"] / 6 + - basic_contrasts["body"] / 6 + - basic_contrasts["limb"] / 6 + - basic_contrasts["adult"] / 6 + - basic_contrasts["child"] / 6 + ), + "SCvsCB": ( + basic_contrasts["JP_SC"] + - basic_contrasts["JP_CB"] + + ), + "CSvsFF": ( + basic_contrasts["JP_CS"] + - basic_contrasts["JP_FF"] + + ), + "FacesvsNull": ( + basic_contrasts["adult"] + + basic_contrasts["child"] + ), + "FacesvsLEX": ( + basic_contrasts["adult"] / 2 + + basic_contrasts["child"] / 2 + - basic_contrasts["JP_CS"] / 2 + - basic_contrasts["JP_FF"] / 2 + ), + "FacesvsPER": ( + basic_contrasts["adult"] / 2 + + basic_contrasts["child"] / 2 + - basic_contrasts["JP_CB"] / 2 + - basic_contrasts["JP_SC"] / 2 + ), + "FacesvsLEXPER": ( + basic_contrasts["adult"] / 2 + + basic_contrasts["child"] / 2 + - basic_contrasts["JP_CB"] / 4 + - basic_contrasts["JP_SC"] / 4 + - basic_contrasts["JP_CS"] / 4 + - basic_contrasts["JP_FF"] / 4 + ), + "FacesvsAllnoFace": ( + basic_contrasts["adult"] / 2 + + basic_contrasts["child"] / 2 + - basic_contrasts["JP_CB"] / 7 + - basic_contrasts["JP_SC"] / 7 + - basic_contrasts["JP_CS"] / 7 + - basic_contrasts["JP_FF"] / 7 + - basic_contrasts["body"] / 7 + - basic_contrasts["limb"] / 7 + - basic_contrasts["JP_word"] / 7 + ), + "AdultvsChild": ( + basic_contrasts["adult"] + - basic_contrasts["child"] + ), + + "LimbsvsNull": ( + basic_contrasts["body"] + + basic_contrasts["limb"] + ), + "LimbsvsLEX": ( + basic_contrasts["body"] / 2 + + basic_contrasts["limb"] / 2 + - basic_contrasts["JP_CS"] / 2 + - basic_contrasts["JP_FF"] / 2 + ), + "LimbsvsPER": ( + basic_contrasts["body"] / 2 + + basic_contrasts["limb"] / 2 + - basic_contrasts["JP_CB"] / 2 + - basic_contrasts["JP_SC"] / 2 + ), + "LimbsvsLEXPER": ( + basic_contrasts["body"] / 2 + + basic_contrasts["limb"] / 2 + - basic_contrasts["JP_CB"] / 4 + - basic_contrasts["JP_SC"] / 4 + - basic_contrasts["JP_CS"] / 4 + - 
basic_contrasts["JP_FF"] / 4 + ), + "LimbsvsAllnoLimbs": ( + basic_contrasts["body"] / 2 + + basic_contrasts["limb"] / 2 + - basic_contrasts["JP_CB"] / 7 + - basic_contrasts["JP_SC"] / 7 + - basic_contrasts["JP_CS"] / 7 + - basic_contrasts["JP_FF"] / 7 + - basic_contrasts["adult"] / 7 + - basic_contrasts["child"] / 7 + - basic_contrasts["JP_word"] / 7 + ), + "BodyvsLimb": ( + basic_contrasts["body"] + - basic_contrasts["limb"] + ) + + } + ''' + # Extract contrasts + contrasts_yaml = l1_glm_yaml['contrasts'] + contrast_groups=l1_glm_yaml['contrast_groups'] + contrasts = {} + for contrast in contrasts_yaml: + contrast_vector = np.zeros_like(next(iter(basic_contrasts.values())), dtype=np.float64) + plus=contrast.split('vs')[0] + minus=contrast.split('vs')[1] + try: + if minus == "Null": + for part in contrast_groups[plus]: + contrast_vector += basic_contrasts[part] + contrasts[contrast]=contrast_vector + else: + con1=contrast_groups[plus] + factor1=len(con1) + con2=contrast_groups[minus] + factor2=len(con2) + for part in con1: + contrast_vector += basic_contrasts[part]/factor1 + for part in con2: + contrast_vector -= basic_contrasts[part]/factor2 + contrasts[contrast]=contrast_vector + except KeyError as e : + logger.error(f"{contrast} contrast have key error, check your yaml") + ### Compute the contrasts + for index, (contrast_id, contrast_val) in enumerate(contrasts.items()): + # Add a label to the output dictionary if not present + if contrast_id not in contrast_objs: + contrast_objs[contrast_id] = [] + + # Define a name template for output statistical maps (stat-X is replaced later on) + outname_base_run = f"sub-{subject}_ses-{session}_task-{task}_hemi-{hemi}_space-{space}_contrast-{contrast_id}_stat-X_statmap.func.gii" + if use_smoothed: + outname_base_run = outname_base_run.replace( + "_statmap", f"_desc-smoothed{time_series_smooth_kernel}_statmap" + ) + outname_base_run = op.join(outdir, outname_base_run) # Place in output directory + + # compute 
contrast-related statistics + contrast = compute_contrast( + labels, estimates, contrast_val, contrast_type="t" + ) + # add contrast to the output dictionary + contrast_objs[contrast_id].append(contrast) + + # do the run-specific processing + betas = contrast.effect_size() + z_score = contrast.z_score() + t_value = contrast.stat() + p_value = contrast.p_value() + variance = contrast.effect_variance() + + # Save the value maps as GIFTIs + # Effect size + outname = outname_base_run.replace("stat-X", "stat-effect") + save_statmap_to_gifti(betas, outname) + + # z-score + outname = outname_base_run.replace("stat-X", "stat-z") + save_statmap_to_gifti(z_score, outname) + + # t-value + outname = outname_base_run.replace("stat-X", "stat-t") + save_statmap_to_gifti(t_value, outname) + + # p-value + outname = outname_base_run.replace("stat-X", "stat-p") + save_statmap_to_gifti(p_value, outname) + + # variance + outname = outname_base_run.replace("stat-X", "stat-variance") + save_statmap_to_gifti(variance, outname) + + return f"run_glm ingg for {subject} {session}" + +def main(): + parser = argparse.ArgumentParser(description='Run l1_glm with input arguments') + parser.add_argument('--subject', required=True, help='Subject for l1_glm') + parser.add_argument('--session', required=True, help='Session for l1_glm') + parser.add_argument('--lc_config', required=True, help='LC config yaml for l1_glm') + parser.add_argument('--l1_glm_yaml', required=True, help='L1 GLM YAML file for l1_glm') + + args = parser.parse_args() + lc_config=do.read_yaml(args.lc_config) + l1_glm_yaml=do.read_yaml(args.l1_glm_yaml) + # Call the function with the provided arguments + run_l1_glm(args.subject, args.session, lc_config, l1_glm_yaml) + +if __name__ == "__main__": + main() diff --git a/src/launchcontainers/prepare_inputs/check_parser.py b/src/launchcontainers/test/check_parser.py similarity index 100% rename from src/launchcontainers/prepare_inputs/check_parser.py rename to 
src/launchcontainers/test/check_parser.py diff --git a/src/launchcontainers/test/test_dask.py b/src/launchcontainers/test/test_dask.py new file mode 100755 index 0000000..d3c060a --- /dev/null +++ b/src/launchcontainers/test/test_dask.py @@ -0,0 +1,438 @@ +# -*- coding: utf-8 -*- +""" +MIT License + +Copyright (c) 2020-2024 Garikoitz Lerma-Usabiaga +Copyright (c) 2020-2022 Mengxing Liu +Copyright (c) 2022-2024 Leandro Lecca +Copyright (c) 2022-2024 Yongning Lei +Copyright (c) 2023 David Linhardt +Copyright (c) 2023 Iñigo Tellaetxe + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
+""" +import os +import os.path as op +import subprocess as sp +from subprocess import Popen +import numpy as np +import logging +import math + +# modules in lc + +from bids import BIDSLayout +from dask.distributed import progress + +# for package mode, the import needs to import launchcontainer module +from launchcontainers.prepare_inputs import dask_scheduler_config as dsq +from launchcontainers.prepare_inputs import prepare as prepare +from launchcontainers.prepare_inputs import utils as do + +# for testing mode through , we can use relative import +# from prepare_inputs import dask_scheduler_config as dsq +# from prepare_inputs import prepare as prepare +# from prepare_inputs import utils as do + + +logger = logging.getLogger("Launchcontainers") + + +# %% launchcontainers +def generate_cmd( + lc_config, sub, ses, analysis_dir, run_lc +): + """Puts together the command to send to the container. + + Args: + lc_config (str): _description_ + sub (str): _description_ + ses (str): _description_ + analysis_dir (str): _description_ + lst_container_specific_configs (list): _description_ + run_lc (str): _description_ + + Raises: + ValueError: Raised in presence of a faulty config.yaml file, or when the formed command is not recognized. 
+ + Returns: + _type_: _description_ + """ + + # Relevant directories + # All other relevant directories stem from this one + basedir = lc_config["general"]["basedir"] + + homedir = os.path.join(basedir, "singularity_home") + container = lc_config["general"]["container"] + host = lc_config["general"]["host"] + containerdir = lc_config["general"]["containerdir"] + + # Information relevant to the host and container + jobqueue_config = lc_config["host_options"][host] + version = lc_config["container_specific"][container]["version"] + use_module = jobqueue_config["use_module"] + bind_options = jobqueue_config["bind_options"] + + # Location of the Singularity Image File (.sif) + container_name = os.path.join(containerdir, f"{container}_{version}.sif") + # Define the directory and the file name to output the log of each subject + container_logdir = os.path.join(analysis_dir, "sub-" + sub, "ses-" + ses, "output", "log") + logfilename = f"{container_logdir}/t-{container}-sub-{sub}_ses-{ses}" + + path_to_sub_derivatives = os.path.join(analysis_dir, f"sub-{sub}", f"ses-{ses}") + + bind_cmd = "" + for bind in bind_options: + bind_cmd += f"--bind {bind}:{bind} " + + env_cmd = "" + cmd = ( + f"{env_cmd} " + f"echo we are testing" + ) + # If after all configuration, we do not have command, raise an error + if cmd is None: + logger.error( + "\n" + + f"the DWI PIPELINE command is not assigned, please check your config.yaml[general][host] session\n" + ) + raise ValueError("cmd is not defined, aborting") + + # GLU: I don't think this is right, run is done below, I will make it work just for local but not in here, + # it is good that this function just creates the cmd, I would keep it like that + if run_lc: + return(sp.run(cmd, shell = True)) + else: + return cmd + # sp.run(cmd, shell=True) + #return cmd + + +# %% the launchcontainer +def launchcontainer( + analysis_dir, + lc_config, + sub_ses_list, + parser_namespace +): + """ + This function launches containers generically in 
different Docker/Singularity HPCs + This function is going to assume that all files are where they need to be. + + Args: + analysis_dir (str): _description_ + lc_config (str): path to launchcontainer config.yaml file + sub_ses_list (_type_): parsed CSV containing the subject list to be analyzed, and the analysis options + parser_namespace (argparse.Namespace): command line arguments + """ + logger.info("\n" + "#####################################################\n") + + # Get the host and jobqueue config info from the config.yaml file + host = lc_config["general"]["host"] + jobqueue_config = lc_config["host_options"][host] + if host == "local": + launch_mode = jobqueue_config["launch_mode"] + logger.debug(f"\n,, this is the job_queue config {jobqueue_config}") + + force = lc_config["general"]["force"] + daskworker_logdir = os.path.join(analysis_dir, "daskworker_log") + + # Count how many jobs we need to launch from sub_ses_list + n_jobs = np.sum(sub_ses_list.RUN == "True") + + run_lc = parser_namespace.run_lc + + lc_configs = [] + subs = [] + sess = [] + dir_analysiss = [] + paths_to_analysis_config_json = [] + run_lcs = [] + # PREPARATION mode + if not run_lc: + logger.critical( + f"\nlaunchcontainers.py was run in PREPARATION mode (without option --run_lc)\n" + f"Please check that: \n" + f" (1) launchcontainers.py prepared the input data properly\n" + f" (2) the command created for each subject is properly formed\n" + f" (you can copy the command for one subject and launch it " + f"on the prompt before you launch multiple subjects\n" + f" (3) Once the check is done, launch the jobs by adding --run_lc to the first command you executed.\n" + ) + # If the host is not local, print the job script to be launched in the cluster. 
+ if host != "local" or (host == "local" and launch_mode == "dask_worker"): + client, cluster = create_cluster_client(jobqueue_config, n_jobs, daskworker_logdir) + if host != "local": + logger.critical( + f"The cluster job script for this command is:\n" + f"{cluster.job_script()}" + ) + elif host == "local" and launch_mode == "dask_worker": + logger.critical( + f"The cluster job script for this command is:\n" + f"{cluster}" + ) + # Iterate over the provided subject list + commands = list() + for row in sub_ses_list.itertuples(index=True, name="Pandas"): + sub = row.sub + ses = row.ses + RUN = row.RUN + dwi = row.dwi + + if RUN == "True": + # Append config, subject, session, and path info in corresponding lists + lc_configs.append(lc_config) + subs.append(sub) + sess.append(ses) + dir_analysiss.append(analysis_dir) + run_lcs.append(run_lc) + + # This cmd is only for print the command + command = generate_cmd( + lc_config, + sub, + ses, + analysis_dir, + False # set to False to print the command + ) + commands.append(command) + if not run_lc: + logger.critical( + f"\nCOMMAND for subject-{sub}, and session-{ses}:\n" + f"{command}\n\n" + ) + + if not run_lc and lc_config["general"]["container"] == "fmriprep": + logger.critical( + f"\n" + f"fmriprep now can not deal with session specification, " + f"so the analysis are running on all sessions of the " + f"subject you are specifying" + ) + + # RUN mode + if run_lc and host != "local": + run_dask( + jobqueue_config, + n_jobs, + daskworker_logdir, + lc_configs, + subs, + sess, + dir_analysiss, + paths_to_analysis_config_json, + run_lcs + ) + + if run_lc and host == "local": + if launch_mode == "parallel": + k = 0 + njobs = jobqueue_config["njobs"] + if njobs == "" or njobs is None: + njobs = 2 + steps = math.ceil(len(commands)/njobs) + logger.critical( + f"\nLocally launching {len(commands)} jobs in parallel every {njobs} jobs " + f"in {steps} steps, check your server's memory, some jobs might fail\n" + ) + for stp in 
range(steps): + if stp == range(steps)[-1] and (k+njobs) <= len(commands): + selected_commands = commands[k:len(commands)] + else: + selected_commands = commands[k:k+njobs] + logger.critical( + f"JOBS in step {stp+1}:\n{selected_commands}\n" + ) + procs = [ Popen(i, shell=True) for i in selected_commands ] + for p in procs: + p.wait() + k = k+njobs + + elif launch_mode == "dask_worker": + logger.critical( + f"\nLocally launching {len(commands)} jobs with dask-worker, " + f" keep an eye on your server's memory\n" + ) + run_dask( + jobqueue_config, + n_jobs, + daskworker_logdir, + lc_configs, + subs, + sess, + dir_analysiss, + paths_to_analysis_config_json, + run_lcs + ) + elif launch_mode == "serial": # Run this with dask... + logger.critical( + f"Locally launching {len(commands)} jobs in series, this might take a lot of time" + ) + serial_cmd = "" + for i, cmd in enumerate(commands): + if i == 0: + serial_cmd = cmd + else: + serial_cmd += f" && {cmd}" + logger.critical( + f"LAUNCHING SUPER SERIAL {len(commands)} JOBS:\n{serial_cmd}\n" + ) + sp.run(serial_cmd, shell=True) + + return + +def create_cluster_client(jobqueue_config, n_jobs, daskworker_logdir): + client, cluster = dsq.dask_scheduler(jobqueue_config, n_jobs, daskworker_logdir) + return client, cluster + +def run_dask( + jobqueue_config, + n_jobs, + daskworker_logdir, + lc_configs, + subs, + sess, + dir_analysiss, + paths_to_analysis_config_json, + run_lcs + ): + + client, cluster = create_cluster_client(jobqueue_config, n_jobs, daskworker_logdir) + logger.info( + "---this is the cluster and client\n" + f"{client} \n cluster: {cluster} \n" + ) + print(subs) + print(sess) + # Compose the command to run in the cluster + futures = client.map( + generate_cmd, + lc_configs, + subs, + sess, + dir_analysiss, + paths_to_analysis_config_json, + run_lcs + ) + # Record the progress + # progress(futures) + # Get the info and report it in the logger + results = client.gather(futures) + logger.info(results) + 
logger.info("###########") + # Close the connection with the client and the cluster, and inform about it + client.close() + cluster.close() + + logger.critical("\n" + "launchcontainer finished, all the jobs are done") + #return client, cluster + + + +# %% main() +def main(): + parser_namespace,parse_dict = do.get_parser() + copy_configs=parser_namespace.copy_configs + # Check if download_configs argument is provided + if copy_configs: + # Ensure the directory exists + if not os.path.exists(copy_configs): + os.makedirs(copy_configs) + launchcontainers_version = do.copy_configs(copy_configs) + # # Use the mocked version function for testing + # launchcontainers_version = do.get_mocked_launchcontainers_version() + + # if launchcontainers_version is None: + # raise ValueError("Unable to determine launchcontainers version.") + # do.download_configs(launchcontainers_version, download_configs) + else: + # Proceed with normal main functionality + print("Executing main functionality with arguments") + # Your main function logic here + # e.g., launch_container(args.other_arg) + # read ymal and setup the launchcontainer program + + lc_config_path = parser_namespace.lc_config + lc_config = do.read_yaml(lc_config_path) + + run_lc = parser_namespace.run_lc + verbose = parser_namespace.verbose + debug = parser_namespace.debug + + + # Get general information from the config.yaml file + basedir=lc_config["general"]["basedir"] + bidsdir_name=lc_config["general"]["bidsdir_name"] + containerdir=lc_config["general"]["containerdir"] + container=lc_config["general"]["container"] + analysis_name=lc_config["general"]["analysis_name"] + host=lc_config["general"]["host"] + force=lc_config["general"]["force"] + print_command_only=lc_config["general"]["print_command_only"] + log_dir=lc_config["general"]["log_dir"] + log_filename=lc_config["general"]["log_filename"] + + version = lc_config["container_specific"][container]["version"] + # get stuff from subseslist for future jobs scheduling + 
sub_ses_list_path = parser_namespace.sub_ses_list + sub_ses_list,num_of_true_run = do.read_df(sub_ses_list_path) + + + if log_dir=="analysis_dir": + log_dir=op.join(basedir,bidsdir_name,'derivatives',f'{container}_{version}',f"analysis-{analysis_name}") + + do.setup_logger(print_command_only,verbose, debug, log_dir, log_filename) + + # logger the settings + + if host == "local": + njobs = lc_config["host_options"][host]["njobs"] + if njobs == "" or njobs is None: + njobs = 2 + launch_mode = lc_config["host_options"]["local"]["launch_mode"] + valid_options = ["serial", "parallel","dask_worker"] + if launch_mode in valid_options: + host_str = ( + f"{host}, \n and commands will be launched in {launch_mode} mode \n" + f"every {njobs} jobs. " + f"Serial is safe but it will take longer. " + f"If you launch in parallel be aware that some of the " + f"processes might be killed if the limit (usually memory) " + f"of the machine is reached. " + ) + else: + do.die( + f"local:launch_mode {launch_mode} was passed, valid options are {valid_options}" + ) + else: + host_str=f" host is {host}" + logger.critical( + "\n" + + "#####################################################\n" + + f"Successfully read the config file {lc_config_path} \n" + + f"SubsesList is read, there are {num_of_true_run} jobs needed to be launched" + + f'Basedir is: {lc_config["general"]["basedir"]} \n' + + f'Container is: {container}_{lc_config["container_specific"][container]["version"]} \n' + + f"Host is: {host_str} \n" + + f'analysis folder is: {lc_config["general"]["analysis_name"]} \n' + + f"##################################################### \n" + ) + + + analysis_dir="/home/tlei/Desktop" + + # Run mode + launchcontainer( + analysis_dir, + lc_config, + sub_ses_list, + parser_namespace + ) + + +# #%% +if __name__ == "__main__": + main() diff --git a/src/launchcontainers/utils.py b/src/launchcontainers/utils.py new file mode 100644 index 0000000..9ffadbf --- /dev/null +++ b/src/launchcontainers/utils.py 
@@ -0,0 +1,311 @@ +""" +MIT License + +Copyright (c) 2020-2023 Garikoitz Lerma-Usabiaga +Copyright (c) 2020-2022 Mengxing Liu +Copyright (c) 2022-2024 Leandro Lecca +Copyright (c) 2022-2023 Yongning Lei +Copyright (c) 2023 David Linhardt +Copyright (c) 2023 Iñigo Tellaetxe + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. +""" + +import argparse +from argparse import RawDescriptionHelpFormatter +import yaml +from yaml.loader import SafeLoader +import logging +import os +import shutil +import sys +import pandas as pd +import os.path as op +from os import makedirs +import requests +logger = logging.getLogger("Launchcontainers") + + +def die(*args): + logger.error(*args) + sys.exit(1) + + +def get_parser(): + """ + Input: + Parse command line inputs + + Returns: + a dict stores information about the cmd input + + """ + parser = argparse.ArgumentParser( + prog="Launchcontainers", + description=""" + This python program helps you analysis MRI data through different containers, + Before you make use of this program, please prepare the environment, edit the required config files, to match your analysis demand. 
\n + SAMPLE CMD LINE COMMAND \n\n + ###########STEP1############# \n + To begin the analysis, you need to first prepare and check the input files by typing this command in your bash prompt: + python path/to/the/launchcontianer.py -lcc path/to/launchcontainer_config.yaml -ssl path/to/subject_session_info.txt + -cc path/to/container_specific_config.json \n + ##--cc note, for the case of rtp-pipeline, you need to input two paths, one for config.json and one for tractparm.csv \n\n + ###########STEP2############# \n + After you have done step 1, all the config files are copied to BIDS/sub/ses/analysis/ directory + When you are confident everything is there, press up arrow to recall the command in STEP 1, and just add --run_lc after it. \n\n + + We add lots of check in the script to avoid program breakdowns. if you found new bugs while running, do not hesitate to contact us""", + formatter_class=RawDescriptionHelpFormatter, + ) + + + parser.add_argument( + "-lcc", + "--lc_config", + type=str, + # default="", + help="path to the config file", + ) + parser.add_argument( + "-ssl", + "--sub_ses_list", + type=str, + # default="", + help="path to the subSesList", + ) + parser.add_argument( + "-cc", + "--container_specific_config", + type=str, + # default=["/export/home/tlei/tlei/PROJDATA/TESTDATA_LC/Testing_02/BIDS/config.json"], + help="path to the container specific config file, it stores the parameters for the container." + ) + + parser.add_argument( + "--run_lc", + action="store_true", + help="if you type --run_lc, the entire program will be launched, jobs will be send to \ + cluster and launch the corresponding container you suggest in config_lc.yaml. \ + We suggest that the first time you run launchcontainer.py, leave this argument empty. 
\ + then the launchcontainer.py will prepare \ + all the input files for you and print the command you want to send to container, after you \ + check all the configurations are correct and ready, you type --run_lc to make it run", + ) + + # parser.add_argument( + # "--quite", + # action="store_true", + # help="if you want to open quite mode, type --quite, then it will print you only the warning level ", + # ) + parser.add_argument( + "--verbose", + action="store_true", + help="if you want to open verbose mode, type --verbose, the the level will be info", + ) + parser.add_argument( + "--debug", + action="store_true", + help="if you want to find out what is happening of particular step, --type debug, this will print you more detailed information", + ) + parser.add_argument( + '--download_configs', + action="store_true", + help='Path to download the configs') + + parser.add_argument( + '--gen_subseslist', + action="store_true", + help='if you want to generate a template subseslist based on the sub and session you provide') + + parser.add_argument("-sub", nargs='+', help="List of subjects") + parser.add_argument("-ses", nargs='+', help="List of sessions") + if len(sys.argv) == 1: + parser.print_help(sys.stderr) + sys.exit(1) + + parse_dict = vars(parser.parse_args()) + parse_namespace = parser.parse_args() + + return parse_namespace, parse_dict + + +def read_yaml(path_to_config_file): + """ + Input: + the path to the config file + + Returns + a dictionary that contains all the config info + + """ + with open(path_to_config_file, "r") as v: + config = yaml.load(v, Loader=SafeLoader) + + return config + + +def read_df(path_to_df_file): + """ + Input: + path to the subject and session list txt file + + Returns + a dataframe + + """ + outputdf = pd.read_csv(path_to_df_file, sep=",", dtype=str) + try: + num_of_true_run = len(outputdf.loc[outputdf['RUN']=="True"]) + except: + num_of_true_run=None + logger.warn(f"The df you are reading is not subseslist") + """ # Print the 
result + logger.info( + "\n" + + "#####################################################\n" + + f"The dataframe{path_to_df_file} is successfully read\n" + + f"The DataFrame has {num_rows} rows \n" + + "#####################################################\n" + ) + """ + return outputdf,num_of_true_run + +def setup_logger(print_command_only, verbose=False, debug=False, log_dir=None, log_filename=None): + ''' + stream_handler_level: str, optional + if no input, it will be default at INFO level, this will be the setting for the command line logging + + verbose: bool, optional + debug: bool, optional + log_dir: str, optional + if no input, there will have nothing to be saved in log file but only the command line output + + log_filename: str, optional + the name of your log_file. + + ''' + # set up the lowest level for the logger first, so that all the info will be get + logger.setLevel(logging.DEBUG) + + + # set up formatter and handler so that the logging info can go to stream or log files + # with specific format + log_formatter = logging.Formatter( + "%(asctime)s (%(name)s):[%(levelname)s] %(module)s - %(funcName)s() - line:%(lineno)d $ %(message)s ", + datefmt="%Y-%m-%d %H:%M:%S", + ) + + stream_formatter = logging.Formatter( + "(%(name)s):[%(levelname)s] %(module)s:%(funcName)s:%(lineno)d %(message)s" + ) + # Define handler and formatter + stream_handler = logging.StreamHandler() + stream_handler.setFormatter(stream_formatter) + if verbose: + stream_handler.setLevel(logging.INFO) + elif print_command_only: + stream_handler.setLevel(logging.CRITICAL) + elif debug: + stream_handler.setLevel(logging.DEBUG) + else: + stream_handler.setLevel(logging.WARNING) + logger.addHandler(stream_handler) + + if log_dir: + if not os.path.isdir(log_dir): + makedirs(log_dir) + + + file_handler_info = ( + logging.FileHandler(op.join(log_dir, f'{log_filename}_info.log'), mode='a') + ) + file_handler_error = ( + logging.FileHandler(op.join(log_dir, f'{log_filename}_error.log'), 
mode='a') + ) + file_handler_info.setFormatter(log_formatter) + file_handler_error.setFormatter(log_formatter) + + file_handler_info.setLevel(logging.INFO) + file_handler_error.setLevel(logging.ERROR) + logger.addHandler(file_handler_info) + logger.addHandler(file_handler_error) + + + return logger +# %% generic function shared in the program +def copy_file(src_file, dst_file, force): + logger.info("\n" + "#####################################################\n") + if not os.path.isfile(src_file): + logger.error(" An error occurred") + raise FileExistsError("the source file is not here") + + logger.info("\n" + f"---start copying {src_file} to {dst_file} \n") + try: + if ((not os.path.isfile(dst_file)) or (force)) or ( + os.path.isfile(dst_file) and force + ): + shutil.copy(src_file, dst_file) + logger.info( + "\n" + + f"---{src_file} has been successfully copied to {os.path.dirname(src_file)} directory \n" + + f"---REMEMBER TO CHECK/EDIT TO HAVE THE CORRECT PARAMETERS IN THE FILE\n" + ) + elif os.path.isfile(dst_file) and not force: + logger.warning( + "\n" + f"---copy are not operating, the {src_file} already exist" + ) + + # If source and destination are same + except shutil.SameFileError: + logger.error("***Source and destination represents the same file.\n") + raise + # If there is any permission issue + except PermissionError: + logger.error("***Permission denied.\n") + raise + # For other errors + except: + logger.error("***Error occurred while copying file\n") + raise + logger.info("\n" + "#####################################################\n") + + return dst_file +def get_launchcontainers_version(): + try: + from importlib.metadata import version + except ImportError: # For Python < 3.8 + from pkg_resources import get_distribution as version + + try: + return version('launchcontainers') + except Exception as e: + logger.error(f"Error getting launchcontainers version: {e}") + return None +def get_mocked_launchcontainers_version(): + # Specify the version 
def download_configs(version, download_path):
    """
    Download the example launchcontainers config for a given version from
    GitHub into download_path as {version}_config.yaml.

    Parameters
    ----------
    version : str
        Version folder name under example_configs/ in the repository.
    download_path : str
        Existing local directory the yaml file is written into.

    Success and failure (non-200 status) are logged; nothing is returned.
    """
    github_url = f"https://github.com/garikoitz/launchcontainers/raw/main/example_configs/{version}/example_config.yaml" #https://github.com/garikoitz/launchcontainers/tree/master/example_configs/0.3.0
    # A timeout keeps the call from hanging forever on a stalled connection
    # (requests.get blocks indefinitely by default).
    response = requests.get(github_url, timeout=60)

    if response.status_code == 200:
        config_path = os.path.join(download_path, f"{version}_config.yaml")
        with open(config_path, 'wb') as file:
            file.write(response.content)
        logger.info(f"Configs for version {version} downloaded successfully.")
    else:
        logger.error(f"Failed to download configs for version {version}. HTTP Status Code: {response.status_code}")


def generate_subseslist(lst_sub, lst_ses, output_file='sub_ses_list.txt'):
    """
    Write a subject/session table with all pipeline steps enabled.

    One row is produced per (sub, ses) combination, with RUN/anat/dwi/func
    all set to True, and the table is saved as comma-separated text.

    Parameters
    ----------
    lst_sub : list of str
        Subject labels.
    lst_ses : list of str
        Session labels.
    output_file : str, optional
        Destination path; defaults to 'sub_ses_list.txt' in the current
        directory (the original hard-coded value, kept for compatibility).
    """
    rows = [[sub, ses, True, True, True, True]
            for sub in lst_sub
            for ses in lst_ses]

    # Create the DataFrame and persist it
    df = pd.DataFrame(rows, columns=['sub', 'ses', 'RUN', 'anat', 'dwi', 'func'])
    df.to_csv(output_file, sep=',', index=False)
config[\"general\"][\"host\"]\n", - " njobs = config[\"host_options\"][host][\"njobs\"]\n", - " if njobs == \"\" or njobs is None:\n", - " njobs = 2\n", - " host_str = f\"{host}\"\n", - " if host == \"local\":\n", - " launch_mode = config[\"host_options\"][\"local\"][\"launch_mode\"]\n", - " valid_options = [\"serial\", \"parallel\",\"dask_worker\"]\n", - " if launch_mode in valid_options:\n", - " host_str = (\n", - " f\"{host_str}, and commands will be launched in {launch_mode} mode \"\n", - " f\"every {njobs} jobs. \"\n", - " f\"Serial is safe but it will take longer. \"\n", - " f\"If you launch in parallel be aware that some of the \"\n", - " f\"processes might be killed if the limit (usually memory) \"\n", - " f\"of the machine is reached. \"\n", - " )\n", - " else:\n", - " die(\n", - " f\"local:launch_mode {launch_mode} was passed, valid options are {valid_options}\"\n", - " )\n", - "\n", - " logger.warning(\n", - " \"\\n\"\n", - " + \"#####################################################\\n\"\n", - " + f\"Successfully read the config file {path_to_config_file} \\n\"\n", - " + f'Basedir is: {config[\"general\"][\"basedir\"]} \\n'\n", - " + f'Container is: {container}_{config[\"container_specific\"][container][\"version\"]} \\n'\n", - " + f\"Host is: {host_str} \\n\"\n", - " + f'analysis folder is: {config[\"general\"][\"analysis_name\"]} \\n'\n", - " + f\"##################################################### \\n\"\n", - " )\n", - " \"\"\"\n", - " return config" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [], - "source": [ - "lc_config=read_yaml('/Users/tiger/soft/launchcontainers/example_configs/0.3.0/example_config.yaml')\n", - "container=\"freesurferator\"" - ] - }, - { - "cell_type": "code", - "execution_count": 31, - "metadata": {}, - "outputs": [], - "source": [ - "yaml_info=lc_config[\"container_specific\"][container]\n", - "fs_json_keys=['pre_fs','control_points','annotfile', 'mniroizip']\n", - 
"fs_json_val=['pre_fs/existingFS.zip','control_points/control.dat','mniroizip/mniroizip.zip','annotfile/annotfile.zip']\n", - "\n", - "fs_json_dict= {key: value for key, value in zip(fs_json_keys, fs_json_val)}\n", - "json_template={'anat': \n", - " {'location': {\n", - " 'path': '/flywheel/v0/input/anat/T1.nii.gz', \n", - " 'name': 'T1.nii.gz',\n", - " },\n", - " 'base': 'file'}\n", - " }" - ] - }, - { - "cell_type": "code", - "execution_count": 36, - "metadata": {}, - "outputs": [], - "source": [ - "for key in fs_json_dict.keys():\n", - " if key in yaml_info.keys() and yaml_info[key]:\n", - " json_template[key] = {\n", - " 'location': {\n", - " 'path': op.join('/flywheel/v0/input', fs_json_dict[key]), \n", - " 'name': op.basename(fs_json_dict[key])\n", - " },\n", - " 'base': 'file'\n", - " }\n", - " if 'anat' in json_template.keys() and 'pre_fs' in json_template.keys():\n", - " json_template.pop('anat')" - ] - }, - { - "cell_type": "code", - "execution_count": 37, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'pre_fs': {'location': {'path': '/flywheel/v0/input/pre_fs/existingFS.zip',\n", - " 'name': 'existingFS.zip'},\n", - " 'base': 'file'},\n", - " 'control_points': {'location': {'path': '/flywheel/v0/input/control_points/control.dat',\n", - " 'name': 'control.dat'},\n", - " 'base': 'file'}}" - ] - }, - "execution_count": 37, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "json_template" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'pre_fs_True': 'pre_fs/existingFS.zip',\n", - " 'pre_fs_False': 'anat/T1.nii.gz',\n", - " 'control_points': 'control_points/control.dat',\n", - " 'annotfile': 'mniroizip/mniroizip.zip',\n", - " 'mniroizip': 'annotfile/annotfile.zip'}" - ] - }, - "execution_count": 17, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "fs_json_dict" - ] - }, - { - "cell_type": "code", 
- "execution_count": 20, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "dict_keys(['pre_fs_True', 'pre_fs_False', 'control_points', 'annotfile', 'mniroizip'])" - ] - }, - "execution_count": 20, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "fs_json_dict.keys()" - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "dict_keys(['version', 'pre_fs', 'control_points', 'source_path_fszip', 'precontainer_anat', 'anat_analysis_name', 'precontainer_fmriprep', 'fmriprep_analysis_name', 'prefs_zipname', 'prefs_unzipname', 'annotfile', 'mniroizip'])" - ] - }, - "execution_count": 22, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "yaml_info.keys()" - ] - }, - { - "cell_type": "code", - "execution_count": 40, - "metadata": {}, - "outputs": [], - "source": [ - "preproc_json_keys=['ANAT','BVAL','BVEC', 'DIFF','FSMASK']\n", - "preproc_json_val=['ANAT/T1.nii.gz','BVAL/dwiF.bval','BVEC/dwiF.bvec','DIFF/dwiF.nii.gz','FSMASK/brainmask.nii.gz']\n" - ] - }, - { - "cell_type": "code", - "execution_count": 59, - "metadata": {}, - "outputs": [], - "source": [ - "container=\"rtp2-pipeline\"\n", - "pipeline_json_keys=['anatomical','bval','bvec', 'dwi','fs','tractparams']\n", - "pipeline_json_val=['anatomical/T1.nii.gz','bval/dwi.bval','bvec/dwi.bvec','dwi/dwi.nii.gz','fs/fs.zip','tractparams/tractparams.csv']\n" - ] - }, - { - "cell_type": "code", - "execution_count": 65, - "metadata": {}, - "outputs": [], - "source": [ - "def get_config_dict(container,lc_config,rtp2_json_keys,rtp2_json_val):\n", - " config_info_dict = {}\n", - " yaml_info=lc_config[\"container_specific\"][container]\n", - " \n", - " rtp2_json_dict= {key: value for key, value in zip(rtp2_json_keys, rtp2_json_val)}\n", - "\n", - " \n", - " if container == \"freesurferator\":\n", - " config_json_extra={'anat': \n", - " {'location': {\n", - " 'path': 
'/flywheel/v0/input/anat/T1.nii.gz', \n", - " 'name': 'T1.nii.gz',\n", - " },\n", - " 'base': 'file'}\n", - " }\n", - " for key in rtp2_json_dict.keys():\n", - " if key in yaml_info.keys() and yaml_info[key]:\n", - " config_json_extra[key] = {\n", - " 'location': {\n", - " 'path': op.join('/flywheel/v0/input', rtp2_json_dict[key]), \n", - " 'name': op.basename(rtp2_json_dict[key])\n", - " },\n", - " 'base': 'file'\n", - " }\n", - " if 'anat' in config_json_extra.keys() and 'pre_fs' in config_json_extra.keys():\n", - " config_json_extra.pop('anat')\n", - "\n", - " else:\n", - " config_json_extra={}\n", - " for key in rtp2_json_dict.keys():\n", - " config_json_extra[key] = {\n", - " 'location': {\n", - " 'path': op.join('/flywheel/v0/input', rtp2_json_dict[key]), \n", - " 'name': op.basename(rtp2_json_dict[key])\n", - " },\n", - " 'base': 'file'\n", - " }\n", - " \n", - " config_info_dict['input']= config_json_extra \n", - " return config_info_dict" - ] - }, - { - "cell_type": "code", - "execution_count": 66, - "metadata": {}, - "outputs": [], - "source": [ - "config_info_dict=get_config_dict(container,lc_config,pipeline_json_keys,pipeline_json_val)" - ] - }, - { - "cell_type": "code", - "execution_count": 67, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'input': {'anatomical': {'location': {'path': '/flywheel/v0/input/anatomical/T1.nii.gz',\n", - " 'name': 'T1.nii.gz'},\n", - " 'base': 'file'},\n", - " 'bval': {'location': {'path': '/flywheel/v0/input/bval/dwi.bval',\n", - " 'name': 'dwi.bval'},\n", - " 'base': 'file'},\n", - " 'bvec': {'location': {'path': '/flywheel/v0/input/bvec/dwi.bvec',\n", - " 'name': 'dwi.bvec'},\n", - " 'base': 'file'},\n", - " 'dwi': {'location': {'path': '/flywheel/v0/input/dwi/dwi.nii.gz',\n", - " 'name': 'dwi.nii.gz'},\n", - " 'base': 'file'},\n", - " 'fs': {'location': {'path': '/flywheel/v0/input/fs/fs.zip',\n", - " 'name': 'fs.zip'},\n", - " 'base': 'file'},\n", - " 'tractparams': {'location': {'path': 
'/flywheel/v0/input/tractparams/tractparams.csv',\n", - " 'name': 'tractparams.csv'},\n", - " 'base': 'file'}}}" - ] - }, - "execution_count": 67, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "config_info_dict" - ] - }, - { - "cell_type": "code", - "execution_count": 71, - "metadata": {}, - "outputs": [ - { - "ename": "KeyError", - "evalue": "'cc'", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[71], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43mconfig_info_dict\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mcc\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m]\u001b[49m\n", - "\u001b[0;31mKeyError\u001b[0m: 'cc'" - ] - } - ], - "source": [ - "config_info_dict['cc']" - ] - }, - { - "cell_type": "code", - "execution_count": 74, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "in\n" - ] - } - ], - "source": [ - "if \"input\" in config_info_dict:\n", - " print(\"in\")" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "import os.path as op\n", - "mni_roi=\"/home/tlei/tlei/FG.nii.gz\"\n", - "file_name = op.basename(mni_roi)\n", - "file_ext = op.splitext(file_name)[1]" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'FG.nii.gz'" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "file_name" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'.gz'" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "file_ext" - ] - }, - 
{ - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "('FG.nii', '.gz')" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "op.splitext(file_name)" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [], - "source": [ - "lc_config_path='/media/tlei/data/soft/launchcontainers/example_configs/0.3.tlei/new_lc_config.yaml'\n", - "lc_config=read_yaml(lc_config_path)" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'general': {'basedir': '/home/tlei/tlei/LMC_DWI_course',\n", - " 'bidsdir_name': 'BIDS',\n", - " 'containerdir': '/home/tlei/tlei/LMC_DWI_course/containers',\n", - " 'container': 'freesurferator',\n", - " 'analysis_name': 'test_on_msi01_for_preparedwi',\n", - " 'host': 'local',\n", - " 'force': True,\n", - " 'print_command_only': False,\n", - " 'log_dir': 'analysis_dir',\n", - " 'log_filename': 'lc_log'},\n", - " 'container_specific': {'anatrois': {'version': '4.6.1-7.3.2',\n", - " 'pre_fs': True,\n", - " 'source_fszip': 'anatrois',\n", - " 'prefs_dir_name': 'anatrois_4.6.1-7.3.2',\n", - " 'prefs_analysis_name': '6prefs_from_fmriprep',\n", - " 'prefs_zipname': '^anatrois_S.*\\\\.zip$',\n", - " 'annotfile': None,\n", - " 'mniroizip': None},\n", - " 'rtppreproc': {'version': '1.2.0-3.0.3',\n", - " 'rpe': True,\n", - " 'precontainer_anat': 'anatrois_4.6.1-7.3.2',\n", - " 'anat_analysis_name': 'fMRIprep_brainmask'},\n", - " 'rtp-pipeline': {'version': '4.5.2-3.0.3',\n", - " 'precontainer_anat': 'anatrois_4.6.1-7.3.2',\n", - " 'anat_analysis_name': 'fulltract_anatrerun',\n", - " 'precontainer_preproc': 'rtppreproc_1.2.0-3.0.3',\n", - " 'preproc_analysis_name': '6sub_wrongbvec'},\n", - " 'freesurferator': {'version': '0.2.0-7.4.1rc19',\n", - " 'pre_fs': True,\n", - " 'source_fszip': 'anatrois',\n", - " 'prefs_dir_name': 
'anatrois_4.5.3-7.3.2',\n", - " 'prefs_analysis_name': 'control_points_01',\n", - " 'prefs_zipname': '^anatrois_S.*\\\\.zip$',\n", - " 'control_points': False,\n", - " 'prefs_unzipname': 'S.*$',\n", - " 'annotfile': None,\n", - " 'mniroizip': None},\n", - " 'rtp2-preproc': {'version': '0.1.0_3.0.4rc31',\n", - " 'precontainer_anat': 'anatrois_4.6.1-7.3.2',\n", - " 'anat_analysis_name': 'fMRIprep_brainmask',\n", - " 'rpe': True,\n", - " 'qmap_nifti': None,\n", - " 'qmap_zip': None},\n", - " 'rtp2-pipeline': {'version': '0.1.0_3.0.4rc20',\n", - " 'precontainer_anat': 'anatrois_4.6.1-7.3.2',\n", - " 'anat_analysis_name': 'fulltract_anatrerun',\n", - " 'precontainer_preproc': 'rtppreproc_1.2.0-3.0.3',\n", - " 'preproc_analysis_name': '6sub_wrongbvec',\n", - " 'tractparams': None,\n", - " 'fsmask': None,\n", - " 'qmap_zip': None}},\n", - " 'host_options': {'BCBL': {'use_module': False,\n", - " 'apptainer': 'apptainer/latest',\n", - " 'maxwall': 10,\n", - " 'manager': 'sge',\n", - " 'name': 'anatrois',\n", - " 'cores': 6,\n", - " 'memory': '32G',\n", - " 'processes': 1,\n", - " 'interface': 'lo',\n", - " 'death-timeout': 100,\n", - " 'local-directory': None,\n", - " 'queue': 'long.q',\n", - " 'project': None,\n", - " 'walltime': \"25:30:00'\",\n", - " 'extra': [],\n", - " 'env-extra': [],\n", - " 'job-extra': [],\n", - " 'resource-spec': None,\n", - " 'bind_options': ['/bcbl', '/tmp', '/scratch']},\n", - " 'DIPC': {'memory': '32G',\n", - " 'queue': 'regular',\n", - " 'cores': 24,\n", - " 'walltime': '22:00:00',\n", - " 'use_module': False,\n", - " 'apptainer': 'Singularity/3.5.3-GCC-8.3.0',\n", - " 'manager': 'slurm',\n", - " 'system': 'scratch',\n", - " 'name': 'anatrois',\n", - " 'tmpdir': '/scratch/llecca/tmp',\n", - " 'bind_options': ['/scratch']},\n", - " 'local': {'use_module': False,\n", - " 'apptainer': 'apptainer/latest',\n", - " 'bind_options': ['/bcbl', '/tmp', '/scratch', '/export'],\n", - " 'manager': 'local',\n", - " 'launch_mode': 'serial',\n", - " 'njobs': 
5,\n", - " 'memory_limit': '32GiB',\n", - " 'threads_per_worker': 6}}}" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "lc_config" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [], - "source": [ - " # read parameters from lc_config\n", - " basedir = lc_config['general']['basedir']\n", - " container = lc_config['general']['container']\n", - " force = lc_config[\"general\"][\"force\"]\n", - " analysis_name= lc_config['general']['analysis_name']\n", - " run_lc = False\n", - " force= force or run_lc \n", - " version = lc_config[\"container_specific\"][container][\"version\"] \n", - " bidsdir_name = lc_config['general']['bidsdir_name'] \n", - " container_folder = op.join(basedir, bidsdir_name,'derivatives',f'{container}_{version}')\n", - " if not op.isdir(container_folder):\n", - " os.makedirs(container_folder)" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [], - "source": [ - " analysis_dir = op.join(\n", - " container_folder,\n", - " f\"analysis-{analysis_name}\",\n", - " )\n", - " if not op.isdir(analysis_dir):\n", - " os.makedirs(analysis_dir)" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [], - "source": [ - " container_configs_under_analysis_folder = op.join(analysis_dir,os.path.basename('/media/tlei/data/soft/launchcontainers/example_configs/container_specific_example_configs/freesurferator/0.2.0_7.4.1rc19/example_config.json'))" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'/home/tlei/tlei/LMC_DWI_course/BIDS/derivatives/freesurferator_0.2.0-7.4.1rc19/analysis-test_on_msi01_for_preparedwi/example_config.json'" - ] - }, - "execution_count": 18, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "container_configs_under_analysis_folder" - ] - }, - { - 
"cell_type": "code", - "execution_count": 33, - "metadata": {}, - "outputs": [], - "source": [ - " container_specific_configs={}\n", - " container_specific_configs['config']=container_configs_under_analysis_folder\n", - " " - ] - }, - { - "cell_type": "code", - "execution_count": 39, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'config': '/home/tlei/tlei/LMC_DWI_course/BIDS/derivatives/freesurferator_0.2.0-7.4.1rc19/analysis-test_on_msi01_for_preparedwi/example_config.json',\n", - " 'freesurferator': {'ANAT': 'ANAT/T1.nii.gz',\n", - " 'BVAL': 'BVAL/dwiF.bval',\n", - " 'BVEC': 'BVEC/dwiF.bvec',\n", - " 'DIFF': 'DIFF/dwiF.nii.gz',\n", - " 'FSMASK': 'FSMASK/brain.nii.gz'}}" - ] - }, - "execution_count": 39, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "container_specific_configs" - ] - }, - { - "cell_type": "code", - "execution_count": 41, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "dict_keys(['ANAT', 'BVAL', 'BVEC', 'DIFF', 'FSMASK'])" - ] - }, - "execution_count": 41, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "container_specific_configs[container]['anat']['location']['name']" - ] - }, - { - "cell_type": "code", - "execution_count": 36, - "metadata": {}, - "outputs": [], - "source": [ - " preproc_json_keys=['ANAT','BVAL','BVEC', 'DIFF','FSMASK']\n", - " preproc_json_val=['ANAT/T1.nii.gz','BVAL/dwiF.bval','BVEC/dwiF.bvec','DIFF/dwiF.nii.gz','FSMASK/brain.nii.gz']\n", - " \n", - " container_specific_configs[container]= {key: value for key, value in zip(preproc_json_keys, preproc_json_val)}\n", - " " - ] - }, - { - "cell_type": "code", - "execution_count": 38, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'ANAT': 'ANAT/T1.nii.gz',\n", - " 'BVAL': 'BVAL/dwiF.bval',\n", - " 'BVEC': 'BVEC/dwiF.bvec',\n", - " 'DIFF': 'DIFF/dwiF.nii.gz',\n", - " 'FSMASK': 'FSMASK/brain.nii.gz'}" - ] - }, - "execution_count": 38, - "metadata": {}, - 
"output_type": "execute_result" - } - ], - "source": [ - "container_specific_configs[container]" - ] - }, - { - "cell_type": "code", - "execution_count": 31, - "metadata": {}, - "outputs": [], - "source": [ - " def process_optional_input(container,file_path, analysis_dir, option=None):\n", - " if os.path.isfile(file_path):\n", - " logger.info(\"\\n\"\n", - " +f\" You have choossen to pass {file_path} to {container}, it will be first copy to {analysis_dir}\")\n", - " else:\n", - " logger.error(\"\\n\"\n", - " f\"{file_path} does not exist\") \n", - " \n", - " file_name = os.path.basename(file_path)\n", - " file_ext = os.path.splitext(file_name)[1]\n", - " \n", - " if container in ['anatrois','freesurferator']:\n", - " if file_ext in ['.nii', '.gz','.zip']:\n", - " do.copy_file(file_path, os.path.join(analysis_dir, file_name), force) \n", - " else:\n", - " raise ValueError(\"Unsupported file type.\")\n", - " if container in ['rtp2-preproc']:\n", - " if file_ext in ['.nii', '.gz']:\n", - " do.copy_file(file_path, os.path.join(analysis_dir, file_name), force) \n", - " else:\n", - " raise ValueError(\"Unsupported file type.\")\n", - " \n", - " if container in ['rtp2-pipeline']:\n", - " if option == \"tractparams\":\n", - " if file_ext in ['.csv']: \n", - " do.copy_file(file_path, os.path.join(analysis_dir, file_name), force)\n", - " else:\n", - " raise ValueError(\"Unsupported file type.\") \n", - " if option == \"fsmask\":\n", - " if file_ext in ['.nii', '.gz']:\n", - " do.copy_file(file_path, os.path.join(analysis_dir, file_name), force) \n", - " else:\n", - " raise ValueError(\"Unsupported file type.\") \n", - " if option == \"qmap_zip\":\n", - " if file_ext == '.zip':\n", - " do.copy_file(file_path, os.path.join(analysis_dir, file_name), force) \n", - " else:\n", - " raise ValueError(\"Unsupported file type.\")\n", - " return file_name\n", - " \n", - " # copy annotfile or mnizip file to analysis folder\n", - " if container in ['anatrois']:\n", - " pre_fs= 
lc_config[\"container_specific\"][container][\"pre_fs\"]\n", - " annotfile = lc_config[\"container_specific\"][container][\"annotfile\"]\n", - " mniroizip = lc_config[\"container_specific\"][container][\"mniroizip\"]\n", - " if pre_fs:\n", - " file_name=\"existingFS.zip\"\n", - " container_specific_configs[container]['pre_fs']=f\"pre_fs/{file_name}\" \n", - " if annotfile:\n", - " file_name=process_optional_input(container,annotfile,analysis_dir)\n", - " container_specific_configs[container]['annotfile']=f\"annotfile/{file_name}\"\n", - " if mniroizip:\n", - " file_name=process_optional_input(container,mniroizip,analysis_dir)\n", - " container_specific_configs[container]['mniroizip']=f\"mniroizip/{file_name}\"\n", - " # copy annotfile or mnizip file to analysis folder\n", - " if container in ['freesurferator']:\n", - " pre_fs= lc_config[\"container_specific\"][container][\"pre_fs\"]\n", - " control_points= lc_config[\"container_specific\"][container][\"control_points\"]\n", - " annotfile = lc_config[\"container_specific\"][container][\"annotfile\"]\n", - " mniroizip = lc_config[\"container_specific\"][container][\"mniroizip\"]\n", - " if pre_fs:\n", - " file_name=\"existingFS.zip\"\n", - " container_specific_configs[container]['pre_fs']=f\"pre_fs/{file_name}\"\n", - " if control_points:\n", - " file_name=\"control.dat\"\n", - " container_specific_configs[container]['control_points']=f\"control_points/{file_name}\" \n", - " if annotfile:\n", - " file_name=process_optional_input(container,annotfile,analysis_dir)\n", - " container_specific_configs[container]['annotfile']=f\"annotfile/{file_name}\"\n", - " if mniroizip:\n", - " file_name=process_optional_input(container,mniroizip,analysis_dir)\n", - " container_specific_configs[container]['mniroizip']=f\"mniroizip/{file_name}\"\n", - " # copy qmap.nii of qmap.nii.gz to analysis folder\n", - " if container in ['rtppreproc']:\n", - " preproc_json_keys=['ANAT','BVAL','BVEC', 'DIFF','FSMASK']\n", - " 
preproc_json_val=['ANAT/T1.nii.gz','BVAL/dwiF.bval','BVEC/dwiF.bvec','DIFF/dwiF.nii.gz','FSMASK/brain.nii.gz']\n", - " container_specific_configs[container]= {key: value for key, value in zip(preproc_json_keys, preproc_json_val)}\n", - " rpe=lc_config[\"container_specific\"][container][\"rpe\"]\n", - " if rpe:\n", - " container_specific_configs[container]['RBVC']= 'RBVC/dwiR.bvec'\n", - " container_specific_configs[container]['RBVL']= 'RBVL/dwiR.bval'\n", - " container_specific_configs[container]['RDIF']= 'RDIF/dwiR.nii.gz' \n", - " if container in ['rtp2-preproc']:\n", - " preproc_json_keys=['ANAT','BVAL','BVEC', 'DIFF','FSMASK']\n", - " preproc_json_val=['ANAT/T1.nii.gz','BVAL/dwiF.bval','BVEC/dwiF.bvec','DIFF/dwiF.nii.gz','FSMASK/brain.nii.gz']\n", - " container_specific_configs[container]= {key: value for key, value in zip(preproc_json_keys, preproc_json_val)}\n", - " \n", - " rpe=lc_config[\"container_specific\"][container][\"rpe\"]\n", - " qmap=lc_config[\"container_specific\"][container][\"qmap_nifti\"]\n", - " if rpe:\n", - " container_specific_configs[container]['RBVC']= 'RBVC/dwiR.bvec'\n", - " container_specific_configs[container]['RBVL']= 'RBVL/dwiR.bval'\n", - " container_specific_configs[container]['RDIF']= 'RDIF/dwiR.nii.gz' \n", - " if qmap:\n", - " file_name=process_optional_input(container,qmap,analysis_dir)\n", - " container_specific_configs[container]['qmap']=f\"qmap/{file_name}\"\n", - " \n", - " if container in ['rtp-pipeline']:\n", - " pipeline_json_keys=['anatomical','bval','bvec', 'dwi','fs']\n", - " pipeline_json_val=['anatomical/T1.nii.gz','bval/dwi.bval','bvec/dwi.bvec','dwi/dwi.nii.gz','fs/fs.zip']\n", - " container_specific_configs[container]= {key: value for key, value in zip(pipeline_json_keys, pipeline_json_val)}\n", - " \n", - " tractparams=lc_config[\"container_specific\"][container][\"tractparams\"]\n", - " if tractparams:\n", - " file_name=process_optional_input(container,tractparams,analysis_dir,\"tractparams\") \n", - " 
container_specific_configs[container]['tractparams']=f\"tractparams/{file_name}\"\n", - " if container in ['rtp2-pipeline']:\n", - " pipeline_json_keys=['anatomical','bval','bvec', 'dwi','fs']\n", - " pipeline_json_val=['anatomical/T1.nii.gz','bval/dwi.bval','bvec/dwi.bvec','dwi/dwi.nii.gz','fs/fs.zip']\n", - " container_specific_configs[container]= {key: value for key, value in zip(pipeline_json_keys, pipeline_json_val)} \n", - " tractparams=lc_config[\"container_specific\"][container][\"tractparams\"]\n", - " fsmask=lc_config[\"container_specific\"][container][\"fsmask\"]\n", - " qmap_zip=lc_config[\"container_specific\"][container][\"qmap_zip\"]\n", - " if tractparams:\n", - " file_name=process_optional_input(container,tractparams,analysis_dir,\"tractparams\")\n", - " container_specific_configs[container]['tractparams']=f\"tractparams/{file_name}\"\n", - " if fsmask:\n", - " file_name=process_optional_input(container,fsmask,analysis_dir,\"fsmask\")\n", - " container_specific_configs[container]['fsmask']=f\"fsmask/{file_name}\"\n", - " if qmap_zip:\n", - " file_name=process_optional_input(container,qmap_zip,analysis_dir,\"qmap_zip\") \n", - " container_specific_configs[container]['qmap_zip']=f\"qmap_zip/{file_name}\" " - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'freesurferator'" - ] - }, - "execution_count": 23, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "container" - ] - }, - { - "cell_type": "code", - "execution_count": 42, - "metadata": {}, - "outputs": [], - "source": [ - "dict= {\n", - " \"inputs\": {\n", - " \"anat\": {\n", - " \"location\": {\n", - " \"path\": \"/flywheel/v0/input/anat/T1.nii.gz\",\n", - " \"name\": \"T1.nii.gz\"\n", - " },\n", - " \"base\": \"file\"\n", - " },\n", - " \"pre_fs\": {\n", - " \"location\": {\n", - " \"path\": \"/flywheel/v0/input/pre_fs/existingFS.zip\",\n", - " \"name\": \"existingFS.zip\"\n", - " },\n", 
- " \"base\": \"file\"\n", - " },\n", - " \"control_points\": {\n", - " \"location\": {\n", - " \"path\": \"/flywheel/v0/input/control_points/control.dat\",\n", - " \"name\": \"control.dat\"\n", - " },\n", - " \"base\": \"file\"\n", - " },\n", - " \"mniroizip\": {\n", - " \"location\": {\n", - " \"path\": \"/flywheel/v0/input/mniroizip/FG.nii.gz\",\n", - " \"name\": \"FG.nii.gz\"\n", - " },\n", - " \"base\": \"file\"\n", - " }\n", - "}\n", - "}" - ] - }, - { - "cell_type": "code", - "execution_count": 44, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'T1.nii.gz'" - ] - }, - "execution_count": 44, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "dict['inputs']['anat']['location']['name']" - ] - }, - { - "cell_type": "code", - "execution_count": 50, - "metadata": {}, - "outputs": [], - "source": [ - "\n", - "from bids import BIDSLayout\n", - "layout=BIDSLayout(os.path.join('/home/tlei/tlei/multishell_dwi/nifti'))\n" - ] - }, - { - "cell_type": "code", - "execution_count": 57, - "metadata": {}, - "outputs": [], - "source": [ - "dwi_dir=layout.get(subject= \"SM003\", session=\"001\", extension='nii.gz',suffix= 'dwi', direction=\"AP\", return_type='filename')" - ] - }, - { - "cell_type": "code", - "execution_count": 58, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "['/home/tlei/tlei/multishell_dwi/nifti/sub-SM003/ses-001/dwi/sub-SM003_ses-001_acq-b1000_dir-AP_dwi.nii.gz',\n", - " '/home/tlei/tlei/multishell_dwi/nifti/sub-SM003/ses-001/dwi/sub-SM003_ses-001_acq-b2500_dir-AP_dwi.nii.gz']" - ] - }, - "execution_count": 58, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "dwi_dir" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "if len(dwi_dir) > 1:\n", - " dwi_acq = [f for f in dwi_dir if 'acq-' in f]\n", - " if len(dwi_acq) == 0:\n", - " print(f\"No files with different acq- to concatenate.\\n\")\n", - 
" elif len(dwi_acq) == 1:\n", - " print(f\"Found only {dwi_acq[0]} to concatenate. There must be at least two files with different acq.\\n\")\n", - " elif len(dwi_acq) > 1:\n", - " if not os.path.isfile(srcFileDwi_nii):\n", - " print(f\"Concatenating with mrcat of mrtrix3 these files: {dwi_acq} in: {srcFileDwi_nii} \\n\")\n", - " dwi_acq.sort()\n", - " sp.run(['mrcat',*dwi_acq,srcFileDwi_nii])" - ] - }, - { - "cell_type": "code", - "execution_count": 59, - "metadata": {}, - "outputs": [ - { - "ename": "SyntaxError", - "evalue": "can't use starred expression here (162654131.py, line 1)", - "output_type": "error", - "traceback": [ - "\u001b[0;36m Cell \u001b[0;32mIn[59], line 1\u001b[0;36m\u001b[0m\n\u001b[0;31m *dwi_acq\u001b[0m\n\u001b[0m ^\u001b[0m\n\u001b[0;31mSyntaxError\u001b[0m\u001b[0;31m:\u001b[0m can't use starred expression here\n" - ] - } - ], - "source": [ - "*dwi_acq" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "lc", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.14" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -}