set up file logger with timestamp #920

Draft · wants to merge 3 commits into base: master
Changes from 1 commit
2 changes: 1 addition & 1 deletion AFQ/api/bundle_dict.py
@@ -168,7 +168,7 @@ def __init__(self,
 for bundle_name in bundle_info:
 self.bundle_names.append(bundle_name)

-self.logger = logging.getLogger('AFQ')
+self.logger = logging.getLogger(__name__)

 if self.seg_algo == "afq":
 if "FP" in self.bundle_names\
2 changes: 1 addition & 1 deletion AFQ/api/participant.py
@@ -88,7 +88,7 @@ def __init__(self,
 "unrecognized parameter tractography_params, "
 "did you mean tracking_params ?"))

-self.logger = logging.getLogger('AFQ')
+self.logger = logging.getLogger(__name__)
 self.output_dir = output_dir

 self.kwargs = dict(
12 changes: 6 additions & 6 deletions AFQ/data/fetch.py
@@ -163,7 +163,7 @@ def read_callosum_templates(as_img=True, resample_to=False):
 dict with: keys: names of template ROIs and values: nibabel Nifti1Image
 objects from each of the ROI nifti files.
 """
-logger = logging.getLogger('AFQ')
+logger = logging.getLogger(__name__)

 logger.debug('loading callosum templates')
 tic = time.perf_counter()
@@ -555,7 +555,7 @@ def read_templates(as_img=True, resample_to=False):
 dict with: keys: names of template ROIs and values: nibabel Nifti1Image
 objects from each of the ROI nifti files.
 """
-logger = logging.getLogger('AFQ')
+logger = logging.getLogger(__name__)
 logger.debug('loading AFQ templates')
 tic = time.perf_counter()

@@ -649,7 +649,7 @@ def read_or_templates(as_img=True, resample_to=False):
 dict with: keys: names of template ROIs and values: nibabel Nifti1Image
 objects from each of the ROI nifti files.
 """
-logger = logging.getLogger('AFQ')
+logger = logging.getLogger(__name__)

 logger.debug('loading or templates')
 tic = time.perf_counter()
@@ -795,7 +795,7 @@ def organize_stanford_data(path=None, clear_previous_afq=False):
 If clear_previous_afq is True and there is an afq folder in derivatives,
 it will be removed.
 """
-logger = logging.getLogger('AFQ')
+logger = logging.getLogger(__name__)

 # fetches data for first subject and session
 logger.info('fetching Stanford HARDI data')
@@ -1196,7 +1196,7 @@ def bundles_to_aal(bundles, atlas=None):
 targets[bundle + "_" + region_name] = nib.Nifti1Image(
 aal_roi, atlas.affine)
 else:
-logger = logging.getLogger('AFQ')
+logger = logging.getLogger(__name__)
 logger.warning(f"Segmentation end points undefined for {bundle},"
 + " continuing without end points")
 targets[bundle + "_start"] = None
@@ -1317,7 +1317,7 @@ def read_ukbb_fa_template(mask=True):
 )

 if not op.exists(fa_path):
-logger = logging.getLogger('AFQ')
+logger = logging.getLogger(__name__)
 logger.warning(
 "Downloading brain MRI group mean statistics from UK Biobank. "
 + "This download is approximately 1.1 GB. "
2 changes: 1 addition & 1 deletion AFQ/definitions/mapping.py
@@ -29,7 +29,7 @@
 __all__ = ["FnirtMap", "SynMap", "SlrMap", "AffMap", "ItkMap"]


-logger = logging.getLogger('AFQ')
+logger = logging.getLogger(__name__)


 # For map defintions, get_for_subses should return only the mapping
2 changes: 1 addition & 1 deletion AFQ/segmentation.py
@@ -28,7 +28,7 @@
 __all__ = ["Segmentation", "clean_bundle", "clean_by_endpoints"]


-logger = logging.getLogger('AFQ')
+logger = logging.getLogger(__name__)


 def _resample_tg(tg, n_points):
3 changes: 1 addition & 2 deletions AFQ/tasks/decorators.py
@@ -19,8 +19,7 @@
 __all__ = ["as_file", "as_fit_deriv", "as_img"]


-logger = logging.getLogger('AFQ')
-logger.setLevel(logging.INFO)
+logger = logging.getLogger(__name__)


 # get args and kwargs from function
2 changes: 1 addition & 1 deletion AFQ/tasks/mapping.py
@@ -19,7 +19,7 @@
 from dipy.io.stateful_tractogram import Space


-logger = logging.getLogger('AFQ')
+logger = logging.getLogger(__name__)


 @pimms.calc("b0_warped")
2 changes: 1 addition & 1 deletion AFQ/tasks/segmentation.py
@@ -24,7 +24,7 @@
 from dipy.tracking.streamline import set_number_of_points, values_from_volume


-logger = logging.getLogger('AFQ')
+logger = logging.getLogger(__name__)


 @pimms.calc("bundles")
2 changes: 1 addition & 1 deletion AFQ/tasks/tractography.py
@@ -11,7 +11,7 @@
 from AFQ.tasks.utils import get_default_args
 from AFQ.definitions.image import ScalarImage

-logger = logging.getLogger('AFQ')
+logger = logging.getLogger(__name__)


 @pimms.calc("seed")
2 changes: 1 addition & 1 deletion AFQ/tasks/viz.py
@@ -17,7 +17,7 @@

 from plotly.subplots import make_subplots

-logger = logging.getLogger('AFQ')
+logger = logging.getLogger(__name__)


 def _viz_prepare_vol(vol, xform, mapping, scalar_dict):
2 changes: 1 addition & 1 deletion AFQ/tractography.py
@@ -99,7 +99,7 @@ def track(params_file, directions="prob", max_angle=30., sphere=None,
 Descoteaux, M. Towards quantitative connectivity analysis: reducing
 tractography biases. NeuroImage, 98, 266-278, 2014.
 """
-logger = logging.getLogger('AFQ')
+logger = logging.getLogger(__name__)

 logger.info("Loading Image...")
 if isinstance(params_file, str):
3 changes: 3 additions & 0 deletions AFQ/utils/bin.py
@@ -3,6 +3,8 @@
 import platform
 import os.path as op
 import os
+import logging
+logger = logging.getLogger(__name__)

 from argparse import ArgumentParser
 from funcargparse import FuncArgParser
@@ -298,6 +300,7 @@ def parse_config_run_afq(toml_file, default_arg_dict, to_call="export_all",
 with open(afq_metadata_file, 'w') as ff:
 ff.write(dict_to_toml(default_arg_dict))

+logger.info("Starting to create the GroupAFQ object")
 myafq = GroupAFQ(bids_path, **kwargs)

 # call user specified function:
2 changes: 1 addition & 1 deletion AFQ/utils/volume.py
@@ -11,7 +11,7 @@
 from dipy.tracking.streamline import select_random_set_of_streamlines
 import dipy.tracking.utils as dtu

-logger = logging.getLogger('AFQ')
+logger = logging.getLogger(__name__)


 def transform_inverse_roi(roi, mapping, bundle_name="ROI"):
2 changes: 1 addition & 1 deletion AFQ/viz/fury_backend.py
@@ -15,7 +15,7 @@
 except (ImportError, ModuleNotFoundError):
 raise ImportError(vut.viz_import_msg_error("fury"))

-viz_logger = logging.getLogger("AFQ")
+viz_logger = logging.getLogger(__name__)


 def _inline_interact(scene, inline, interact):
2 changes: 1 addition & 1 deletion AFQ/viz/plot.py
@@ -334,7 +334,7 @@ def __init__(self, out_folder, csv_fnames, names, is_special="",
 Can be 'ICC1, 'ICC2', 'ICC3', 'ICC1k', 'ICC2k', 'ICC3k'.
 Default: "ICC2"
 """
-self.logger = logging.getLogger('AFQ')
+self.logger = logging.getLogger(__name__)
 self.ICC_func = ICC_func
 if "k" in self.ICC_func:
 self.ICC_func_name = f"ICC({self.ICC_func[3]},k)"
2 changes: 1 addition & 1 deletion AFQ/viz/plotly_backend.py
@@ -21,7 +21,7 @@


 scope = pio.kaleido.scope
-viz_logger = logging.getLogger("AFQ")
+viz_logger = logging.getLogger(__name__)


 def _inline_interact(figure, show, show_inline):
2 changes: 1 addition & 1 deletion AFQ/viz/utils.py
@@ -18,7 +18,7 @@

 __all__ = ["Viz"]

-viz_logger = logging.getLogger("AFQ")
+viz_logger = logging.getLogger(__name__)
 tableau_20 = [
 (0.12156862745098039, 0.4666666666666667, 0.7058823529411765),
 (0.6823529411764706, 0.7803921568627451, 0.9098039215686274),
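A note on the rename applied in all of the hunks above: logging.getLogger(__name__) names each logger after its module (e.g. AFQ.segmentation), and those loggers still propagate their records to the root logger, so a handler configured once at the entry point sees everything the old shared 'AFQ' logger produced. A minimal sketch of that behaviour (the module name and message are illustrative):

import logging

# Configure the root logger once, at the application entry point.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s:%(levelname)s:%(message)s')

# What logging.getLogger(__name__) resolves to inside AFQ/segmentation.py.
logger = logging.getLogger("AFQ.segmentation")

# The record propagates AFQ.segmentation -> AFQ -> root and is emitted by
# the root handler, timestamped and tagged with the module's logger name.
logger.info("cleaning bundle")

Because the names still nest under AFQ, a level or handler set on logging.getLogger('AFQ') keeps covering all of these modules.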
41 changes: 27 additions & 14 deletions bin/pyAFQ
@@ -1,21 +1,8 @@
 #!/usr/bin/env python

 import warnings
 import os.path as op
 from argparse import ArgumentParser


-with warnings.catch_warnings():
-warnings.simplefilter("ignore")
-
-print("Loading AFQ libraries...")
-
-import AFQ
-import AFQ.utils.bin as afb
-
-import logging
-logger = logging.getLogger('AFQ')
-logger.setLevel(level=logging.INFO)
+import logging

 usage = \
 """pyAFQ /path/to/afq_config.toml
@@ -97,6 +84,10 @@ def parse_cli(arg_dict):
 cli_parser.add_argument(
 '-t', '--notrack', action="store_true", default=False,
 help="Disable the use of pyAFQ being recorded by Google Analytics. ")
+
+cli_parser.add_argument(
+'-l', '--log', default=None,
+help="Path to log file to save the outputs of pyAFQ.")

 opts = cli_parser.parse_args()

@@ -109,12 +100,34 @@ def parse_cli(arg_dict):
 "flag when using the pyAFQ CLI")
 import popylar
 popylar.track_event(AFQ._ga_id, "pyAFQ_cli", "CLI called")
+
+if opts.log is not None:
+# Create handler
+f_handler = logging.FileHandler(opts.log)
+f_handler.setLevel(logging.INFO)
+# Create formatter and add it to handler
+f_format = logging.Formatter(
+'%(asctime)s - %(name)s:%(levelname)s:%(message)s')
+f_handler.setFormatter(f_format)
+# Add handler to the logger
+logger.addHandler(f_handler)
+print(f'Logging to file: {opts.log}')
+logger.info("Starting pyAFQ")

 return opts.config, opts.generate_toml, opts.overwrite,\
 opts.verbose, opts.dry_run, opts.to_call, opts.generate_json


 if __name__ == '__main__':
+logger = logging.getLogger()
+logger.setLevel(level=logging.INFO)
+
+with warnings.catch_warnings():
+warnings.simplefilter("ignore")
+print("Loading AFQ libraries...")
+# import AFQ
+import AFQ.utils.bin as afb
+
 arg_dict = afb.func_dict_to_arg_dict(logger=logger)
 config_file, generate_only, overwrite, verbose,\
 dry_run, to_call, generate_json =\
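For reference, a standalone sketch of what the new --log option does (the log file name here is made up; the format string is the one added above):

import logging

# As in the __main__ block above: configure the root logger once.
logger = logging.getLogger()
logger.setLevel(logging.INFO)

# Roughly what `pyAFQ /path/to/afq_config.toml --log pyafq.log` sets up:
f_handler = logging.FileHandler("pyafq.log")
f_handler.setLevel(logging.INFO)
f_handler.setFormatter(logging.Formatter(
    '%(asctime)s - %(name)s:%(levelname)s:%(message)s'))
logger.addHandler(f_handler)

logger.info("Starting pyAFQ")
# pyafq.log now holds a timestamped line such as:
# 2023-01-01 12:00:00,000 - root:INFO:Starting pyAFQ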
2 changes: 1 addition & 1 deletion examples/plot_callosal_tract_profile.py
@@ -50,7 +50,7 @@
 # Ensure segmentation logging information is included in this example's output
 root = logging.getLogger()
 root.setLevel(logging.ERROR)
-logging.getLogger('AFQ').setLevel(logging.INFO)
+logging.getLogger(__name__).setLevel(logging.INFO)
Collaborator Author:
unless really necessary, it'd be better not to redefine the logger level in a file like that

Collaborator:
This is an example, so it's showing what the user can do. So I think it's good to show the getLogger and setLevel stuff. This file is not imported by any other file, so it shouldn't affect anything else. That said, I am not sure why there is this root logger thing.

 handler = logging.StreamHandler(sys.stdout)
 handler.setLevel(logging.INFO)
 root.addHandler(handler)
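On the review question above about the root logger: handlers added to the root logger receive records propagated from every module-level logger, while root.setLevel(logging.ERROR) only sets the fallback threshold for loggers that have no level of their own. That is why the example still needs an explicit setLevel(logging.INFO) on one logger to opt that subtree back in. A minimal sketch (using the original 'AFQ' name; logger names and messages are placeholders):

import logging
import sys

root = logging.getLogger()
root.setLevel(logging.ERROR)                        # fallback for unconfigured loggers
root.addHandler(logging.StreamHandler(sys.stdout))

logging.getLogger('AFQ').setLevel(logging.INFO)     # opt the AFQ.* subtree back in

logging.getLogger('AFQ.segmentation').info("shown")       # effective level INFO -> printed
logging.getLogger('some.other.lib').info("suppressed")    # falls back to ERROR -> dropped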