Skip to content

Commit

Permalink
Merge pull request #2077 from dandi/enh-logging
Browse files Browse the repository at this point in the history
Use a dedicated logger, not the top-level `logging` module
  • Loading branch information
yarikoptic authored Nov 20, 2024
2 parents 8665adc + a509ab1 commit 0a4bf70
Show file tree
Hide file tree
Showing 2 changed files with 15 additions and 11 deletions.
14 changes: 8 additions & 6 deletions dandiapi/api/doi.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,8 @@
(settings.DANDI_DOI_API_PREFIX, 'DANDI_DOI_API_PREFIX'),
]

logger = logging.getLogger(__name__)


def doi_configured() -> bool:
    """Return True when at least one DataCite DOI setting is set (not None)."""
    flags = [value is not None for value, _name in DANDI_DOI_SETTINGS]
    return any(flags)
Expand Down Expand Up @@ -51,10 +53,10 @@ def create_doi(version: Version) -> str:
timeout=30,
).raise_for_status()
except requests.exceptions.HTTPError as e:
logging.exception('Failed to create DOI %s', doi)
logging.exception(request_body)
logger.exception('Failed to create DOI %s', doi)
logger.exception(request_body)
if e.response:
logging.exception(e.response.text)
logger.exception(e.response.text)
raise
return doi

Expand All @@ -70,13 +72,13 @@ def delete_doi(doi: str) -> None:
r.raise_for_status()
except requests.exceptions.HTTPError as e:
if e.response and e.response.status_code == requests.codes.not_found:
logging.warning('Tried to get data for nonexistent DOI %s', doi)
logger.warning('Tried to get data for nonexistent DOI %s', doi)
return
logging.exception('Failed to fetch data for DOI %s', doi)
logger.exception('Failed to fetch data for DOI %s', doi)
raise
if r.json()['data']['attributes']['state'] == 'draft':
try:
s.delete(doi_url).raise_for_status()
except requests.exceptions.HTTPError:
logging.exception('Failed to delete DOI %s', doi)
logger.exception('Failed to delete DOI %s', doi)
raise
12 changes: 7 additions & 5 deletions dandiapi/api/views/upload.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,8 @@

supported_digests = {'dandi:dandi-etag': 'etag', 'dandi:sha2-256': 'sha256'}

logger = logging.getLogger(__name__)


class DigestSerializer(serializers.Serializer):
algorithm = serializers.CharField()
Expand Down Expand Up @@ -140,7 +142,7 @@ def upload_initialize_view(request: Request) -> HttpResponseBase:
if dandiset.unembargo_in_progress:
raise DandisetUnembargoInProgressError

logging.info(
logger.info(
'Starting upload initialization of size %s, ETag %s to dandiset %s',
content_size,
etag,
Expand All @@ -155,15 +157,15 @@ def upload_initialize_view(request: Request) -> HttpResponseBase:
headers={'Location': asset_blobs.first().blob_id},
)

logging.info('Blob with ETag %s does not yet exist', etag)
logger.info('Blob with ETag %s does not yet exist', etag)

upload, initialization = Upload.initialize_multipart_upload(etag, content_size, dandiset)
logging.info('Upload of ETag %s initialized', etag)
logger.info('Upload of ETag %s initialized', etag)
upload.save()
logging.info('Upload of ETag %s saved', etag)
logger.info('Upload of ETag %s saved', etag)

response_serializer = UploadInitializationResponseSerializer(initialization)
logging.info('Upload of ETag %s serialized', etag)
logger.info('Upload of ETag %s serialized', etag)
return Response(response_serializer.data)


Expand Down

0 comments on commit 0a4bf70

Please sign in to comment.