Fixed black lint
glaubervila committed Feb 15, 2024
1 parent bd8cea2 commit 7132e58
Showing 10 changed files with 57 additions and 42 deletions.
1 change: 1 addition & 0 deletions backend/common/docker.py
@@ -2,6 +2,7 @@
 Functions to convert the statistics of a container,
 taken from this repository: https://github.com/TomasTomecek/sen/blob/master/sen/util.py#L162
 """
+
 import functools
 import logging
 import os
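The single addition here is a blank line after the module docstring. This is most likely Black's 2024 stable style (Black >= 24.1), which enforces exactly one blank line between a module docstring and the code that follows; the same one-line change appears again in backend/coreAdmin/urls.py below. A minimal sketch on a hypothetical module (names are invented):

    # Before: nothing separates the module docstring from the first import.
    """Hypothetical helper module."""
    import functools

    # After `python -m black` with the 2024 stable style: one blank line is inserted.
    """Hypothetical helper module."""

    import functools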
12 changes: 6 additions & 6 deletions backend/common/views.py
@@ -271,17 +271,17 @@ def download_file(request):
             response = HttpResponse(
                 fh.read(), content_type="application/octet-stream"
             )
-            response[
-                "Content-Disposition"
-            ] = "inline; filename=" + os.path.basename(new_file)
+            response["Content-Disposition"] = (
+                "inline; filename=" + os.path.basename(new_file)
+            )
             return response
     else:
 
         with open(filepath, "rb") as fh:
             response = HttpResponse(
                 fh.read(), content_type="application/octet-stream"
             )
-            response[
-                "Content-Disposition"
-            ] = "inline; filename=" + os.path.basename(filepath)
+            response["Content-Disposition"] = (
+                "inline; filename=" + os.path.basename(filepath)
+            )
             return response
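Both hunks show the same pattern: instead of breaking the subscript target across lines, newer Black keeps the assignment target intact and wraps the long right-hand side in parentheses (the expressions exceed the 88-character limit, which is why they stay split at all). A sketch of the pattern with hypothetical names (`headers` standing in for the HttpResponse object):

    import os

    headers = {}  # hypothetical stand-in for the HttpResponse object

    # Old layout: the subscript target itself was broken across lines.
    headers[
        "Content-Disposition"
    ] = "inline; filename=" + os.path.basename("/tmp/example.csv")

    # Layout produced by newer Black: the target stays on one line and the
    # long right-hand side is wrapped in parentheses instead.
    headers["Content-Disposition"] = (
        "inline; filename=" + os.path.basename("/tmp/example.csv")
    )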
2 changes: 1 addition & 1 deletion backend/coreAdmin/celery.py
@@ -18,7 +18,7 @@
 app.conf.beat_schedule = {
     "garbage-collector-every-three-hours": {
         "task": "tno.tasks.garbage_collector",
-        "schedule": crontab(minute=0, hour="*/3")
+        "schedule": crontab(minute=0, hour="*/3"),
         # 'schedule': 30.0
     },
     "prediction-map-every-hour": {
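The only edit here is the trailing comma after the last `crontab(...)` entry. Black adds a trailing comma to the final element of any collection or call that it leaves split across multiple lines (the comment below the entry keeps this dict multi-line), so a later addition produces a one-line diff. A small sketch of the same idea, with illustrative schedule values:

    from celery.schedules import crontab

    # Before: no comma after the last entry of the multi-line dict.
    beat_schedule_before = {
        "garbage-collector-every-three-hours": {
            "task": "tno.tasks.garbage_collector",
            "schedule": crontab(minute=0, hour="*/3")
        },
    }

    # After Black: a trailing comma is added because the dict stays multi-line.
    beat_schedule_after = {
        "garbage-collector-every-three-hours": {
            "task": "tno.tasks.garbage_collector",
            "schedule": crontab(minute=0, hour="*/3"),
        },
    }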
1 change: 1 addition & 0 deletions backend/coreAdmin/urls.py
@@ -13,6 +13,7 @@
 1. Import the include() function: from django.urls import include, path
 2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))y
 """
+
 from common import views as common_views
 from des.views import (
     CcdViewSet,
1 change: 1 addition & 0 deletions backend/des/admin.py
@@ -33,6 +33,7 @@ class ExposureAdmin(admin.ModelAdmin):
     )
 
     search_fields = ("id",)
+
     # This will help you to disbale add functionality
     def has_add_permission(self, request):
         return False
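The addition is the blank line between the `search_fields` attribute and the commented method below it. This looks like Black's normalization of vertical spacing around definitions: a `def` in a class body (together with any comment attached directly above it) that follows other statements gets a preceding blank line. Roughly, on a hypothetical admin class:

    from django.contrib import admin

    # Before: the attribute runs straight into the comment + method.
    class ExampleAdmin(admin.ModelAdmin):
        search_fields = ("id",)
        # Disable the add button in the admin.
        def has_add_permission(self, request):
            return False

    # After Black: a blank line separates the attribute from the comment + method.
    class ExampleAdminFormatted(admin.ModelAdmin):
        search_fields = ("id",)

        # Disable the add button in the admin.
        def has_add_permission(self, request):
            return False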
8 changes: 5 additions & 3 deletions backend/des/skybot/import_positions.py
@@ -317,9 +317,11 @@ def import_des_skybot_positions(self, exposure_id, ticket, filepath, job_id):
         for ccd in ccds:
 
             df_positions["ccd_id"] = df_positions.apply(
-                lambda row: row["ccd_id"]
-                if row["ccd_id"] != 0
-                else self.in_ccd(ccd, row["raj2000"], row["decj2000"]),
+                lambda row: (
+                    row["ccd_id"]
+                    if row["ccd_id"] != 0
+                    else self.in_ccd(ccd, row["raj2000"], row["decj2000"])
+                ),
                 axis=1,
             )
 
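This is newer Black's handling of conditional expressions that span multiple lines: the whole `x if cond else y` expression is wrapped in parentheses, which makes the boundaries of the ternary (here, the body of the lambda passed to `DataFrame.apply`) much easier to read. A self-contained sketch with a toy DataFrame (`locate_ccd` is a made-up stand-in for `self.in_ccd(...)`):

    import pandas as pd

    df = pd.DataFrame(
        {"ccd_id": [0, 7], "raj2000": [10.0, 20.0], "decj2000": [-1.0, -2.0]}
    )

    def locate_ccd(ra, dec):  # hypothetical stand-in for self.in_ccd(ccd, ra, dec)
        return 42

    # Formatted the way newer Black emits it: the multi-line ternary inside the
    # lambda is wrapped in parentheses.
    df["ccd_id"] = df.apply(
        lambda row: (
            row["ccd_id"]
            if row["ccd_id"] != 0
            else locate_ccd(row["raj2000"], row["decj2000"])
        ),
        axis=1,
    )
    print(df["ccd_id"].tolist())  # [42, 7]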
6 changes: 3 additions & 3 deletions backend/des/skybot/pipeline.py
@@ -1614,9 +1614,9 @@ def consolidate(self, job_id, aborted=False):
         # Update the job totals
         job["positions"] = self.dsdao.t_positions_des_by_job(job["id"])
         job["asteroids"] = self.dsdao.t_unique_objects_by_job(job["id"])
-        job[
-            "exposures_with_asteroid"
-        ] = self.dsdao.t_exposures_with_objects_by_job(job["id"])
+        job["exposures_with_asteroid"] = (
+            self.dsdao.t_exposures_with_objects_by_job(job["id"])
+        )
         job["ccds_with_asteroid"] = self.dsdao.t_ccds_with_objects_by_job(
             job["id"]
         )
28 changes: 14 additions & 14 deletions backend/old_apps/orbit/refine_orbit.py
@@ -633,20 +633,20 @@ def copy_observation_file(self, obj, obj_dir):
             shutil.copy2(original_observation_file, new_file_path)
 
             if os.path.exists(new_file_path):
-                self.results["objects"][obj.get("alias")]["inputs"][
-                    "observations"
-                ] = dict(
-                    {
-                        "filename": os.path.basename(new_file_path),
-                        "file_path": new_file_path,
-                        "file_size": os.path.getsize(new_file_path),
-                        "file_type": os.path.splitext(new_file_path)[1],
-                        "date_time": datetime.strftime(
-                            observation.download_finish_time, "%Y-%m-%d %H:%M:%S"
-                        ),
-                        "source": observation.source,
-                        "observation_file_id": observation.id,
-                    }
+                self.results["objects"][obj.get("alias")]["inputs"]["observations"] = (
+                    dict(
+                        {
+                            "filename": os.path.basename(new_file_path),
+                            "file_path": new_file_path,
+                            "file_size": os.path.getsize(new_file_path),
+                            "file_type": os.path.splitext(new_file_path)[1],
+                            "date_time": datetime.strftime(
+                                observation.download_finish_time, "%Y-%m-%d %H:%M:%S"
+                            ),
+                            "source": observation.source,
+                            "observation_file_id": observation.id,
+                        }
+                    )
                 )
 
                 self.logger.debug(
8 changes: 5 additions & 3 deletions backend/tno/asteroid_table/asteroid_table_build.py
@@ -758,9 +758,11 @@ def conform_astorb_lowell_obs_dynclass(dataframe, log):
         log.debug(
             f"Remaining Time: {remaining_time:.0f} seconds..."
             if remaining_time <= 60
-            else f"Remaining Time: {remaining_time/60:.0f} minutes..."
-            if remaining_time <= 3600
-            else f"Remaining Time: {remaining_time/3600:.0f} hours..."
+            else (
+                f"Remaining Time: {remaining_time/60:.0f} minutes..."
+                if remaining_time <= 3600
+                else f"Remaining Time: {remaining_time/3600:.0f} hours..."
+            )
         )
 
         try:
32 changes: 20 additions & 12 deletions orbit_trace/src/asteroid.py
@@ -909,18 +909,26 @@ def register_occultations(self, start_period: str, end_period: str, jobid: int):
             new_row.update(
                 {
                     "have_path_coeff": True,
-                    "occ_path_min_longitude": float(occ_coeff["min_longitude"])
-                    if occ_coeff["min_longitude"] != None
-                    else None,
-                    "occ_path_max_longitude": float(occ_coeff["max_longitude"])
-                    if occ_coeff["max_longitude"] != None
-                    else None,
-                    "occ_path_min_latitude": float(occ_coeff["min_latitude"])
-                    if occ_coeff["min_latitude"] != None
-                    else None,
-                    "occ_path_max_latitude": float(occ_coeff["max_latitude"])
-                    if occ_coeff["max_latitude"] != None
-                    else None,
+                    "occ_path_min_longitude": (
+                        float(occ_coeff["min_longitude"])
+                        if occ_coeff["min_longitude"] != None
+                        else None
+                    ),
+                    "occ_path_max_longitude": (
+                        float(occ_coeff["max_longitude"])
+                        if occ_coeff["max_longitude"] != None
+                        else None
+                    ),
+                    "occ_path_min_latitude": (
+                        float(occ_coeff["min_latitude"])
+                        if occ_coeff["min_latitude"] != None
+                        else None
+                    ),
+                    "occ_path_max_latitude": (
+                        float(occ_coeff["max_latitude"])
+                        if occ_coeff["max_latitude"] != None
+                        else None
+                    ),
                     "occ_path_is_nightside": bool(occ_coeff["nightside"]),
                     "occ_path_coeff": json.dumps(occ_coeff),
                 }
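Same conditional-expression rule as in backend/des/skybot/import_positions.py, applied to each value in the dict passed to `new_row.update(...)`. Note that Black only reshapes layout and never changes semantics: the pre-existing `!= None` comparisons survive the reformat even though `is not None` would be the idiomatic spelling, and changing that would be a separate, behaviour-affecting edit. A reduced sketch of the pattern (the `occ_coeff` values are invented):

    occ_coeff = {"min_longitude": None, "max_longitude": -54.3}  # invented sample values

    new_row = {}
    new_row.update(
        {
            "have_path_coeff": True,
            # Each value is a multi-line ternary, so Black parenthesizes it.
            # (`!= None` is kept as in the source; Black does not rewrite comparisons.)
            "occ_path_min_longitude": (
                float(occ_coeff["min_longitude"])
                if occ_coeff["min_longitude"] != None
                else None
            ),
            "occ_path_max_longitude": (
                float(occ_coeff["max_longitude"])
                if occ_coeff["max_longitude"] != None
                else None
            ),
        }
    )
    print(new_row["occ_path_max_longitude"])  # -54.3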
