Merge pull request #155 from PanDAWMS/dev
replace hardcoded atlas_panda* by settings.DB_SCHEMA* & more fixes
tkorchug authored Apr 19, 2023
2 parents d3b42fe + 70a4649 commit 700ae59
Showing 13 changed files with 371 additions and 293 deletions.
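The recurring pattern in the diff below is that every hardcoded Oracle schema name (ATLAS_PANDABIGMON, atlas_panda, atlas_pandaarch) is replaced by a value read from Django settings. A minimal sketch of that pattern, using the setting names that appear in the diff; the concrete values are illustrative assumptions, the real ones come from the deployment configuration:

    # settings.py (illustrative values only)
    DB_SCHEMA = 'ATLAS_PANDABIGMON'
    DB_SCHEMA_PANDA = 'ATLAS_PANDA'
    DB_SCHEMA_PANDA_ARCH = 'ATLAS_PANDAARCH'

    # any module that builds raw SQL
    from django.conf import settings
    query = "SELECT pandaid FROM {}.art_results_queue WHERE is_locked = 0".format(settings.DB_SCHEMA)

Only the schema prefix changes per deployment; the body of each query stays as it was.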
14 changes: 7 additions & 7 deletions core/art/jobSubResults.py
@@ -121,10 +121,10 @@ def lock_nqueuedjobs(cur, nrows):
"""

lock_time = datetime.now().strftime(settings.DATETIME_FORMAT)
lquery = """UPDATE atlas_pandabigmon.art_results_queue
lquery = """UPDATE {}.art_results_queue
SET IS_LOCKED = 1,
LOCK_TIME = to_date('%s', 'YYYY-MM-DD HH24:MI:SS')
WHERE rownum <= %i AND IS_LOCKED = 0""" % (lock_time, nrows)
LOCK_TIME = to_date('{}', 'YYYY-MM-DD HH24:MI:SS')
WHERE rownum <= {} AND IS_LOCKED = 0""".format(settings.DB_SCHEMA, lock_time, nrows)
try:
cur.execute(lquery)
except DatabaseError as e:
@@ -141,9 +141,9 @@ def delete_queuedjobs(cur, lock_time):
:return:
"""

dquery = """DELETE FROM atlas_pandabigmon.art_results_queue
dquery = """DELETE FROM {}.art_results_queue
WHERE IS_LOCKED = 1
AND LOCK_TIME = to_date('%s', 'YYYY-MM-DD HH24:MI:SS')""" % (lock_time)
AND LOCK_TIME = to_date('{}', 'YYYY-MM-DD HH24:MI:SS')""".format(settings.DB_SCHEMA, lock_time)
try:
cur.execute(dquery)
except DatabaseError as e:
@@ -160,8 +160,8 @@ def clear_queue(cur):
:return:
"""

cquery = """DELETE FROM atlas_pandabigmon.art_results_queue
WHERE IS_LOCKED = 1"""
cquery = """DELETE FROM {}.art_results_queue
WHERE IS_LOCKED = 1""".format(settings.DB_SCHEMA)
try:
cur.execute(cquery)
except DatabaseError as e:
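Once the hunks above are applied, the lock query is assembled entirely through str.format, with the schema as the first positional argument. A sketch of the resulting call, assuming settings.DB_SCHEMA = 'ATLAS_PANDABIGMON'; the lock_time and nrows values are illustrative:

    from django.conf import settings

    lock_time = '2023-04-19 12:00:00'  # normally datetime.now().strftime(settings.DATETIME_FORMAT)
    nrows = 100
    lquery = """UPDATE {}.art_results_queue
                SET IS_LOCKED = 1,
                LOCK_TIME = to_date('{}', 'YYYY-MM-DD HH24:MI:SS')
                WHERE rownum <= {} AND IS_LOCKED = 0""".format(settings.DB_SCHEMA, lock_time, nrows)
    # renders to: UPDATE ATLAS_PANDABIGMON.art_results_queue SET IS_LOCKED = 1, ...

The argument order matters: the schema fills the first {}, then lock_time, then nrows.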
9 changes: 5 additions & 4 deletions core/art/modelsART.py
@@ -4,6 +4,7 @@

from __future__ import unicode_literals
from django.db import models
from django.conf import settings


class ARTResults(models.Model):
@@ -20,7 +21,7 @@ class ARTResults(models.Model):
lock_time = models.DateTimeField(null=True, db_column='lock_time', blank=True)

class Meta:
db_table = u'"ATLAS_PANDABIGMON"."ART_RESULTS"'
db_table = f'"{settings.DB_SCHEMA}"."ART_RESULTS"'


class ARTSubResult(models.Model):
@@ -29,7 +30,7 @@ class ARTSubResult(models.Model):
result = models.TextField(db_column='RESULT_JSON', blank=True)

class Meta:
db_table = u'"ATLAS_PANDABIGMON"."ART_SUBRESULT"'
db_table = f'"{settings.DB_SCHEMA}"."ART_SUBRESULT"'


class ARTResultsQueue(models.Model):
@@ -38,7 +39,7 @@ class ARTResultsQueue(models.Model):
is_locked = models.IntegerField(db_column='is_locked')
lock_time = models.DateTimeField(null=True, db_column='lock_time', blank=True)
class Meta:
db_table = u'"ATLAS_PANDABIGMON"."ART_RESULTS_QUEUE"'
db_table = f'"{settings.DB_SCHEMA}"."ART_RESULTS_QUEUE"'


class ARTTests(models.Model):
@@ -56,4 +57,4 @@ class ARTTests(models.Model):

# subresult = models.OneToOneField('ARTSubResult', related_name='pandaid_sr', on_delete=models.DO_NOTHING, db_column='pandaid')
class Meta:
db_table = u'"ATLAS_PANDABIGMON"."ART_TESTS"'
db_table = f'"{settings.DB_SCHEMA}"."ART_TESTS"'
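Because the Meta.db_table values are now f-strings, the schema is resolved from settings once, when modelsART.py is imported, and the double-quoted '"SCHEMA"."TABLE"' form preserves the exact identifier casing Oracle expects. A standalone sketch with a hypothetical model, assuming settings.DB_SCHEMA = 'ATLAS_PANDABIGMON':

    from django.conf import settings
    from django.db import models

    class ARTResultsSketch(models.Model):
        pandaid = models.BigIntegerField(primary_key=True, db_column='pandaid')

        class Meta:
            # evaluated at import time -> '"ATLAS_PANDABIGMON"."ART_RESULTS"'
            db_table = f'"{settings.DB_SCHEMA}"."ART_RESULTS"'
            managed = False  # assumption: the table is managed outside Django migrations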
32 changes: 16 additions & 16 deletions core/art/views.py
@@ -146,8 +146,8 @@ def artOverview(request):
# querying data from dedicated SQL function
query_raw = """
SELECT package, branch, ntag, nightly_tag, status, result, pandaid, testname, attemptmark
FROM table(ATLAS_PANDABIGMON.ARTTESTS_LIGHT('{}','{}','{}'))
""".format(query['ntag_from'], query['ntag_to'], query['strcondition'])
FROM table({}.ARTTESTS_LIGHT('{}','{}','{}'))
""".format(settings.DB_SCHEMA, query['ntag_from'], query['ntag_to'], query['strcondition'])
cur = connection.cursor()
cur.execute(query_raw)
tasks_raw = cur.fetchall()
@@ -275,8 +275,8 @@ def artTasks(request):
cur = connection.cursor()
query_raw = """
SELECT package, branch, ntag, nightly_tag, pandaid, testname, taskid, status, result, attemptmark
FROM table(ATLAS_PANDABIGMON.ARTTESTS_LIGHT('{}','{}','{}'))
""".format(query['ntag_from'], query['ntag_to'], query['strcondition'])
FROM table({}.ARTTESTS_LIGHT('{}','{}','{}'))
""".format(settings.DB_SCHEMA, query['ntag_from'], query['ntag_to'], query['strcondition'])
cur.execute(query_raw)
tasks_raw = cur.fetchall()
cur.close()
@@ -442,8 +442,8 @@ def artJobs(request):
c.attemptmark,
c.inputfileid,
c.extrainfo
FROM table(ATLAS_PANDABIGMON.ARTTESTS('{}','{}','{}')) c
""".format(query['ntag_from'], query['ntag_to'], query['strcondition'])
FROM table({}.ARTTESTS('{}','{}','{}')) c
""".format(settings.DB_SCHEMA, query['ntag_from'], query['ntag_to'], query['strcondition'])
cur.execute(query_raw)
jobs = cur.fetchall()
cur.close()
@@ -754,8 +754,8 @@ def artStability(request):
c.pandaid,
c.result,
c.attemptmark
FROM table(ATLAS_PANDABIGMON.ARTTESTS_LIGHT('{}','{}','{}')) c
""".format(query['ntag_from'], query['ntag_to'], query['strcondition'])
FROM table({}.ARTTESTS_LIGHT('{}','{}','{}')) c
""".format(settings.DB_SCHEMA, query['ntag_from'], query['ntag_to'], query['strcondition'])
cur.execute(query_raw)
jobs = cur.fetchall()
cur.close()
@@ -900,9 +900,9 @@ def artErrors(request):
c.status,
c.pandaid,
c.result
FROM table(ATLAS_PANDABIGMON.ARTTESTS_LIGHT('{}','{}','{}')) c
FROM table({}.ARTTESTS_LIGHT('{}','{}','{}')) c
WHERE c.attemptmark = 0
""".format(query['ntag_from'], query['ntag_to'], query['strcondition'])
""".format(settings.DB_SCHEMA, query['ntag_from'], query['ntag_to'], query['strcondition'])
cur.execute(query_raw)
jobs = cur.fetchall()
cur.close()
@@ -992,15 +992,15 @@ def updateARTJobList(request):

# Adding jobs whose result json has not been loaded yet to ART_RESULTS_QUEUE
cur = connection.cursor()
cur.execute("""INSERT INTO atlas_pandabigmon.art_results_queue
cur.execute("""INSERT INTO {0}.art_results_queue
(pandaid, IS_LOCKED, LOCK_TIME)
SELECT pandaid, 0, NULL FROM table(ATLAS_PANDABIGMON.ARTTESTS_LIGHT('{}','{}','{}'))
SELECT pandaid, 0, NULL FROM table({0}.ARTTESTS_LIGHT('{1}','{2}','{3}'))
WHERE pandaid is not NULL
and attemptmark = 0
and result is NULL
and status in ('finished', 'failed')
and pandaid not in (select pandaid from atlas_pandabigmon.art_results_queue)
""".format(query['ntag_from'], query['ntag_to'], query['strcondition']))
and pandaid not in (select pandaid from {0}.art_results_queue)
""".format(settings.DB_SCHEMA, query['ntag_from'], query['ntag_to'], query['strcondition']))
cur.close()

data = {
@@ -1316,9 +1316,9 @@ def sendArtReport(request):
cur = connection.cursor()
query_raw = """
SELECT taskid, package, branch, ntag, nightly_tag, testname, status, result
FROM table(ATLAS_PANDABIGMON.ARTTESTS_LIGHT('{}','{}','{}'))
FROM table({}.ARTTESTS_LIGHT('{}','{}','{}'))
WHERE attemptmark = 0
""".format(query['ntag_from'], query['ntag_to'], query['strcondition'])
""".format(settings.DB_SCHEMA, query['ntag_from'], query['ntag_to'], query['strcondition'])
cur.execute(query_raw)
jobs = cur.fetchall()
cur.close()
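In updateARTJobList the schema has to appear in three places inside one statement, so the new code switches from anonymous {} placeholders to indexed ones ({0}..{3}): settings.DB_SCHEMA is passed once and reused. A trimmed sketch of the idea; the ntag dates and the '1=1' condition are placeholders for illustration:

    from django.conf import settings

    sql = """INSERT INTO {0}.art_results_queue (pandaid, is_locked, lock_time)
             SELECT pandaid, 0, NULL FROM table({0}.ARTTESTS_LIGHT('{1}', '{2}', '{3}'))
             WHERE pandaid NOT IN (SELECT pandaid FROM {0}.art_results_queue)
          """.format(settings.DB_SCHEMA, '2023-04-18', '2023-04-19', '1=1')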
117 changes: 71 additions & 46 deletions core/buildmonitor/viewsartmonit.py
@@ -4,9 +4,11 @@
from django.core.cache import cache
import requests
import json, re, datetime
import logging
from django.views.decorators.cache import never_cache
from core.libs.DateEncoder import DateEncoder

_logger = logging.getLogger('bigpandamon')


@never_cache
@@ -15,6 +17,8 @@ def artmonitviewDemo(request):
# https://bigpanda.cern.ch/art/overview/?ntags=2020-01-14,2020-01-15&view=branches&json

valid, response = initRequest(request)
if not valid:
return response

ts_now = datetime.datetime.now()
s_tstamp = ''
@@ -25,60 +29,88 @@ def artmonitviewDemo(request):
s_tstamp = str(ts_f)
else:
s_tstamp = s_tstamp + ',' + str(ts_f)
# print('string of tstamps ',s_tstamp)

# getting branches
url10 = "https://bigpanda.cern.ch/art/overview/?ntags=" + s_tstamp + "&view=branches&json"
# print('TRYING requests.get('+url10+')')
r = requests.get(url10, verify=False)
# pprint(r)
_logger.debug('getting branches from {}'.format(url10))
n_attempts = 3
is_success = False
i_attempt = 0
r = None
while i_attempt < n_attempts and not is_success:
r = requests.get(url10, verify=False)
i_attempt += 1  # advance the counter so the retry loop terminates after n_attempts
if r.status_code == 200:
is_success = True

if not is_success:
_logger.error("Internal Server Error! Failed to get ART test results for buildmonitor from {} with\n{}".format(
url10,
str(r.text)
))
return render(
request,
'artmonitviewDemo.html',
{'viewParams': request.session['viewParams'],
'resltART': []},
content_type='text/html'
)


a0 = json.loads(r.text)
branch_dict = a0.get('artpackages', {})
branch_list = branch_dict.keys()
# print('Branch list:',branch_list)

# getting ART GRID test results per branch
_logger.debug('Branch list: {}'.format(branch_list))
dict_result = {}
for branch in branch_list:
url11 = "https://bigpanda.cern.ch/art/tasks/?branch=" + branch + '&ntags=' + s_tstamp + '&json'
# print('TRYING requests.get('+url11+')')
r = requests.get(url11, verify=False)
# pprint(r)
a = json.loads(r.text)
tasks = a.get('arttasks', {})
# cache.set('art-monit-dict', dict_branch, 1800)
reslist = []
dict_branch = {}
for k, v in tasks.items():
if isinstance(v, dict):
for kx, ky in v.items():
if kx == branch:
if isinstance(ky, dict):
for kxx, kyy in ky.items():
if isinstance(kyy, dict):
for kxxx, kyyy in kyy.items():
# print('K ',kxx,kxxx)
if re.search(kxx, kxxx):
# pprint(kyyy)
a0_branch = dict_branch.get(kxxx, {'active': 0, 'succeeded': 0, 'failed': 0,
'finished': 0})
s_active = kyyy['active'] + a0_branch['active']
s_done = kyyy['succeeded'] + a0_branch['succeeded']
s_failed = kyyy['failed'] + a0_branch['failed']
s_finished = kyyy['finished'] + a0_branch['finished']
dict_branch[kxxx] = {'active': s_active, 'succeeded': s_done, 'failed': s_failed,
'finished': s_finished}
# cache.set('art-monit-dict', dict_branch, 1800)
reslist.append([s_active, s_done, s_failed, s_finished])
dict_result[branch] = dict_branch
_logger.debug('TRYING requests.get({})'.format(url11))
try:
r = requests.get(url11, verify=False)
r.raise_for_status()
except requests.RequestException as e:
_logger.exception("General Error\n{}".format(str(e)))
r = None
if r is not None:
a = json.loads(r.text)
tasks = a.get('arttasks', {})
reslist = []
dict_branch = {}
for k, v in tasks.items():
if isinstance(v, dict):
for kx, ky in v.items():
if kx == branch:
if isinstance(ky, dict):
for kxx, kyy in ky.items():
if isinstance(kyy, dict):
for kxxx, kyyy in kyy.items():
if re.search(kxx, kxxx):
a0_branch = dict_branch.get(
kxxx,
{'active': 0, 'succeeded': 0, 'failed': 0, 'finished': 0})
s_active = kyyy['active'] + a0_branch['active']
s_done = kyyy['succeeded'] + a0_branch['succeeded']
s_failed = kyyy['failed'] + a0_branch['failed']
s_finished = kyyy['finished'] + a0_branch['finished']
dict_branch[kxxx] = {
'active': s_active,
'succeeded': s_done,
'failed': s_failed,
'finished': s_finished
}
reslist.append([s_active, s_done, s_failed, s_finished])
dict_result[branch] = dict_branch
cache.set('art-monit-dict', dict_result, 1800)
# dict_from_cache = cache.get('art-monit-dict')
# pprint('===============================')

list2view = []
for k46, v46 in dict_result.items():
for kk, vv in v46.items():
l1 = [k46]
l1.append(kk)
l1.extend([vv['active'], vv['succeeded'], vv['failed'], vv['finished']])
# print('L1 ',l1)
list2view.append(l1)
###########

new_cur = connection.cursor()
query = """
select n.nname as \"BRANCH\", platf.pl,
@@ -120,13 +152,6 @@ def artmonitviewDemo(request):
dict_loc_result[l_branch] = dict_inter
cache.set('art-local-dict', dict_loc_result, 1800)

# pprint(dict_loc_result)
# for k47, v47 in dict_loc_result.items():
# print('L2',k47)
# pprint(v47)
# for kk, vv in v47.items():
# print('L2 ', k47, kk, vv.get('done','UNDEF'), vv.get('failed','UNDEF'))

data = {'viewParams': request.session['viewParams'], 'resltART': json.dumps(list2view, cls=DateEncoder)}

return render(request, 'artmonitviewDemo.html', data, content_type='text/html')
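The reworked view replaces bare requests.get calls and print statements with a bounded retry loop plus logging. A compact sketch of that pattern as a helper, with the attempt counter advanced inside the loop so the retry actually terminates, and a short pause between tries; the pause and the helper name are assumptions, not part of the commit:

    import logging
    import time

    import requests

    _logger = logging.getLogger('bigpandamon')

    def fetch_json(url, n_attempts=3, pause=2):
        """Return the decoded JSON body, or None if all attempts fail."""
        for i_attempt in range(n_attempts):
            try:
                r = requests.get(url, verify=False)
            except requests.RequestException as e:
                _logger.exception('attempt %s for %s failed: %s', i_attempt + 1, url, e)
                r = None
            if r is not None and r.status_code == 200:
                return r.json()
            if i_attempt + 1 < n_attempts:
                time.sleep(pause)
        _logger.error('failed to get %s after %s attempts', url, n_attempts)
        return None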
2 changes: 1 addition & 1 deletion core/cachecontroller/mainmenurls.txt
@@ -33,7 +33,7 @@
/bigpandamonitor/
/art/
/errorsscat/
/artmonitview
/artmonitview/
/dash/region/
/dash/region/?jobtype=analy&splitby=jobtype
/dash/region/?jobtype=prod&splitby=jobtype
2 changes: 1 addition & 1 deletion core/ddosprotection.py
@@ -32,7 +32,7 @@ class DDOSMiddleware(object):
]
excepted_views = [
'/grafana/img/', '/payloadlog/', '/statpixel/', '/idds/getiddsfortask/', '/api/dc/staginginfofortask/',
'/art/tasks/',
'/art/tasks/', '/art/overview/'
]
blacklist = ['130.132.21.90', '192.170.227.149']
maxAllowedJSONRequstesParallel = 1
14 changes: 11 additions & 3 deletions core/errorsscattering/views.py
@@ -382,21 +382,29 @@ def errorsScatteringDetailed(request, cloud, reqid):
sum(case when jobstatus = 'finished' then 1 else 0 end) as finishedc,
sum(case when jobstatus in ('finished', 'failed') then 1 else 0 end) as allc,
computingsite, reqid, jeditaskid
from atlas_panda.jobsarchived4 where jeditaskid in (
from {5}.jobsarchived4 where jeditaskid in (
select id from {0} where transactionkey={1}) and modificationtime > to_date('{2}', 'YYYY-MM-DD HH24:MI:SS') and {3}
group by computingsite, jeditaskid, reqid
union
select sum(case when jobstatus = 'failed' then 1 else 0 end) as failedc,
sum(case when jobstatus = 'finished' then 1 else 0 end) as finishedc,
sum(case when jobstatus in ('finished', 'failed') then 1 else 0 end) as allc,
computingsite, reqid, jeditaskid
from atlas_pandaarch.jobsarchived where jeditaskid in (
from {6}.jobsarchived where jeditaskid in (
select id from {0} where transactionkey={1}) and modificationtime > to_date('{2}', 'YYYY-MM-DD HH24:MI:SS') and {3}
group by computingsite, jeditaskid, reqid
) j
where j.allc > 0 and {4}
group by jeditaskid, computingsite, reqid
""".format(tmpTableName, transactionKey, query['modificationtime__castdate__range'][0], jcondition, condition)
""".format(
tmpTableName,
transactionKey,
query['modificationtime__castdate__range'][0],
jcondition,
condition,
settings.DB_SCHEMA_PANDA,
settings.DB_SCHEMA_PANDA_ARCH
)

new_cur.execute(querystr)

2 changes: 1 addition & 1 deletion core/filebrowser/MemoryMonitorPlots.py
@@ -71,7 +71,7 @@ def prMonPlots(request, pandaid=-1):

if len(job) > 0:
job = job[0]
if 'cmtconfig' in job and 'gpu' in job['cmtconfig']:
if 'cmtconfig' in job and job['cmtconfig'] and 'gpu' in job['cmtconfig']:
processor_type = 'gpu'

plots_list = [
(diffs for the remaining changed files were not loaded on this page)
