fix pre-commit checkes
khaledk2 committed Oct 23, 2023
1 parent 64eecbd commit 7450776
Showing 3 changed files with 65 additions and 53 deletions.
13 changes: 6 additions & 7 deletions omero_search_engine/api/stats/urls.py
@@ -18,10 +18,9 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from . import stats
from flask import request, jsonify, make_response
import json
from tools.utils.logs_analyser import get_search_terms
from flask import jsonify, Response
from flask import Response


@stats.route("/", methods=["GET"])
def index():
@@ -31,15 +30,15 @@ def index():
@stats.route("/<resource>/search_terms", methods=["GET"])
def search_terms(resource):
from omero_search_engine import search_omero_app
logs_folder=search_omero_app.config.get("SEARCHENGINE_LOGS_FOLDER")
content=get_search_terms(logs_folder,resource=resource,return_file_content=True)

logs_folder = search_omero_app.config.get("SEARCHENGINE_LOGS_FOLDER")
content = get_search_terms(logs_folder, resource=resource, return_file_content=True)

return Response(
content,
mimetype="text/csv",
headers={
"Content-disposition": "attachment; filename=%s_stats.csv"
% (resource)
"Content-disposition": "attachment; filename=%s_stats.csv" % (resource)
},
)

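For context, the reworked search_terms view in this file returns the per-resource report as a text/csv attachment named <resource>_stats.csv. A minimal client sketch follows; the base URL, the /api/stats path prefix, and the "image" resource name are assumptions for illustration only and are not part of this commit.

import requests

BASE_URL = "http://localhost:5577"  # assumed deployment address, not defined in this commit


def download_search_terms(resource="image", out_file=None):
    # The endpoint sets a Content-disposition attachment header, so the
    # response body can be written straight to disk as a CSV file.
    out_file = out_file or "%s_stats.csv" % resource
    url = "%s/api/stats/%s/search_terms" % (BASE_URL, resource)  # assumed path prefix
    response = requests.get(url, timeout=30)
    response.raise_for_status()
    with open(out_file, "wb") as handle:
        handle.write(response.content)
    return out_file


if __name__ == "__main__":
    print(download_search_terms())
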
101 changes: 56 additions & 45 deletions tools/utils/logs_analyser.py
@@ -2,65 +2,75 @@
import os
import sys
import logging

logging.basicConfig(stream=sys.stdout, level=logging.INFO)

'''
"""
this script read the log file and get the search terms
it analyses the file and produces reports
e.g. csv files contain the search terms
'''
e.g. csv files contain the search terms
"""


def get_search_terms(folder_name,resource=None, return_file_content=False):
logging.info("checking files inisde: %s"%folder_name)
def get_search_terms(folder_name, resource=None, return_file_content=False):
logging.info("checking files inisde: %s" % folder_name)
resourses = {}
for root, dirs, files in os.walk(folder_name):
logging.info("0....%s,%s,%s"%(root,dirs, files))
logging.info("0....%s,%s,%s" % (root, dirs, files))
for file_name in files:
logging.info("1..... checking %s"% file_name)
if file_name.endswith('engine_gunilog.log'):
file_name=os.path.join(root,file_name)
logging.info("1..... checking %s" % file_name)
if file_name.endswith("engine_gunilog.log"):
file_name = os.path.join(root, file_name)
logging.info("2..... checking %s" % file_name)
analyse_log_file(file_name,resourses)
analyse_log_file(file_name, resourses)
logging.info("Write the reports")
contents=write_reports(resourses,resource, return_file_content,os.path.join(folder_name,"report.csv"))
contents = write_reports(
resourses,
resource,
return_file_content,
os.path.join(folder_name, "report.csv"),
)
if return_file_content:
return contents

def analyse_log_file(file_name,resourses):
#file_name="/mnt/d/logs/engine_gunilog.log"
logging.info ("Analyse: %s"%file_name)

def analyse_log_file(file_name, resourses):
# file_name="/mnt/d/logs/engine_gunilog.log"
logging.info("Analyse: %s" % file_name)
f = open(file_name, "r")
contents=f.read()
logs=contents.split("INFO in query_handler: -------------------------------------------------")
contents = f.read()
logs = contents.split(
"INFO in query_handler: -------------------------------------------------"
)
f.close()

failes=0
suc=0
co=0
filters=[]
for i in range (0, len(logs),2):
cont=logs[i].split(("\n"))
lo=cont[1].split("in query_handler:")
ss="{'and_filters':"+ lo[-1].split("{'and_filters':")[-1]
failes = 0
suc = 0
co = 0
filters = []
for i in range(0, len(logs), 2):
cont = logs[i].split(("\n"))
lo = cont[1].split("in query_handler:")
ss = "{'and_filters':" + lo[-1].split("{'and_filters':")[-1]
if "[20]" in ss:
continue
co+=1
ss=ss.replace("'", '"').replace('False', 'false').replace('None','"None"')
co += 1
ss = ss.replace("'", '"').replace("False", "false").replace("None", '"None"')
try:
filters.append(json.loads(ss,strict=False))
suc=suc+1
except:
failes=failes+1

filters.append(json.loads(ss, strict=False))
suc = suc + 1
except Exception as e:
print(str(e))
failes = failes + 1

for filter in filters:
check_filters(filter.get("and_filters"), resourses)
for or_f in filter.get("or_filters"):
check_filters(or_f, resourses)

def check_filters(conds,resourses):
for cond in conds:

def check_filters(conds, resourses):
for cond in conds:
if cond.get("resource") in resourses:
names_values = resourses[cond.get("resource")]
else:
@@ -73,26 +83,27 @@ def check_filters(conds,resourses):
else:
names_values[name] = [value]

def write_reports(resourses, resource, return_file_content,file_name):

def write_reports(resourses, resource, return_file_content, file_name):
for res, itms in resourses.items():
lines = ["key,total hits,unique hits"]
for name,values in itms.items():
line=[name]
vv=[]
for name, values in itms.items():
line = [name]
vv = []
for val in values:
if val not in vv:
vv.append(val)
line.insert(1,str(len(values)))
line.insert(1, str(len(values)))
line.insert(2, str(len(vv)))
lines.append(','.join(line))
contents="\n".join(lines)
lines.append(",".join(line))
contents = "\n".join(lines)
if return_file_content:
if res==resource:
print ("================================")
print (resource, return_file_content)
print ("================================")
if res == resource:
print("================================")
print(resource, return_file_content)
print("================================")
return contents
else:
f = open(file_name.replace(".csv","_%s.csv"%res), "w")
f = open(file_name.replace(".csv", "_%s.csv" % res), "w")
f.write(contents)
f.close()
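
The reformatted get_search_terms walks the configured logs folder, parses every file ending in engine_gunilog.log, and either returns one resource's report as CSV text or writes a report_<resource>.csv file per resource into that folder. A minimal standalone usage sketch, where the logs folder path and the "image" resource name are assumptions for illustration only:

from tools.utils.logs_analyser import get_search_terms

logs_folder = "/etc/searchengine/logs"  # assumed location of the gunicorn log files

# With return_file_content=True the report for the named resource comes back as a
# CSV string whose header row is "key,total hits,unique hits"; with the default
# False, report_<resource>.csv files are written into the logs folder instead.
csv_report = get_search_terms(logs_folder, resource="image", return_file_content=True)
print(csv_report)
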
4 changes: 3 additions & 1 deletion tools/utils/util.py
@@ -34,7 +34,9 @@ def copy_tools_subfolder():
destination_folder = "/etc/searchengine/"
if not os.path.isdir(destination_folder):
destination_folder = os.path.expanduser("~")
destination_folder = os.path.join(destination_folder, "searchengine/maintenance_scripts")
destination_folder = os.path.join(
destination_folder, "searchengine/maintenance_scripts"
)

if not os.path.isdir(destination_folder):
shutil.copytree(subfolder, destination_folder)
