diff --git a/modules/connectivity.py b/modules/connectivity.py
index 41248fa..d08a872 100644
--- a/modules/connectivity.py
+++ b/modules/connectivity.py
@@ -19,19 +19,22 @@ def test_db_connections(self):
db_errors = []
for connection in all_connections:
- if connection.name not in ('looker', 'looker__ilooker', 'looker__internal__analytics'):
- if connection.dialect:
- connection_tests = {'tests': connection.dialect.connection_tests}
- for test in connection_tests['tests']:
- try:
- db_validation = self.looker_client.test_connection(connection.name, test)
- for item in db_validation:
- if item.status != 'success':
- db_errors.append("Database Connection {} Test '{}' returned '{}'".format(connection.name,
- item.name,
- item.message))
- except Exception as e:
- print("Database Connection test for {} failed due to {}".format(connection.name, e))
+ if (
+ connection.name
+ not in ('looker', 'looker__ilooker', 'looker__internal__analytics')
+ and connection.dialect
+ ):
+ connection_tests = {'tests': connection.dialect.connection_tests}
+ for test in connection_tests['tests']:
+ try:
+ db_validation = self.looker_client.test_connection(connection.name, test)
+ for item in db_validation:
+ if item.status != 'success':
+ db_errors.append("Database Connection {} Test '{}' returned '{}'".format(connection.name,
+ item.name,
+ item.message))
+ except Exception as e:
+ print("Database Connection test for {} failed due to {}".format(connection.name, e))
return db_errors
def count_all_integrations(self):
@@ -53,9 +56,6 @@ def test_integrations(self):
integration_errors.append("SUCCESS - Integration {} connectivity test".format(elem.label))
except Exception as e:
integration_errors.append("Integration {} connectivity test could not run due to {}".format(elem.label, e))
- else:
- # integration_errors.append("Integration {} not enabled.".format(elem.label))
- pass
return integration_errors
def count_all_datagroups(self):
@@ -65,10 +65,10 @@ def count_all_datagroups(self):
def test_datagroups(self):
'''Tests the datagroups and returns failures'''
all_datagroups = self.looker_client.all_datagroups()
- group_errors = []
- for elem in all_datagroups:
- if elem.trigger_error:
- group_errors.append("Datagroup \"{}\" on model \"{}\" has this error:\t{}".format(elem.name,
- elem.model_name,
- elem.trigger_error))
- return group_errors
\ No newline at end of file
+ return [
+ "Datagroup \"{}\" on model \"{}\" has this error:\t{}".format(
+ elem.name, elem.model_name, elem.trigger_error
+ )
+ for elem in all_datagroups
+ if elem.trigger_error
+ ]
\ No newline at end of file
diff --git a/modules/content.py b/modules/content.py
index 78a92ac..60534c4 100644
--- a/modules/content.py
+++ b/modules/content.py
@@ -15,7 +15,7 @@ def validate_content(self):
'''Runs the Content Validator and returns failures'''
error = self.looker_client.content_validation().content_with_errors
looks_error, dash_errors = [], []
- for i in range(0, len(error)):
+ for i in range(len(error)):
error_msg = re.sub(r'\[ContentValidationError\(message=', '', str(error[i].errors))
error_msg = re.sub(r'\, field_name=.*', '', error_msg)
if error[i].look:
diff --git a/modules/email_content.py b/modules/email_content.py
index 6da6e40..4b770a7 100644
--- a/modules/email_content.py
+++ b/modules/email_content.py
@@ -5,7 +5,7 @@
def email_body(looker_url, looker_version, total_users, total_projects, total_erring_content, total_schedules,
total_pdt_errors, pdt_less_30, pdt_30_60, pdt_over_60, unlimited_queries, total_connections, total_integrations, total_datagroups):
'''Generates the summary body for the email'''
- body = '''
+ return '''
Today's Report:
Looker instance: {}
Looker version: {}
@@ -41,19 +41,22 @@ def email_body(looker_url, looker_version, total_users, total_projects, total_er
Find more information go to the repo
Something Wrong? Tell us
- '''.format(looker_version,
- looker_url,
- total_users,
- total_projects,
- total_erring_content,
- total_schedules,
- total_pdt_errors,
- pdt_less_30, pdt_30_60, pdt_over_60,
- unlimited_queries,
- total_connections,
- total_integrations,
- total_datagroups)
- return body
+ '''.format(
+ looker_version,
+ looker_url,
+ total_users,
+ total_projects,
+ total_erring_content,
+ total_schedules,
+ total_pdt_errors,
+ pdt_less_30,
+ pdt_30_60,
+ pdt_over_60,
+ unlimited_queries,
+ total_connections,
+ total_integrations,
+ total_datagroups,
+ )
def email_attachment(looker_version, looker_url, total_users, user_details, total_projects,
@@ -85,12 +88,12 @@ def email_attachment(looker_version, looker_url, total_users, user_details, tota
list_errors_schedules = list_errors_schedules,
total_pdt_errors = total_pdt_errors,
list_pdt_errors = list_pdt_errors,
-
+
unlimited_queries = unlimited_queries,
report_url = report_url,
is_clustered = is_clustered,
list_nodes = list_nodes,
-
+
total_connections = total_connections,
list_errors_connections = list_errors_connections,
@@ -100,9 +103,8 @@ def email_attachment(looker_version, looker_url, total_users, user_details, tota
total_datagroups = total_datagroups,
list_errors_datagroups = list_errors_datagroups
)
- html_file = open('./modules/rendering/rendered_version.html', 'w')
- html_file.write(output_text)
- html_file.close()
+ with open('./modules/rendering/rendered_version.html', 'w') as html_file:
+ html_file.write(output_text)
HTML(filename='./modules/rendering/rendered_version.html').write_pdf('./modules/rendering/final_attachment.pdf',
stylesheets=[CSS(string='@page {{ font-family:arial, serif; font-size: 6; }}')])
diff --git a/modules/general.py b/modules/general.py
index 91db79b..efb8807 100644
--- a/modules/general.py
+++ b/modules/general.py
@@ -17,25 +17,22 @@ def get_looker_instance():
config = configparser.ConfigParser()
config.read('looker.ini')
config_details = dict(config['Looker'])
- instance_url = regex_base_url(config_details['base_url'])
- return instance_url
+ return regex_base_url(config_details['base_url'])
else:
LOOKERSDK_BASE_URL = os.environ.get('LOOKERSDK_BASE_URL')
- instance_url = regex_base_url(LOOKERSDK_BASE_URL)
- return instance_url
+ return regex_base_url(LOOKERSDK_BASE_URL)
def format_output(function_results):
'''Formats list of errors in Looker to display first 20 elements'''
if isinstance(function_results, (list)):
- if len(function_results) >=1:
- formatted_results = function_results[:20]
- formatted_results.append('...')
- return formatted_results
- else:
+ if len(function_results) < 1:
return ['No issues found.']
+ formatted_results = function_results[:20]
+ formatted_results.append('...')
+ return formatted_results
elif isinstance(function_results, (tuple)):
formatted_results = list(function_results)[:20]
formatted_results.append('...')
- return formatted_results
+ return formatted_results
else:
return function_results
\ No newline at end of file
diff --git a/modules/performance.py b/modules/performance.py
index 7d47788..1b28fe4 100644
--- a/modules/performance.py
+++ b/modules/performance.py
@@ -22,25 +22,25 @@ def unlimited_downloads(self):
"query.limit"
],
filters = {
- "history.created_time": "24 hours",
- "history.source": "-regenerator,-suggest",
- "query.limit": ">5000"
+ "history.created_time": "24 hours",
+ "history.source": "-regenerator,-suggest",
+ "query.limit": ">5000"
},
sorts = ["history.created_time desc"],
limit = "500"
)
unltd_downloads = self.looker_client.create_query(body)
unlimited_downloads = self.looker_client.run_query(unltd_downloads.id, result_format='json')
- if unlimited_downloads:
- unltd_source, unltd_users = [], []
- for unltd_query in json.loads(unlimited_downloads):
- unltd_source.append(unltd_query['history.source'])
- unltd_users.append(unltd_query['user.id'])
- results = "{} users have ran queries with more than 5000 rows from these sources: {}".format(len(list(set(unltd_users))), list(set(unltd_source)))
- return results, unltd_downloads.share_url
- else:
+ if not unlimited_downloads:
return None, unltd_downloads.share_url
+ unltd_source, unltd_users = [], []
+ for unltd_query in json.loads(unlimited_downloads):
+ unltd_source.append(unltd_query['history.source'])
+ unltd_users.append(unltd_query['user.id'])
+        results = "{} users have run queries with more than 5000 rows from these sources: {}".format(len(set(unltd_users)), list(set(unltd_source)))
+ return results, unltd_downloads.share_url
+
def check_if_clustered(self):
'''Check is Looker is clustered settup'''
body = models.WriteQuery(
@@ -54,44 +54,42 @@ def check_if_clustered(self):
cluster_check = self.looker_client.create_query(body)
check_clustered = self.looker_client.run_query(cluster_check.id, result_format='json')
nodes_count = len(json.loads(check_clustered))
- node_is_cluster = []
- for node in json.loads(check_clustered):
- node_is_cluster.append(node['node.clustered'])
- is_clustered = nodes_count > 1 and list(set(node_is_cluster))[0] == "Yes"
- return is_clustered #, len(node_is_cluster)
+ node_is_cluster = [
+ node['node.clustered'] for node in json.loads(check_clustered)
+ ]
+        return nodes_count > 1 and all(status == "Yes" for status in node_is_cluster)
def nodes_matching(self):
'''For clusters, checks nodes are on same version'''
body = models.WriteQuery(
- model = "system__activity",
- view = "history",
- fields = [
- "node.id",
- "node.version",
- "node.last_heartbeat_time",
- "node.last_heartbeat_time"
- ],
- filters = {
- "node.last_heartbeat_date": "1 days"
- },
- sorts = ["node.last_heartbeat_time desc"],
- limit = "500",
- vis_config = {
- "hidden_fields": ["node.id","node.version","node.last_heartbeat_time","most_recent_heartbeat","node.count"]
- },
- dynamic_fields = "[{\"table_calculation\":\"most_recent_heartbeat\",\"label\":\"most_recent_heartbeat\",\"expression\":\"diff_minutes(${node.last_heartbeat_time}, now())\",\"value_format\":null,\"value_format_name\":null,\"_kind_hint\":\"dimension\",\"_type_hint\":\"number\"},{\"table_calculation\":\"node_version_at_last_beat\",\"label\":\"node_version_at_last_beat\",\"expression\":\"if(diff_minutes(${node.last_heartbeat_time}, now()) > ${most_recent_heartbeat}*1.10 OR diff_minutes(${node.last_heartbeat_time}, now()) < ${most_recent_heartbeat}*0.90, ${node.version}, null)\",\"value_format\":null,\"value_format_name\":null,\"_kind_hint\":\"dimension\",\"_type_hint\":\"string\"}]" )
+ model = "system__activity",
+ view = "history",
+ fields = [
+ "node.id",
+ "node.version",
+ "node.last_heartbeat_time",
+ "node.last_heartbeat_time"
+ ],
+ filters = {
+ "node.last_heartbeat_date": "1 days"
+ },
+ sorts = ["node.last_heartbeat_time desc"],
+ limit = "500",
+ vis_config = {
+ "hidden_fields": ["node.id","node.version","node.last_heartbeat_time","most_recent_heartbeat","node.count"]
+ },
+ dynamic_fields = "[{\"table_calculation\":\"most_recent_heartbeat\",\"label\":\"most_recent_heartbeat\",\"expression\":\"diff_minutes(${node.last_heartbeat_time}, now())\",\"value_format\":null,\"value_format_name\":null,\"_kind_hint\":\"dimension\",\"_type_hint\":\"number\"},{\"table_calculation\":\"node_version_at_last_beat\",\"label\":\"node_version_at_last_beat\",\"expression\":\"if(diff_minutes(${node.last_heartbeat_time}, now()) > ${most_recent_heartbeat}*1.10 OR diff_minutes(${node.last_heartbeat_time}, now()) < ${most_recent_heartbeat}*0.90, ${node.version}, null)\",\"value_format\":null,\"value_format_name\":null,\"_kind_hint\":\"dimension\",\"_type_hint\":\"string\"}]" )
node_check = self.looker_client.create_query(body)
nodes_versions = self.looker_client.run_query(node_check.id, result_format='json')
- results = []
- for version in json.loads(nodes_versions):
- if version['node_version_at_last_beat']: # to exclude older heartbeat checks with None values
- results.append(version['node_version_at_last_beat'])
-
+ results = [
+ version['node_version_at_last_beat']
+ for version in json.loads(nodes_versions)
+ if version['node_version_at_last_beat']
+ ]
diff_node_version = []
if len(list(set(results))) == 1:
diff_node_version.append("All {} Nodes found on same Looker version".format(len(results)))
- return diff_node_version
- else:
+ else:
for k,v in Counter(results).items():
diff_node_version.append("{} nodes found on version {}".format(v,k))
- return diff_node_version
\ No newline at end of file
+ return diff_node_version
\ No newline at end of file
diff --git a/modules/projects.py b/modules/projects.py
index ffcbe86..e9ec227 100644
--- a/modules/projects.py
+++ b/modules/projects.py
@@ -15,10 +15,7 @@ def count_all_projects(self):
def all_projects(self):
'''Returns the list of projects'''
my_projects = self.looker_client.all_projects(fields='id,name')
- project_ids = []
- for i in range(0, len(my_projects)):
- project_ids.append(my_projects[i].id)
- return project_ids
+        return [project.id for project in my_projects]
def validate_lookml(self, project_id):
'''Returns LookML validation errors'''
@@ -48,8 +45,6 @@ def run_git_test(self, project_id):
one_test = self.looker_client.run_git_connection_test(project_id, test_id.id)
if one_test.status != "pass":
git_tests.append("Test ID: {} failed on Project: {}".format(test_id.id, project_id))
- else:
- pass
# change session back to 'production'
self.looker_client.update_session({"workspace_id": "production"})
return git_tests
@@ -75,8 +70,5 @@ def get_lookml_test(self, project_id):
def run_lookml_test(self, project_id):
'''Runs the LookML tests'''
results = self.looker_client.run_lookml_test(project_id)
- if not results:
- pass
- # return "No test set up on Project: {}".format(project_id)
- else:
+ if results:
return results
diff --git a/modules/schedules.py b/modules/schedules.py
index 47b0804..595784a 100644
--- a/modules/schedules.py
+++ b/modules/schedules.py
@@ -35,9 +35,13 @@ def get_failed_schedules(self):
)
schedules_query = self.looker_client.create_query(body)
failed_schedules = self.looker_client.run_query(schedules_query.id, result_format='json')
- cleaned_errors = []
- for elem in json.loads(failed_schedules):
- cleaned_errors.append("Schedule \'{}\' failed to send to {}".format(elem['scheduled_job.name'], elem['scheduled_plan_destination.type']))
+ cleaned_errors = [
+ "Schedule \'{}\' failed to send to {}".format(
+ elem['scheduled_job.name'], elem['scheduled_plan_destination.type']
+ )
+ for elem in json.loads(failed_schedules)
+ ]
+
if failed_schedules:
cleaned_errors = list(set(cleaned_errors)) # set to remove duplicates
return cleaned_errors, len(json.loads(failed_schedules))
@@ -56,10 +60,13 @@ def get_pdts_status(self):
)
failed_pdts = self.looker_client.create_query(body)
failed_pdts_list = self.looker_client.run_query(failed_pdts.id, result_format='json')
- cleaned_errors = []
- for elem in json.loads(failed_pdts_list):
- # cleaned_errors.append("PDT \'{}\' failed with error: {}".format(elem['pdt_event_log.view_name'], elem['error_message']))
- cleaned_errors.append("PDT \'{}\' failed on connection: {}".format(elem['pdt_event_log.view_name'], elem['pdt_event_log.connection']))
+ cleaned_errors = [
+ "PDT \'{}\' failed on connection: {}".format(
+ elem['pdt_event_log.view_name'], elem['pdt_event_log.connection']
+ )
+ for elem in json.loads(failed_pdts_list)
+ ]
+
if failed_pdts_list:
cleaned_errors = list(set(cleaned_errors)) # set to remove duplicates
# len(json.loads(failed_pdts_list)) will return the number of failures (# of PDTs * build attempts)
diff --git a/modules/send_email.py b/modules/send_email.py
index fbaf65d..46f6c2b 100644
--- a/modules/send_email.py
+++ b/modules/send_email.py
@@ -13,9 +13,7 @@ def send_report_out(content):
try:
# format env variable like: 'example1@mail.com,example2@mail.com'
email_list = os.environ.get('THEMIS_EMAIL_RECIPIENTS')
- to_emails = []
- for email in email_list.split(','):
- to_emails.append(email)
+        to_emails = email_list.split(',')
except Exception as e:
print("Missing THEMIS_EMAIL_RECIPIENTS Variables {}".format(e))
diff --git a/modules/users.py b/modules/users.py
index 38689f2..9fb50f4 100644
--- a/modules/users.py
+++ b/modules/users.py
@@ -22,13 +22,9 @@ def count_all_users(self):
def get_users_issue(self):
'''Returns locked out users for the Looker instance'''
all_users = self.looker_client.all_users(fields='id, is_disabled')
- disabled_users = []
+ disabled_users = [i for i in all_users if i.is_disabled]
- for i in all_users:
- if i.is_disabled:
- disabled_users.append(i)
- user_results = []
- user_results.append("Disabled Looker users: {}".format(len(disabled_users)))
+ user_results = ["Disabled Looker users: {}".format(len(disabled_users))]
locked_out = self.looker_client.all_user_login_lockouts()
user_results.append("Locked Out Looker users: {}".format(len(locked_out)))
return user_results
diff --git a/tests/test_setup.py b/tests/test_setup.py
index e0469f6..f7beb57 100644
--- a/tests/test_setup.py
+++ b/tests/test_setup.py
@@ -13,10 +13,12 @@ class LookerSDKTestCase(unittest.TestCase):
def test_looker_credentials(self):
'''Tests if a looker.ini file is created in the top level dir. Will fail if using env var'''
parent_dir = Path(__file__).parents[1]
- cred_file = []
- for file in os.listdir(parent_dir):
- if file.endswith(".ini"):
- cred_file.append(os.path.join(parent_dir, file))
+ cred_file = [
+ os.path.join(parent_dir, file)
+ for file in os.listdir(parent_dir)
+ if file.endswith(".ini")
+ ]
+
self.assertTrue(cred_file or self.__class__.base_url and self.__class__.api_id and self.__class__.api_secret, msg="Looker Credentials Not Found")
@@ -28,8 +30,8 @@ def test_recipients_env_var(self):
def test_emails_format(self):
'''Tests the format for recipient emails'''
email_format_ok = []
+ regex = r'^\w+([\.-]?\w+)*@\w+([\.-]?\w+)*(\.\w{2,3})+$'
for email in self.__class__.email_list.split(','):
- regex = r'^\w+([\.-]?\w+)*@\w+([\.-]?\w+)*(\.\w{2,3})+$'
email = email.strip()
if not (re.search(regex, str(email))):
email_format_ok.append(email_format_ok)