From d3dedebf2d6b0d2aef0b1205016f75d3d3cd7dd6 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 28 Jun 2019 15:25:22 -0500 Subject: [PATCH 01/63] Update python version in docker container --- docker/compose/mozdef_base/Dockerfile | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docker/compose/mozdef_base/Dockerfile b/docker/compose/mozdef_base/Dockerfile index 7df5c5496..55b567616 100644 --- a/docker/compose/mozdef_base/Dockerfile +++ b/docker/compose/mozdef_base/Dockerfile @@ -21,11 +21,11 @@ RUN \ useradd -ms /bin/bash -d /opt/mozdef -m mozdef && \ mkdir /opt/mozdef/envs && \ cd /opt/mozdef && \ - yum install -y python \ - python-devel \ - python-pip && \ + yum install -y python36 \ + python36-devel \ + python36-pip && \ yum clean all && \ - pip install virtualenv && \ + pip3 install virtualenv && \ mkdir /opt/mozdef/envs/mozdef && \ mkdir /opt/mozdef/envs/mozdef/cron @@ -46,7 +46,7 @@ RUN chown -R mozdef:mozdef /opt/mozdef/ USER mozdef RUN \ - virtualenv /opt/mozdef/envs/python && \ + virtualenv -p /usr/bin/python3.6 /opt/mozdef/envs/python && \ source /opt/mozdef/envs/python/bin/activate && \ pip install -r /opt/mozdef/envs/mozdef/requirements.txt && \ cd /opt/mozdef/envs/mozdef/mozdef_util && \ From 3a2220df7dba85bbcc405dc4d1575b221ac7863a Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 28 Jun 2019 15:25:36 -0500 Subject: [PATCH 02/63] Update python version definition in travisci --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 6b8743413..61128fd93 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,6 +1,6 @@ language: python python: - - "2.7" + - "3.6" sudo: required services: - docker From 392a44d311df39f50c1ed0fc659bdde37fc8a422 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 28 Jun 2019 16:19:05 -0500 Subject: [PATCH 03/63] Update mozdef-util version to support python3 install --- requirements.txt | 9 ++++----- 1 file changed, 4 insertions(+), 5 
deletions(-) diff --git a/requirements.txt b/requirements.txt index 9166cc4c9..2a59f2ada 100644 --- a/requirements.txt +++ b/requirements.txt @@ -25,15 +25,15 @@ google-api-python-client==1.4.0 hjson==2.0.2 httplib2==0.9.2 idna==2.6 -ipaddr==2.1.11 +ipaddr==2.2.0 ipaddress==1.0.17 ipwhois==0.15.0 jmespath==0.9.3 kombu==4.1.0 meld3==1.0.2 mozdef-client==1.0.11 -mozdef-util==2.0.0 -netaddr==0.7.1 +mozdef-util==2.0.3 +netaddr==0.7.19 nose==1.3.7 oauth2client==1.4.12 packaging==16.8 @@ -51,12 +51,11 @@ requests-jwt==0.5.3 requests==2.20.0 requests-futures==0.9.7 rsa==3.1.4 -s3cmd==1.0.1 setuptools-scm==1.11.1 simplejson==3.13.2 six==1.11.0 slackclient==1.0.9 -supervisor==3.3.1 +supervisor==4.0.3 tzlocal==1.4 uritemplate==0.6 urllib3==1.24.3 From f11349f7fc76c7e0da906f9ba09127a3c8e11e0f Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 28 Jun 2019 15:39:09 -0500 Subject: [PATCH 04/63] Update mozdef-util version to 2.0.1 --- mozdef_util/HISTORY.rst | 7 +++++++ mozdef_util/setup.py | 9 ++++++--- requirements.txt | 2 +- 3 files changed, 14 insertions(+), 4 deletions(-) diff --git a/mozdef_util/HISTORY.rst b/mozdef_util/HISTORY.rst index 7f1374546..43009e022 100644 --- a/mozdef_util/HISTORY.rst +++ b/mozdef_util/HISTORY.rst @@ -55,8 +55,15 @@ Add is_ip utility function * Transition away from custom _type for elasticsearch documents + 2.0.0 (2019-06-27) ------------------ * Add support for Elasticsearch 6 * Remove support for Elasticsearch 5 + + +2.0.1 (2019-06-28) +------------------ + +* Fixed setup.py relative file paths diff --git a/mozdef_util/setup.py b/mozdef_util/setup.py index 48730eb6d..bf0013afd 100644 --- a/mozdef_util/setup.py +++ b/mozdef_util/setup.py @@ -3,12 +3,15 @@ """The setup script.""" +import os from setuptools import setup, find_packages -with open('README.rst') as readme_file: +readme_path = os.path.join(os.path.dirname(__file__), 'README.rst') +with open(readme_path) as readme_file: readme = readme_file.read() -with open('HISTORY.rst') as 
history_file: +history_path = os.path.join(os.path.dirname(__file__), 'HISTORY.rst') +with open(history_path) as history_file: history = history_file.read() requirements = [ @@ -56,6 +59,6 @@ test_suite='tests', tests_require=[], url='https://github.com/mozilla/MozDef/tree/master/lib', - version='2.0.0', + version='2.0.1', zip_safe=False, ) diff --git a/requirements.txt b/requirements.txt index 2a59f2ada..66aa63782 100644 --- a/requirements.txt +++ b/requirements.txt @@ -32,7 +32,7 @@ jmespath==0.9.3 kombu==4.1.0 meld3==1.0.2 mozdef-client==1.0.11 -mozdef-util==2.0.3 +mozdef-util==2.0.1 netaddr==0.7.19 nose==1.3.7 oauth2client==1.4.12 From 20de95925613fef18603402741339e664fbb0e14 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 28 Jun 2019 15:56:11 -0500 Subject: [PATCH 05/63] Fix setup file for version 2.0.2 --- mozdef_util/setup.py | 3 ++- requirements.txt | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/mozdef_util/setup.py b/mozdef_util/setup.py index bf0013afd..2e7aa11da 100644 --- a/mozdef_util/setup.py +++ b/mozdef_util/setup.py @@ -52,6 +52,7 @@ license='MPL-2.0', long_description=readme + '\n\n' + history, include_package_data=True, + package_data={'mozdef_util': ['HISTORY.rst', 'README.rst']}, keywords='mozdef_util', name='mozdef_util', packages=find_packages(), @@ -59,6 +60,6 @@ test_suite='tests', tests_require=[], url='https://github.com/mozilla/MozDef/tree/master/lib', - version='2.0.1', + version='2.0.2', zip_safe=False, ) diff --git a/requirements.txt b/requirements.txt index 66aa63782..08fda8f85 100644 --- a/requirements.txt +++ b/requirements.txt @@ -32,7 +32,7 @@ jmespath==0.9.3 kombu==4.1.0 meld3==1.0.2 mozdef-client==1.0.11 -mozdef-util==2.0.1 +mozdef-util==2.0.2 netaddr==0.7.19 nose==1.3.7 oauth2client==1.4.12 From aeb04200c7df664bb96721e50e525b369f86a221 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 28 Jun 2019 16:15:34 -0500 Subject: [PATCH 06/63] Fix import static files in setup.py for 2.0.3 --- 
mozdef_util/HISTORY.rst | 12 ++++++++++++ mozdef_util/MANIFEST.in | 2 ++ mozdef_util/setup.py | 3 +-- requirements.txt | 2 +- 4 files changed, 16 insertions(+), 3 deletions(-) create mode 100644 mozdef_util/MANIFEST.in diff --git a/mozdef_util/HISTORY.rst b/mozdef_util/HISTORY.rst index 43009e022..8fe5ce3bb 100644 --- a/mozdef_util/HISTORY.rst +++ b/mozdef_util/HISTORY.rst @@ -67,3 +67,15 @@ Add is_ip utility function ------------------ * Fixed setup.py relative file paths + + +2.0.2 (2019-06-28) +------------------ + +* Attempted fix at including static files + + +2.0.3 (2019-06-28) +------------------ + +* Fixed static file includes in python package diff --git a/mozdef_util/MANIFEST.in b/mozdef_util/MANIFEST.in new file mode 100644 index 000000000..5e734eb2b --- /dev/null +++ b/mozdef_util/MANIFEST.in @@ -0,0 +1,2 @@ +include HISTORY.rst +include README.rst diff --git a/mozdef_util/setup.py b/mozdef_util/setup.py index 2e7aa11da..2363dc786 100644 --- a/mozdef_util/setup.py +++ b/mozdef_util/setup.py @@ -52,7 +52,6 @@ license='MPL-2.0', long_description=readme + '\n\n' + history, include_package_data=True, - package_data={'mozdef_util': ['HISTORY.rst', 'README.rst']}, keywords='mozdef_util', name='mozdef_util', packages=find_packages(), @@ -60,6 +59,6 @@ test_suite='tests', tests_require=[], url='https://github.com/mozilla/MozDef/tree/master/lib', - version='2.0.2', + version='2.0.3', zip_safe=False, ) diff --git a/requirements.txt b/requirements.txt index 08fda8f85..2a59f2ada 100644 --- a/requirements.txt +++ b/requirements.txt @@ -32,7 +32,7 @@ jmespath==0.9.3 kombu==4.1.0 meld3==1.0.2 mozdef-client==1.0.11 -mozdef-util==2.0.2 +mozdef-util==2.0.3 netaddr==0.7.19 nose==1.3.7 oauth2client==1.4.12 From f1c4287fa58ff1a398a9733d32ed8409df03ddcd Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 28 Jun 2019 13:13:28 -0500 Subject: [PATCH 07/63] Ensure parenthesis for print statements --- alerts/celeryconfig.py | 2 +- .../mozdef_bootstrap/files/initial_setup.py | 
18 +++++++++--------- mq/esworker_cloudtrail.py | 1 - tests/conftest.py | 2 +- 4 files changed, 11 insertions(+), 12 deletions(-) diff --git a/alerts/celeryconfig.py b/alerts/celeryconfig.py index 696520604..cdf396c3e 100644 --- a/alerts/celeryconfig.py +++ b/alerts/celeryconfig.py @@ -52,7 +52,7 @@ # Load logging config dictConfig(LOGGING) -# print CELERYBEAT_SCHEDULE +# print(CELERYBEAT_SCHEDULE) # Optional configuration, see the application user guide. # app.conf.update( diff --git a/docker/compose/mozdef_bootstrap/files/initial_setup.py b/docker/compose/mozdef_bootstrap/files/initial_setup.py index 4e4bba75c..3b54d8d19 100644 --- a/docker/compose/mozdef_bootstrap/files/initial_setup.py +++ b/docker/compose/mozdef_bootstrap/files/initial_setup.py @@ -34,7 +34,7 @@ if esserver is None: esserver = args.esserver esserver = esserver.strip('/') -print "Connecting to " + esserver +print("Connecting to " + esserver) client = ElasticsearchClient(esserver) kibana_url = os.environ.get('OPTIONS_KIBANAURL', args.kibana_url) @@ -72,12 +72,12 @@ try: all_indices = client.get_indices() except ConnectionError: - print 'Unable to connect to Elasticsearch...retrying' + print('Unable to connect to Elasticsearch...retrying') sleep(5) else: break else: - print 'Cannot connect to Elasticsearch after ' + str(total_num_tries) + ' tries, exiting script.' 
+ print('Cannot connect to Elasticsearch after ' + str(total_num_tries) + ' tries, exiting script.') exit(1) refresh_interval = getConfig('refresh_interval', '1s', args.backup_conf_file) @@ -102,26 +102,26 @@ # Create initial indices if event_index_name not in all_indices: - print "Creating " + event_index_name + print("Creating " + event_index_name) client.create_index(event_index_name, index_config=index_settings) client.create_alias('events', event_index_name) if previous_event_index_name not in all_indices: - print "Creating " + previous_event_index_name + print("Creating " + previous_event_index_name) client.create_index(previous_event_index_name, index_config=index_settings) client.create_alias('events-previous', previous_event_index_name) if alert_index_name not in all_indices: - print "Creating " + alert_index_name + print("Creating " + alert_index_name) client.create_index(alert_index_name, index_config=index_settings) client.create_alias('alerts', alert_index_name) if weekly_index_alias not in all_indices: - print "Creating " + weekly_index_alias + print("Creating " + weekly_index_alias) client.create_alias_multiple_indices(weekly_index_alias, [event_index_name, previous_event_index_name]) if state_index_name not in all_indices: - print "Creating " + state_index_name + print("Creating " + state_index_name) client.create_index(state_index_name, index_config=state_index_settings) # Wait for kibana service to get ready @@ -156,7 +156,7 @@ with open(json_file_path) as json_data: mapping_data = json.load(json_data) index_name = mapping_data['attributes']['title'] - print "Creating {0} index mapping".format(index_name) + print("Creating {0} index mapping".format(index_name)) mapping_url = kibana_url + "/api/saved_objects/index-pattern/" + index_name resp = requests.post(url=mapping_url, data=json.dumps(mapping_data), headers=kibana_headers) if not resp.ok: diff --git a/mq/esworker_cloudtrail.py b/mq/esworker_cloudtrail.py index 2e2999438..ec7b2481b 100755 --- 
a/mq/esworker_cloudtrail.py +++ b/mq/esworker_cloudtrail.py @@ -104,7 +104,6 @@ def assume_role(self, policy=policy).credentials logger.debug("Assumed new role with credential %s" % self.credentials[role_arn].to_dict()) except Exception, e: - print e logger.error("Unable to assume role %s due to exception %s" % (role_arn, e.message)) self.credentials[role_arn] = False return self.credentials[role_arn] diff --git a/tests/conftest.py b/tests/conftest.py index 428f7a741..2ba78f23d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -41,5 +41,5 @@ def pytest_configure(config): warning_text += "\n** WARNING - The --delete_queues flag has been set. We will be purging RabbitMQ queues before test execution**\n" warning_text += "Continuing the unit test execution in 10 seconds...CANCEL ME IF YOU DO NOT WANT PREVIOUS QUEUES PURGED!!! **\n" - print warning_text + print(warning_text) time.sleep(10) From ddb34907e9677a3cb17921d47fc490df51630f8e Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 28 Jun 2019 14:01:53 -0500 Subject: [PATCH 08/63] Ensure dot notation for local package imports --- mozdef_util/mozdef_util/elasticsearch_client.py | 8 ++++---- mozdef_util/mozdef_util/event.py | 2 +- mozdef_util/mozdef_util/plugin_set.py | 4 ++-- mozdef_util/mozdef_util/query_models/search_query.py | 4 ++-- mozdef_util/mozdef_util/utilities/logger.py | 2 +- tests/alerts/__init__.py | 0 tests/alerts/alert_test_case.py | 2 +- tests/alerts/negative_alert_test_case.py | 2 +- tests/alerts/positive_alert_test_case.py | 2 +- tests/alerts/test_alert_template.template | 8 +++++--- tests/alerts/test_auditd_commands.py | 6 +++--- tests/alerts/test_bruteforce_ssh.py | 6 +++--- tests/alerts/test_cloudtrail_deadman.py | 6 +++--- tests/alerts/test_cloudtrail_excessive_describe.py | 7 ++++--- tests/alerts/test_cloudtrail_logging_disabled.py | 6 +++--- tests/alerts/test_cloudtrail_public_bucket.py | 7 ++++--- tests/alerts/test_deadman.py | 6 +++--- tests/alerts/test_deadman_generic.py | 7 
++++--- tests/alerts/test_duo_authfail.py | 6 +++--- tests/alerts/test_duo_fail_open.py | 6 +++--- tests/alerts/test_feedback_events.py | 6 +++--- tests/alerts/test_geomodel.py | 6 +++--- tests/alerts/test_guard_duty_probe.py | 7 ++++--- tests/alerts/test_honeycomb_case.py | 6 +++--- tests/alerts/test_nsm_scan_address.py | 6 +++--- tests/alerts/test_nsm_scan_port.py | 6 +++--- tests/alerts/test_nsm_scan_random.py | 6 +++--- tests/alerts/test_old_events.py | 6 +++--- tests/alerts/test_open_port_violation.py | 6 +++--- tests/alerts/test_promisc_audit.py | 6 +++--- tests/alerts/test_promisc_kernel.py | 6 +++--- tests/alerts/test_proxy_drop_executable.py | 7 ++++--- tests/alerts/test_proxy_drop_ip.py | 7 ++++--- tests/alerts/test_proxy_drop_non_standard_port.py | 6 +++--- tests/alerts/test_proxy_exfil_domains.py | 7 ++++--- tests/alerts/test_session_opened_sensitive_user.py | 7 ++++--- tests/alerts/test_sqs_queues_deadman.py | 6 +++--- tests/alerts/test_ssh_access.py | 6 +++--- tests/alerts/test_ssh_access_signreleng.py | 6 +++--- tests/alerts/test_ssh_key.py | 6 +++--- tests/alerts/test_ssh_lateral.py | 6 +++--- tests/alerts/test_ssh_password_auth_violation.py | 6 +++--- tests/alerts/test_trace_audit.py | 6 +++--- tests/alerts/test_write_audit.py | 6 +++--- tests/loginput/loginput_test_suite.py | 6 +++--- tests/loginput/test_loginput_index.py | 2 +- tests/mozdef_util/query_models/__init__.py | 0 tests/mozdef_util/query_models/negative_test_suite.py | 5 +---- tests/mozdef_util/query_models/positive_test_suite.py | 5 +---- tests/mozdef_util/query_models/query_test_suite.py | 3 +-- tests/mozdef_util/query_models/test_exists_match.py | 4 ++-- tests/mozdef_util/query_models/test_less_than_match.py | 4 ++-- tests/mozdef_util/query_models/test_phrase_match.py | 4 ++-- tests/mozdef_util/query_models/test_query_string_match.py | 4 ++-- tests/mozdef_util/query_models/test_range_match.py | 4 ++-- tests/mozdef_util/query_models/test_term_match.py | 4 ++-- 
tests/mozdef_util/query_models/test_terms_match.py | 4 ++-- tests/mozdef_util/query_models/test_wildcard_match.py | 4 ++-- tests/rest/test_rest_index.py | 2 +- 59 files changed, 152 insertions(+), 149 deletions(-) create mode 100644 tests/alerts/__init__.py create mode 100644 tests/mozdef_util/query_models/__init__.py diff --git a/mozdef_util/mozdef_util/elasticsearch_client.py b/mozdef_util/mozdef_util/elasticsearch_client.py index 126471bff..08b07bb63 100644 --- a/mozdef_util/mozdef_util/elasticsearch_client.py +++ b/mozdef_util/mozdef_util/elasticsearch_client.py @@ -5,12 +5,12 @@ from elasticsearch.exceptions import NotFoundError from elasticsearch.helpers import bulk, BulkIndexError -from query_models import SearchQuery, TermMatch, AggregatedResults, SimpleResults -from bulk_queue import BulkQueue +from .query_models import SearchQuery, TermMatch, AggregatedResults, SimpleResults +from .bulk_queue import BulkQueue -from utilities.logger import logger +from .utilities.logger import logger -from event import Event +from .event import Event DOCUMENT_TYPE = '_doc' diff --git a/mozdef_util/mozdef_util/event.py b/mozdef_util/mozdef_util/event.py index e42099c2f..6ee51a5c2 100644 --- a/mozdef_util/mozdef_util/event.py +++ b/mozdef_util/mozdef_util/event.py @@ -1,7 +1,7 @@ from datetime import datetime import socket -from utilities.toUTC import toUTC +from .utilities.toUTC import toUTC class Event(dict): diff --git a/mozdef_util/mozdef_util/plugin_set.py b/mozdef_util/mozdef_util/plugin_set.py index 62b431c31..acaaf04f8 100644 --- a/mozdef_util/mozdef_util/plugin_set.py +++ b/mozdef_util/mozdef_util/plugin_set.py @@ -1,8 +1,8 @@ import os import pynsive from operator import itemgetter -from utilities.dict2List import dict2List -from utilities.logger import logger +from .utilities.dict2List import dict2List +from .utilities.logger import logger class PluginSet(object): diff --git a/mozdef_util/mozdef_util/query_models/search_query.py 
b/mozdef_util/mozdef_util/query_models/search_query.py index 4bfc4ee89..3ec82fcff 100644 --- a/mozdef_util/mozdef_util/query_models/search_query.py +++ b/mozdef_util/mozdef_util/query_models/search_query.py @@ -11,8 +11,8 @@ from datetime import datetime from datetime import timedelta -from range_match import RangeMatch -from boolean_match import BooleanMatch +from .range_match import RangeMatch +from .boolean_match import BooleanMatch class SearchQuery(object): diff --git a/mozdef_util/mozdef_util/utilities/logger.py b/mozdef_util/mozdef_util/utilities/logger.py index 4a83d76d7..3128fad8f 100644 --- a/mozdef_util/mozdef_util/utilities/logger.py +++ b/mozdef_util/mozdef_util/utilities/logger.py @@ -10,7 +10,7 @@ from datetime import datetime from logging.handlers import SysLogHandler -from toUTC import toUTC +from .toUTC import toUTC def loggerTimeStamp(self, record, datefmt=None): diff --git a/tests/alerts/__init__.py b/tests/alerts/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/alerts/alert_test_case.py b/tests/alerts/alert_test_case.py index 976bbb726..c9cb1ecf1 100644 --- a/tests/alerts/alert_test_case.py +++ b/tests/alerts/alert_test_case.py @@ -2,7 +2,7 @@ import sys sys.path.append(os.path.join(os.path.dirname(__file__), "../../alerts")) -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class AlertTestCase(object): diff --git a/tests/alerts/negative_alert_test_case.py b/tests/alerts/negative_alert_test_case.py index 9ba79443e..0564584b0 100644 --- a/tests/alerts/negative_alert_test_case.py +++ b/tests/alerts/negative_alert_test_case.py @@ -1,4 +1,4 @@ -from alert_test_case import AlertTestCase +from .alert_test_case import AlertTestCase class NegativeAlertTestCase(AlertTestCase): diff --git a/tests/alerts/positive_alert_test_case.py b/tests/alerts/positive_alert_test_case.py index 8f9ae2abf..caf824616 100644 --- a/tests/alerts/positive_alert_test_case.py +++ 
b/tests/alerts/positive_alert_test_case.py @@ -1,4 +1,4 @@ -from alert_test_case import AlertTestCase +from .alert_test_case import AlertTestCase class PositiveAlertTestCase(AlertTestCase): diff --git a/tests/alerts/test_alert_template.template b/tests/alerts/test_alert_template.template index 270f362b8..23f173e2b 100644 --- a/tests/alerts/test_alert_template.template +++ b/tests/alerts/test_alert_template.template @@ -2,9 +2,11 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. # Copyright (c) 2017 Mozilla Corporation -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite + +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase + +from .alert_test_suite import AlertTestSuite class TEMPLATE_TEST_CLASSNAME(AlertTestSuite): diff --git a/tests/alerts/test_auditd_commands.py b/tests/alerts/test_auditd_commands.py index 90ccd573c..1cc9dcd75 100644 --- a/tests/alerts/test_auditd_commands.py +++ b/tests/alerts/test_auditd_commands.py @@ -1,7 +1,7 @@ -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class TestAlertAuditdCommands(AlertTestSuite): diff --git a/tests/alerts/test_bruteforce_ssh.py b/tests/alerts/test_bruteforce_ssh.py index 1c423fa0a..325dfc8db 100644 --- a/tests/alerts/test_bruteforce_ssh.py +++ b/tests/alerts/test_bruteforce_ssh.py @@ -3,10 +3,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
# Copyright (c) 2017 Mozilla Corporation -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class TestAlertBruteforceSsh(AlertTestSuite): diff --git a/tests/alerts/test_cloudtrail_deadman.py b/tests/alerts/test_cloudtrail_deadman.py index 8d1c66fed..ff7fc8a59 100644 --- a/tests/alerts/test_cloudtrail_deadman.py +++ b/tests/alerts/test_cloudtrail_deadman.py @@ -1,7 +1,7 @@ -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class TestAlertCloudtrailDeadman(AlertTestSuite): diff --git a/tests/alerts/test_cloudtrail_excessive_describe.py b/tests/alerts/test_cloudtrail_excessive_describe.py index df20609c6..4ad295c15 100644 --- a/tests/alerts/test_cloudtrail_excessive_describe.py +++ b/tests/alerts/test_cloudtrail_excessive_describe.py @@ -2,9 +2,10 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
# Copyright (c) 2017 Mozilla Corporation -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase + +from .alert_test_suite import AlertTestSuite class TestCloudtrailExcessiveDescribe(AlertTestSuite): diff --git a/tests/alerts/test_cloudtrail_logging_disabled.py b/tests/alerts/test_cloudtrail_logging_disabled.py index 5c7abe3ae..835878324 100644 --- a/tests/alerts/test_cloudtrail_logging_disabled.py +++ b/tests/alerts/test_cloudtrail_logging_disabled.py @@ -1,7 +1,7 @@ -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class TestAlertCloudtrailLoggingDisabled(AlertTestSuite): diff --git a/tests/alerts/test_cloudtrail_public_bucket.py b/tests/alerts/test_cloudtrail_public_bucket.py index 3c8ebebd6..43cd7fa23 100644 --- a/tests/alerts/test_cloudtrail_public_bucket.py +++ b/tests/alerts/test_cloudtrail_public_bucket.py @@ -2,9 +2,10 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
# Copyright (c) 2017 Mozilla Corporation -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase + +from .alert_test_suite import AlertTestSuite class TestCloudtrailPublicBucket(AlertTestSuite): diff --git a/tests/alerts/test_deadman.py b/tests/alerts/test_deadman.py index e67b1beb9..3f388f3d8 100644 --- a/tests/alerts/test_deadman.py +++ b/tests/alerts/test_deadman.py @@ -1,7 +1,7 @@ -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class TestAlertDeadman(AlertTestSuite): diff --git a/tests/alerts/test_deadman_generic.py b/tests/alerts/test_deadman_generic.py index 8ed8e3174..d14a9befd 100644 --- a/tests/alerts/test_deadman_generic.py +++ b/tests/alerts/test_deadman_generic.py @@ -2,9 +2,10 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
# Copyright (c) 2017 Mozilla Corporation -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase + +from .alert_test_suite import AlertTestSuite class TestDeadmanGeneric(AlertTestSuite): diff --git a/tests/alerts/test_duo_authfail.py b/tests/alerts/test_duo_authfail.py index 147cbb4f7..95bf0f625 100644 --- a/tests/alerts/test_duo_authfail.py +++ b/tests/alerts/test_duo_authfail.py @@ -3,10 +3,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. # Copyright (c) 2017 Mozilla Corporation -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class TestAlertDuoAuthFail(AlertTestSuite): diff --git a/tests/alerts/test_duo_fail_open.py b/tests/alerts/test_duo_fail_open.py index b6daab14d..e4a0d4f4c 100644 --- a/tests/alerts/test_duo_fail_open.py +++ b/tests/alerts/test_duo_fail_open.py @@ -1,7 +1,7 @@ -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class TestAlertDuoFailOpen(AlertTestSuite): diff --git a/tests/alerts/test_feedback_events.py b/tests/alerts/test_feedback_events.py index b60f03202..8ea481cf3 100644 --- a/tests/alerts/test_feedback_events.py +++ b/tests/alerts/test_feedback_events.py @@ -5,10 +5,10 @@ import json -from positive_alert_test_case import 
PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class TestAlertFeedbackEvents(AlertTestSuite): diff --git a/tests/alerts/test_geomodel.py b/tests/alerts/test_geomodel.py index 6d7d0e1ff..3cd042ea1 100644 --- a/tests/alerts/test_geomodel.py +++ b/tests/alerts/test_geomodel.py @@ -3,10 +3,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. # Copyright (c) 2017 Mozilla Corporation -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class TestAlertGeomodel(AlertTestSuite): diff --git a/tests/alerts/test_guard_duty_probe.py b/tests/alerts/test_guard_duty_probe.py index eec48e34a..2a6d11935 100644 --- a/tests/alerts/test_guard_duty_probe.py +++ b/tests/alerts/test_guard_duty_probe.py @@ -2,9 +2,10 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
# Copyright (c) 2017 Mozilla Corporation -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase + +from .alert_test_suite import AlertTestSuite class TestGuardDutyProbe(AlertTestSuite): diff --git a/tests/alerts/test_honeycomb_case.py b/tests/alerts/test_honeycomb_case.py index f3c00ca5a..1277015fa 100644 --- a/tests/alerts/test_honeycomb_case.py +++ b/tests/alerts/test_honeycomb_case.py @@ -3,10 +3,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. # Copyright (c) 2017 Mozilla Corporation -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class TestAlertHoneycomb(AlertTestSuite): diff --git a/tests/alerts/test_nsm_scan_address.py b/tests/alerts/test_nsm_scan_address.py index 41ca4b372..e6efd9049 100644 --- a/tests/alerts/test_nsm_scan_address.py +++ b/tests/alerts/test_nsm_scan_address.py @@ -3,10 +3,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
# Copyright (c) 2017 Mozilla Corporation -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class TestNSMScanAddress(AlertTestSuite): diff --git a/tests/alerts/test_nsm_scan_port.py b/tests/alerts/test_nsm_scan_port.py index 6f0917730..c411122a6 100644 --- a/tests/alerts/test_nsm_scan_port.py +++ b/tests/alerts/test_nsm_scan_port.py @@ -3,10 +3,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. # Copyright (c) 2017 Mozilla Corporation -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class TestNSMScanPort(AlertTestSuite): diff --git a/tests/alerts/test_nsm_scan_random.py b/tests/alerts/test_nsm_scan_random.py index 3065a6daa..e79de5edf 100644 --- a/tests/alerts/test_nsm_scan_random.py +++ b/tests/alerts/test_nsm_scan_random.py @@ -3,10 +3,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
# Copyright (c) 2017 Mozilla Corporation -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class TestNSMScanRandom(AlertTestSuite): diff --git a/tests/alerts/test_old_events.py b/tests/alerts/test_old_events.py index 4017bb442..7473d911d 100644 --- a/tests/alerts/test_old_events.py +++ b/tests/alerts/test_old_events.py @@ -1,7 +1,7 @@ -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class TestOldEvents(AlertTestSuite): diff --git a/tests/alerts/test_open_port_violation.py b/tests/alerts/test_open_port_violation.py index cb069b7c0..c153efcdb 100644 --- a/tests/alerts/test_open_port_violation.py +++ b/tests/alerts/test_open_port_violation.py @@ -3,10 +3,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
# Copyright (c) 2017 Mozilla Corporation -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class TestAlertOpenPortViolation(AlertTestSuite): diff --git a/tests/alerts/test_promisc_audit.py b/tests/alerts/test_promisc_audit.py index 8df74c2a9..fd37f8b50 100644 --- a/tests/alerts/test_promisc_audit.py +++ b/tests/alerts/test_promisc_audit.py @@ -1,7 +1,7 @@ -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class TestPromiscAudit(AlertTestSuite): diff --git a/tests/alerts/test_promisc_kernel.py b/tests/alerts/test_promisc_kernel.py index d605488c0..64b9a5482 100644 --- a/tests/alerts/test_promisc_kernel.py +++ b/tests/alerts/test_promisc_kernel.py @@ -1,7 +1,7 @@ -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class TestPromiscKernel(AlertTestSuite): diff --git a/tests/alerts/test_proxy_drop_executable.py b/tests/alerts/test_proxy_drop_executable.py index b477eb138..1ab09ddac 100644 --- a/tests/alerts/test_proxy_drop_executable.py +++ b/tests/alerts/test_proxy_drop_executable.py @@ -2,9 +2,10 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
# Copyright (c) 2017 Mozilla Corporation -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase + +from .alert_test_suite import AlertTestSuite class TestAlertProxyDropExecutable(AlertTestSuite): diff --git a/tests/alerts/test_proxy_drop_ip.py b/tests/alerts/test_proxy_drop_ip.py index d9d88e549..cfbaeefc7 100644 --- a/tests/alerts/test_proxy_drop_ip.py +++ b/tests/alerts/test_proxy_drop_ip.py @@ -2,9 +2,10 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. # Copyright (c) 2017 Mozilla Corporation -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase + +from .alert_test_suite import AlertTestSuite class TestAlertProxyDropIP(AlertTestSuite): diff --git a/tests/alerts/test_proxy_drop_non_standard_port.py b/tests/alerts/test_proxy_drop_non_standard_port.py index 8abeee3ce..028dfbe78 100644 --- a/tests/alerts/test_proxy_drop_non_standard_port.py +++ b/tests/alerts/test_proxy_drop_non_standard_port.py @@ -3,10 +3,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
# Copyright (c) 2017 Mozilla Corporation -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class TestAlertProxyDropNonStandardPort(AlertTestSuite): diff --git a/tests/alerts/test_proxy_exfil_domains.py b/tests/alerts/test_proxy_exfil_domains.py index 11e8c6a12..252919873 100644 --- a/tests/alerts/test_proxy_exfil_domains.py +++ b/tests/alerts/test_proxy_exfil_domains.py @@ -2,9 +2,10 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. # Copyright (c) 2017 Mozilla Corporation -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase + +from .alert_test_suite import AlertTestSuite class TestProxyExfilDomains(AlertTestSuite): diff --git a/tests/alerts/test_session_opened_sensitive_user.py b/tests/alerts/test_session_opened_sensitive_user.py index eecb0334c..8d5f03db4 100644 --- a/tests/alerts/test_session_opened_sensitive_user.py +++ b/tests/alerts/test_session_opened_sensitive_user.py @@ -5,9 +5,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
# Copyright (c) 2017 Mozilla Corporation -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase + +from .alert_test_suite import AlertTestSuite class TestSessionOpenedUser(AlertTestSuite): diff --git a/tests/alerts/test_sqs_queues_deadman.py b/tests/alerts/test_sqs_queues_deadman.py index b5505ab3a..da0d2e0a7 100644 --- a/tests/alerts/test_sqs_queues_deadman.py +++ b/tests/alerts/test_sqs_queues_deadman.py @@ -5,10 +5,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. # Copyright (c) 2017 Mozilla Corporation -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class TestAlertSQSQueuesDeadman(AlertTestSuite): diff --git a/tests/alerts/test_ssh_access.py b/tests/alerts/test_ssh_access.py index 3c3053185..88882af37 100644 --- a/tests/alerts/test_ssh_access.py +++ b/tests/alerts/test_ssh_access.py @@ -1,7 +1,7 @@ -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class TestAlertSSHAccess(AlertTestSuite): diff --git a/tests/alerts/test_ssh_access_signreleng.py b/tests/alerts/test_ssh_access_signreleng.py index 3091157cc..7a1ccebf7 100644 --- a/tests/alerts/test_ssh_access_signreleng.py +++ b/tests/alerts/test_ssh_access_signreleng.py @@ -1,7 +1,7 @@ -from 
positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class TestAlertSSHAccessSignReleng(AlertTestSuite): diff --git a/tests/alerts/test_ssh_key.py b/tests/alerts/test_ssh_key.py index 8af73cdec..82c336e89 100644 --- a/tests/alerts/test_ssh_key.py +++ b/tests/alerts/test_ssh_key.py @@ -1,7 +1,7 @@ -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class TestSSHKey(AlertTestSuite): diff --git a/tests/alerts/test_ssh_lateral.py b/tests/alerts/test_ssh_lateral.py index 513cb65d2..279cb2b49 100644 --- a/tests/alerts/test_ssh_lateral.py +++ b/tests/alerts/test_ssh_lateral.py @@ -1,7 +1,7 @@ -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class TestSSHLateral(AlertTestSuite): diff --git a/tests/alerts/test_ssh_password_auth_violation.py b/tests/alerts/test_ssh_password_auth_violation.py index a844aa249..bb7d728ed 100644 --- a/tests/alerts/test_ssh_password_auth_violation.py +++ b/tests/alerts/test_ssh_password_auth_violation.py @@ -3,10 +3,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
# Copyright (c) 2017 Mozilla Corporation -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class TestAlertSSHPasswordAuthViolation(AlertTestSuite): diff --git a/tests/alerts/test_trace_audit.py b/tests/alerts/test_trace_audit.py index 02367f4cb..917ed26e3 100644 --- a/tests/alerts/test_trace_audit.py +++ b/tests/alerts/test_trace_audit.py @@ -3,10 +3,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. # Copyright (c) 2017 Mozilla Corporation -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class TestTraceAudit(AlertTestSuite): diff --git a/tests/alerts/test_write_audit.py b/tests/alerts/test_write_audit.py index 137c369af..fe011f874 100644 --- a/tests/alerts/test_write_audit.py +++ b/tests/alerts/test_write_audit.py @@ -3,10 +3,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
# Copyright (c) 2017 Mozilla Corporation -from positive_alert_test_case import PositiveAlertTestCase -from negative_alert_test_case import NegativeAlertTestCase +from .positive_alert_test_case import PositiveAlertTestCase +from .negative_alert_test_case import NegativeAlertTestCase -from alert_test_suite import AlertTestSuite +from .alert_test_suite import AlertTestSuite class TestWriteAudit(AlertTestSuite): diff --git a/tests/loginput/loginput_test_suite.py b/tests/loginput/loginput_test_suite.py index 10ebfbf33..5320f50fe 100644 --- a/tests/loginput/loginput_test_suite.py +++ b/tests/loginput/loginput_test_suite.py @@ -1,14 +1,14 @@ import sys import os -sys.path.append(os.path.join(os.path.dirname(__file__), "../")) -from http_test_suite import HTTPTestSuite - from mozdef_util.utilities.dot_dict import DotDict import mock from configlib import OptionParser +sys.path.append(os.path.join(os.path.dirname(__file__), "../")) +from http_test_suite import HTTPTestSuite + class LoginputTestSuite(HTTPTestSuite): diff --git a/tests/loginput/test_loginput_index.py b/tests/loginput/test_loginput_index.py index 1e5273f8c..fc78a33c8 100644 --- a/tests/loginput/test_loginput_index.py +++ b/tests/loginput/test_loginput_index.py @@ -1,4 +1,4 @@ -from loginput_test_suite import LoginputTestSuite +from .loginput_test_suite import LoginputTestSuite class TestTestRoute(LoginputTestSuite): diff --git a/tests/mozdef_util/query_models/__init__.py b/tests/mozdef_util/query_models/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/mozdef_util/query_models/negative_test_suite.py b/tests/mozdef_util/query_models/negative_test_suite.py index c27817cf8..d59ef35b4 100644 --- a/tests/mozdef_util/query_models/negative_test_suite.py +++ b/tests/mozdef_util/query_models/negative_test_suite.py @@ -1,7 +1,4 @@ -import os -import sys -sys.path.append(os.path.join(os.path.dirname(__file__), "../")) -from query_test_suite import QueryTestSuite +from .query_test_suite import 
QueryTestSuite class NegativeTestSuite(QueryTestSuite): diff --git a/tests/mozdef_util/query_models/positive_test_suite.py b/tests/mozdef_util/query_models/positive_test_suite.py index e2352d2f1..564ca0bcc 100644 --- a/tests/mozdef_util/query_models/positive_test_suite.py +++ b/tests/mozdef_util/query_models/positive_test_suite.py @@ -1,7 +1,4 @@ -import os -import sys -sys.path.append(os.path.join(os.path.dirname(__file__), "../")) -from query_test_suite import QueryTestSuite +from .query_test_suite import QueryTestSuite class PositiveTestSuite(QueryTestSuite): diff --git a/tests/mozdef_util/query_models/query_test_suite.py b/tests/mozdef_util/query_models/query_test_suite.py index 7454b69b2..56a607796 100644 --- a/tests/mozdef_util/query_models/query_test_suite.py +++ b/tests/mozdef_util/query_models/query_test_suite.py @@ -7,13 +7,12 @@ import os import sys +import pytest from mozdef_util.query_models import SearchQuery sys.path.append(os.path.join(os.path.dirname(__file__), "../../")) from unit_test_suite import UnitTestSuite -import pytest - class QueryTestSuite(UnitTestSuite): diff --git a/tests/mozdef_util/query_models/test_exists_match.py b/tests/mozdef_util/query_models/test_exists_match.py index 4c679f742..220341691 100644 --- a/tests/mozdef_util/query_models/test_exists_match.py +++ b/tests/mozdef_util/query_models/test_exists_match.py @@ -1,5 +1,5 @@ -from positive_test_suite import PositiveTestSuite -from negative_test_suite import NegativeTestSuite +from .positive_test_suite import PositiveTestSuite +from .negative_test_suite import NegativeTestSuite from mozdef_util.query_models import ExistsMatch diff --git a/tests/mozdef_util/query_models/test_less_than_match.py b/tests/mozdef_util/query_models/test_less_than_match.py index 3f5de83fd..9010e74c9 100644 --- a/tests/mozdef_util/query_models/test_less_than_match.py +++ b/tests/mozdef_util/query_models/test_less_than_match.py @@ -1,5 +1,5 @@ -from positive_test_suite import PositiveTestSuite -from 
negative_test_suite import NegativeTestSuite +from .positive_test_suite import PositiveTestSuite +from .negative_test_suite import NegativeTestSuite from mozdef_util.query_models import LessThanMatch diff --git a/tests/mozdef_util/query_models/test_phrase_match.py b/tests/mozdef_util/query_models/test_phrase_match.py index 7a948f676..368579687 100644 --- a/tests/mozdef_util/query_models/test_phrase_match.py +++ b/tests/mozdef_util/query_models/test_phrase_match.py @@ -1,5 +1,5 @@ -from positive_test_suite import PositiveTestSuite -from negative_test_suite import NegativeTestSuite +from .positive_test_suite import PositiveTestSuite +from .negative_test_suite import NegativeTestSuite from mozdef_util.query_models import PhraseMatch diff --git a/tests/mozdef_util/query_models/test_query_string_match.py b/tests/mozdef_util/query_models/test_query_string_match.py index 7be4305c0..54b9fd7d6 100644 --- a/tests/mozdef_util/query_models/test_query_string_match.py +++ b/tests/mozdef_util/query_models/test_query_string_match.py @@ -1,5 +1,5 @@ -from positive_test_suite import PositiveTestSuite -from negative_test_suite import NegativeTestSuite +from .positive_test_suite import PositiveTestSuite +from .negative_test_suite import NegativeTestSuite from mozdef_util.query_models import QueryStringMatch diff --git a/tests/mozdef_util/query_models/test_range_match.py b/tests/mozdef_util/query_models/test_range_match.py index 7a63e6156..d8061188b 100644 --- a/tests/mozdef_util/query_models/test_range_match.py +++ b/tests/mozdef_util/query_models/test_range_match.py @@ -1,5 +1,5 @@ -from positive_test_suite import PositiveTestSuite -from negative_test_suite import NegativeTestSuite +from .positive_test_suite import PositiveTestSuite +from .negative_test_suite import NegativeTestSuite from mozdef_util.query_models import RangeMatch diff --git a/tests/mozdef_util/query_models/test_term_match.py b/tests/mozdef_util/query_models/test_term_match.py index 4beb848ee..71e451c5a 100644 --- 
a/tests/mozdef_util/query_models/test_term_match.py +++ b/tests/mozdef_util/query_models/test_term_match.py @@ -1,5 +1,5 @@ -from positive_test_suite import PositiveTestSuite -from negative_test_suite import NegativeTestSuite +from .positive_test_suite import PositiveTestSuite +from .negative_test_suite import NegativeTestSuite from mozdef_util.query_models import TermMatch diff --git a/tests/mozdef_util/query_models/test_terms_match.py b/tests/mozdef_util/query_models/test_terms_match.py index 752364105..5bc3ce573 100644 --- a/tests/mozdef_util/query_models/test_terms_match.py +++ b/tests/mozdef_util/query_models/test_terms_match.py @@ -1,5 +1,5 @@ -from positive_test_suite import PositiveTestSuite -from negative_test_suite import NegativeTestSuite +from .positive_test_suite import PositiveTestSuite +from .negative_test_suite import NegativeTestSuite from mozdef_util.query_models import TermsMatch diff --git a/tests/mozdef_util/query_models/test_wildcard_match.py b/tests/mozdef_util/query_models/test_wildcard_match.py index 77a450b5e..75e142f26 100644 --- a/tests/mozdef_util/query_models/test_wildcard_match.py +++ b/tests/mozdef_util/query_models/test_wildcard_match.py @@ -1,5 +1,5 @@ -from positive_test_suite import PositiveTestSuite -from negative_test_suite import NegativeTestSuite +from .positive_test_suite import PositiveTestSuite +from .negative_test_suite import NegativeTestSuite from mozdef_util.query_models import WildcardMatch diff --git a/tests/rest/test_rest_index.py b/tests/rest/test_rest_index.py index 5e4171057..97a9c2e0f 100644 --- a/tests/rest/test_rest_index.py +++ b/tests/rest/test_rest_index.py @@ -13,7 +13,7 @@ import pytest from dateutil.parser import parse -from rest_test_suite import RestTestSuite +from .rest_test_suite import RestTestSuite class TestTestRoute(RestTestSuite): From 282c48766bd387bfdd2360538768cdf3527d5295 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 28 Jun 2019 16:33:10 -0500 Subject: [PATCH 09/63] Fix toUTC tests 
--- mozdef_util/mozdef_util/utilities/toUTC.py | 2 +- tests/mozdef_util/utilities/test_toUTC.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/mozdef_util/mozdef_util/utilities/toUTC.py b/mozdef_util/mozdef_util/utilities/toUTC.py index 9e8ee4478..2146859ef 100644 --- a/mozdef_util/mozdef_util/utilities/toUTC.py +++ b/mozdef_util/mozdef_util/utilities/toUTC.py @@ -30,7 +30,7 @@ def toUTC(suspectedDate): # epoch? but seconds/milliseconds/nanoseconds (lookin at you heka) epochDivisor = int(str(1) + '0' * (len(str(suspectedDate)) % 10)) objDate = datetime.fromtimestamp(float(suspectedDate / epochDivisor), LOCAL_TIMEZONE) - elif type(suspectedDate) in (str, unicode): + elif type(suspectedDate) is str: # try to parse float or negative number from string: objDate = None try: diff --git a/tests/mozdef_util/utilities/test_toUTC.py b/tests/mozdef_util/utilities/test_toUTC.py index f32fd8e1b..7d0d4098c 100644 --- a/tests/mozdef_util/utilities/test_toUTC.py +++ b/tests/mozdef_util/utilities/test_toUTC.py @@ -1,6 +1,7 @@ from datetime import datetime, date from dateutil.parser import parse +import importlib import sys import pytz @@ -23,7 +24,7 @@ def utc_timezone(): tzlocal.get_localzone = utc_timezone if 'mozdef_util.utilities.toUTC' in sys.modules: - reload(sys.modules['mozdef_util.utilities.toUTC']) + importlib.reload(sys.modules['mozdef_util.utilities.toUTC']) class TestToUTC(): From 3a37b42bcafc8416ef694e53790a5932bb483def Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 28 Jun 2019 16:40:38 -0500 Subject: [PATCH 10/63] Fix exception message function no longer available --- mozdef_util/mozdef_util/geo_ip.py | 2 +- mozdef_util/mozdef_util/plugin_set.py | 4 ++-- mq/esworker_cloudtrail.py | 8 ++++---- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/mozdef_util/mozdef_util/geo_ip.py b/mozdef_util/mozdef_util/geo_ip.py index e1a8eda89..ae805b540 100644 --- a/mozdef_util/mozdef_util/geo_ip.py +++ b/mozdef_util/mozdef_util/geo_ip.py 
@@ -15,7 +15,7 @@ def lookup_ip(self, ip): try: result = self.db.city(ip) except Exception as e: - return {'error': e.message} + return {'error': str(e)} geo_dict = {} geo_dict['city'] = result.city.name diff --git a/mozdef_util/mozdef_util/plugin_set.py b/mozdef_util/mozdef_util/plugin_set.py index acaaf04f8..249887439 100644 --- a/mozdef_util/mozdef_util/plugin_set.py +++ b/mozdef_util/mozdef_util/plugin_set.py @@ -50,7 +50,7 @@ def identify_plugins(self, enabled_plugins): } ) except Exception as e: - logger.exception('Received exception when loading {0} plugins\n{1}'.format(module_name, e.message)) + logger.exception('Received exception when loading {0} plugins\n{1}'.format(module_name, e)) plugin_manager.destroy() return plugins @@ -84,7 +84,7 @@ def run_plugins(self, message, metadata=None): try: (message, metadata) = self.send_message_to_plugin(plugin_class=plugin['plugin_class'], message=message, metadata=metadata) except Exception as e: - logger.exception('Received exception in {0}: message: {1}\n{2}'.format(plugin['plugin_class'], message, e.message)) + logger.exception('Received exception in {0}: message: {1}\n{2}'.format(plugin['plugin_class'], message, e)) if message is None: return (message, metadata) return (message, metadata) diff --git a/mq/esworker_cloudtrail.py b/mq/esworker_cloudtrail.py index ec7b2481b..a64f7bf97 100755 --- a/mq/esworker_cloudtrail.py +++ b/mq/esworker_cloudtrail.py @@ -58,7 +58,7 @@ def __init__(self, region_name='us-east-1', aws_access_key_id=None, aws_secret_a self.aws_access_key_id, self.aws_secret_access_key)) except Exception, e: - logger.error("Unable to connect to STS due to exception %s" % e.message) + logger.error("Unable to connect to STS due to exception {0}".format(e)) raise if self.aws_access_key_id is not None or self.aws_secret_access_key is not None: @@ -67,7 +67,7 @@ def __init__(self, region_name='us-east-1', aws_access_key_id=None, aws_secret_a if self.session_credentials is None or 
self.session_credentials.is_expired(): self.session_credentials = self.local_conn_sts.get_session_token() except Exception, e: - logger.error("Unable to get session token due to exception %s" % e.message) + logger.error("Unable to get session token due to exception {0}".format(e)) raise try: creds = get_aws_credentials( @@ -77,7 +77,7 @@ def __init__(self, region_name='us-east-1', aws_access_key_id=None, aws_secret_a self.session_credentials.session_token) if self.session_credentials else {} self.session_conn_sts = boto.sts.connect_to_region(**creds) except Exception, e: - logger.error("Unable to connect to STS with session token due to exception %s" % e.message) + logger.error("Unable to connect to STS with session token due to exception {0}".format(e)) raise self.conn_sts = self.session_conn_sts else: @@ -104,7 +104,7 @@ def assume_role(self, policy=policy).credentials logger.debug("Assumed new role with credential %s" % self.credentials[role_arn].to_dict()) except Exception, e: - logger.error("Unable to assume role %s due to exception %s" % (role_arn, e.message)) + logger.error("Unable to assume role {0} due to exception {1}".format(role_arn, e)) self.credentials[role_arn] = False return self.credentials[role_arn] From b2ca33ed7ac8a6005605dbffed87e0776713a940 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 28 Jun 2019 16:43:52 -0500 Subject: [PATCH 11/63] Fixup reload function namespace --- bot/slack/bot_plugin_set.py | 3 ++- mozdef_util/mozdef_util/plugin_set.py | 4 +++- mq/lib/plugins.py | 3 ++- rest/index.py | 3 ++- 4 files changed, 9 insertions(+), 4 deletions(-) diff --git a/bot/slack/bot_plugin_set.py b/bot/slack/bot_plugin_set.py index 475250229..0b98d261f 100644 --- a/bot/slack/bot_plugin_set.py +++ b/bot/slack/bot_plugin_set.py @@ -1,5 +1,6 @@ import os import pynsive +import importlib from mozdef_util.utilities.logger import logger @@ -32,7 +33,7 @@ def identify_plugins(self, enabled_plugins): continue module_obj = 
pynsive.import_module(found_module) - reload(module_obj) + importlib.reload(module_obj) plugin_class_obj = module_obj.Command() logger.info('Plugin {0} registered to receive command with {1}'.format(module_name, plugin_class_obj.command_name)) plugins.append( diff --git a/mozdef_util/mozdef_util/plugin_set.py b/mozdef_util/mozdef_util/plugin_set.py index 249887439..9eda8cf7f 100644 --- a/mozdef_util/mozdef_util/plugin_set.py +++ b/mozdef_util/mozdef_util/plugin_set.py @@ -1,6 +1,8 @@ import os import pynsive +import importlib from operator import itemgetter + from .utilities.dict2List import dict2List from .utilities.logger import logger @@ -33,7 +35,7 @@ def identify_plugins(self, enabled_plugins): try: module_obj = pynsive.import_module(found_module) - reload(module_obj) + importlib.reload(module_obj) plugin_class_obj = module_obj.message() if 'priority' in dir(plugin_class_obj): diff --git a/mq/lib/plugins.py b/mq/lib/plugins.py index 1401d741e..c59e23793 100644 --- a/mq/lib/plugins.py +++ b/mq/lib/plugins.py @@ -10,6 +10,7 @@ from operator import itemgetter from datetime import datetime import pynsive +import importlib from mozdef_util.utilities.dict2List import dict2List from mozdef_util.utilities.logger import logger @@ -61,7 +62,7 @@ def registerPlugins(): modules = pynsive.list_modules('plugins') for mname in modules: module = pynsive.import_module(mname) - reload(module) + importlib.reload(module) if not module: raise ImportError('Unable to load module {}'.format(mname)) else: diff --git a/rest/index.py b/rest/index.py index 5dbe0be33..2903060bb 100644 --- a/rest/index.py +++ b/rest/index.py @@ -13,6 +13,7 @@ import requests import sys import socket +import importlib from bottle import route, run, response, request, default_app, post from datetime import datetime, timedelta from configlib import getConfig, OptionParser @@ -461,7 +462,7 @@ def registerPlugins(): modules = pynsive.list_modules(module_name) for mfile in modules: module = 
pynsive.import_module(mfile) - reload(module) + importlib.reload(module) if not module: raise ImportError('Unable to load module {}'.format(mfile)) else: From bd4c48db9a73e542995d977ba3fce81b9d8cbc69 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 28 Jun 2019 16:49:30 -0500 Subject: [PATCH 12/63] Rename iteritems to items for dictionaries --- alerts/feedback_events.py | 2 +- alerts/lib/alerttask.py | 2 +- alerts/ssh_access_signreleng.py | 2 +- bot/irc/mozdefbot.py | 2 +- bot/slack/slack_bot.py | 2 +- cloudy_mozdef/lambda_layer/build/lib/alerttask.py | 2 +- cron/auth02mozdef.py | 2 +- cron/collectAttackers.py | 2 +- cron/google2mozdef.py | 2 +- mozdef_util/mozdef_util/query_models/aggregated_results.py | 2 +- mozdef_util/mozdef_util/utilities/dict2List.py | 2 +- mq/esworker_cloudtrail.py | 4 ++-- mq/esworker_eventtask.py | 2 +- mq/esworker_papertrail.py | 4 ++-- mq/esworker_sns_sqs.py | 6 +++--- mq/esworker_sqs.py | 4 ++-- mq/plugins/auditdFixup.py | 2 +- tests/alerts/alert_test_suite.py | 4 ++-- tests/mozdef_util/query_models/query_test_suite.py | 2 +- tests/unit_test_suite.py | 2 +- 20 files changed, 26 insertions(+), 26 deletions(-) diff --git a/alerts/feedback_events.py b/alerts/feedback_events.py index 3a939e38f..b9152283a 100644 --- a/alerts/feedback_events.py +++ b/alerts/feedback_events.py @@ -33,7 +33,7 @@ def onEvent(self, event): event_date = event['_source']['details']['alert_information']['date'] summary = u"{} escalated alert within single-sign on (SSO) dashboard. 
Event Date: {} Summary: \"{}\"".format(user, event_date, event_summary) - for alert_code, tag in self._config.iteritems(): + for alert_code, tag in self._config.items(): if event['_source']['details']['alert_information']['alert_code'] == alert_code: tags.append(tag) diff --git a/alerts/lib/alerttask.py b/alerts/lib/alerttask.py index 1313228e2..de523781c 100644 --- a/alerts/lib/alerttask.py +++ b/alerts/lib/alerttask.py @@ -35,7 +35,7 @@ def keypaths(nested): """ return a list of nested dict key paths like: [u'_source', u'details', u'program'] """ - for key, value in nested.iteritems(): + for key, value in nested.items(): if isinstance(value, collections.Mapping): for subkey, subvalue in keypaths(value): yield [key] + subkey, subvalue diff --git a/alerts/ssh_access_signreleng.py b/alerts/ssh_access_signreleng.py index e74b66fd5..a2cf5c66d 100644 --- a/alerts/ssh_access_signreleng.py +++ b/alerts/ssh_access_signreleng.py @@ -28,7 +28,7 @@ def main(self): for exclusion in self.config['exclusions']: exclusion_query = None - for key, value in exclusion.iteritems(): + for key, value in exclusion.items(): phrase_exclusion = PhraseMatch(key, value) if exclusion_query is None: exclusion_query = phrase_exclusion diff --git a/bot/irc/mozdefbot.py b/bot/irc/mozdefbot.py index 045bef3e2..ad185f716 100755 --- a/bot/irc/mozdefbot.py +++ b/bot/irc/mozdefbot.py @@ -402,7 +402,7 @@ def initConfig(): # Our config parser stomps out the '#' so we gotta readd channelkeys = {} - for key, value in options.channelkeys.iteritems(): + for key, value in options.channelkeys.items(): if not key.startswith('#'): key = '#{0}'.format(key) channelkeys[key] = value diff --git a/bot/slack/slack_bot.py b/bot/slack/slack_bot.py index 4dc878642..ed9cc5390 100644 --- a/bot/slack/slack_bot.py +++ b/bot/slack/slack_bot.py @@ -50,7 +50,7 @@ def delegate_command(self, message_text): if command == '!help': response = "\nHelp is on it's way...try these:\n" - for command_name, plugin in 
self.plugins.iteritems(): + for command_name, plugin in self.plugins.items(): response += "\n{0} -- {1}".format( command_name, plugin['help_text'] diff --git a/cloudy_mozdef/lambda_layer/build/lib/alerttask.py b/cloudy_mozdef/lambda_layer/build/lib/alerttask.py index 0940379b8..84e9b3223 100644 --- a/cloudy_mozdef/lambda_layer/build/lib/alerttask.py +++ b/cloudy_mozdef/lambda_layer/build/lib/alerttask.py @@ -35,7 +35,7 @@ def keypaths(nested): """ return a list of nested dict key paths like: [u'_source', u'details', u'program'] """ - for key, value in nested.iteritems(): + for key, value in nested.items(): if isinstance(value, collections.Mapping): for subkey, subvalue in keypaths(value): yield [key] + subkey, subvalue diff --git a/cron/auth02mozdef.py b/cron/auth02mozdef.py index a1a86f7e2..96be095e1 100644 --- a/cron/auth02mozdef.py +++ b/cron/auth02mozdef.py @@ -249,7 +249,7 @@ def save_state(fpath, state): def byteify(input): """Convert input to ascii""" if isinstance(input, dict): - return {byteify(key): byteify(value) for key, value in input.iteritems()} + return {byteify(key): byteify(value) for key, value in input.items()} elif isinstance(input, list): return [byteify(element) for element in input] elif isinstance(input, unicode): diff --git a/cron/collectAttackers.py b/cron/collectAttackers.py index 7bb1b6466..3a1ff01d6 100755 --- a/cron/collectAttackers.py +++ b/cron/collectAttackers.py @@ -46,7 +46,7 @@ def keypaths(nested): ''' return a list of nested dict key paths like: [u'_source', u'details', u'hostname'] ''' - for key, value in nested.iteritems(): + for key, value in nested.items(): if isinstance(value, collections.Mapping): for subkey, subvalue in keypaths(value): yield [key] + subkey, subvalue diff --git a/cron/google2mozdef.py b/cron/google2mozdef.py index 3c1fb5fd7..d8a7901be 100755 --- a/cron/google2mozdef.py +++ b/cron/google2mozdef.py @@ -52,7 +52,7 @@ def flattenDict(inDict, pre=None, values=True): ''' pre = pre[:] if pre else [] if 
isinstance(inDict, dict): - for key, value in inDict.iteritems(): + for key, value in inDict.items(): if isinstance(value, dict): for d in flattenDict(value, pre + [key], values): yield d diff --git a/mozdef_util/mozdef_util/query_models/aggregated_results.py b/mozdef_util/mozdef_util/query_models/aggregated_results.py index 996acad00..6fd2461c5 100644 --- a/mozdef_util/mozdef_util/query_models/aggregated_results.py +++ b/mozdef_util/mozdef_util/query_models/aggregated_results.py @@ -23,7 +23,7 @@ def AggregatedResults(input_results): } converted_results['hits'].append(hit_dict) - for agg_name, aggregation in input_results.aggregations.to_dict().iteritems(): + for agg_name, aggregation in input_results.aggregations.to_dict().items(): aggregation_dict = { 'terms': [] } diff --git a/mozdef_util/mozdef_util/utilities/dict2List.py b/mozdef_util/mozdef_util/utilities/dict2List.py index ed1597fd8..af5a7a139 100644 --- a/mozdef_util/mozdef_util/utilities/dict2List.py +++ b/mozdef_util/mozdef_util/utilities/dict2List.py @@ -3,7 +3,7 @@ def dict2List(inObj): return a list of the dict keys and values ''' if isinstance(inObj, dict): - for key, value in inObj.iteritems(): + for key, value in inObj.items(): if isinstance(value, dict): for d in dict2List(value): yield d diff --git a/mq/esworker_cloudtrail.py b/mq/esworker_cloudtrail.py index a64f7bf97..7356fdd57 100755 --- a/mq/esworker_cloudtrail.py +++ b/mq/esworker_cloudtrail.py @@ -160,7 +160,7 @@ def keyMapping(aDict): returndict['receivedtimestamp'] = toUTC(datetime.now()).isoformat() returndict['mozdefhostname'] = options.mozdefhostname try: - for k, v in aDict.iteritems(): + for k, v in aDict.items(): k = removeAt(k).lower() if k == 'sourceip': @@ -228,7 +228,7 @@ def keyMapping(aDict): returndict[u'details'][u'message'] = v else: if len(v) > 0: - for details_key, details_value in v.iteritems(): + for details_key, details_value in v.items(): returndict[u'details'][details_key] = details_value # custom fields/details as a 
one off, not in an array diff --git a/mq/esworker_eventtask.py b/mq/esworker_eventtask.py index af69d81eb..817181328 100755 --- a/mq/esworker_eventtask.py +++ b/mq/esworker_eventtask.py @@ -51,7 +51,7 @@ def keyMapping(aDict): returndict['mozdefhostname'] = options.mozdefhostname returndict[u'details'] = {} try: - for k, v in aDict.iteritems(): + for k, v in aDict.items(): k = removeAt(k).lower() if k == 'sourceip': diff --git a/mq/esworker_papertrail.py b/mq/esworker_papertrail.py index 67ff03460..b4a6bbc18 100755 --- a/mq/esworker_papertrail.py +++ b/mq/esworker_papertrail.py @@ -120,7 +120,7 @@ def keyMapping(aDict): returndict['mozdefhostname'] = options.mozdefhostname returndict['details'] = {} try: - for k, v in aDict.iteritems(): + for k, v in aDict.items(): k = removeAt(k).lower() if k in ('message', 'summary'): @@ -170,7 +170,7 @@ def keyMapping(aDict): returndict[u'details'][u'message'] = v else: if len(v) > 0: - for details_key, details_value in v.iteritems(): + for details_key, details_value in v.items(): returndict[u'details'][details_key] = details_value # custom fields/details as a one off, not in an array diff --git a/mq/esworker_sns_sqs.py b/mq/esworker_sns_sqs.py index 8642f6085..bdca5ae8e 100755 --- a/mq/esworker_sns_sqs.py +++ b/mq/esworker_sns_sqs.py @@ -100,11 +100,11 @@ def on_message(self, message): event['severity'] = 'INFO' event['details'] = {} - for message_key, message_value in message.iteritems(): + for message_key, message_value in message.items(): if 'Message' == message_key: try: message_json = json.loads(message_value) - for inside_message_key, inside_message_value in message_json.iteritems(): + for inside_message_key, inside_message_value in message_json.items(): if inside_message_key in ('type', 'category'): event['category'] = inside_message_value # add type subcategory for filtering after @@ -131,7 +131,7 @@ def on_message(self, message): event[u'details'][u'message'] = inside_message_value else: if len(inside_message_value) > 
0: - for details_key, details_value in inside_message_value.iteritems(): + for details_key, details_value in inside_message_value.items(): event[u'details'][details_key] = details_value else: event['details'][inside_message_key] = inside_message_value diff --git a/mq/esworker_sqs.py b/mq/esworker_sqs.py index 7e4e90ea1..7564c13de 100755 --- a/mq/esworker_sqs.py +++ b/mq/esworker_sqs.py @@ -59,7 +59,7 @@ def keyMapping(aDict): returndict['mozdefhostname'] = options.mozdefhostname returndict['details'] = {} try: - for k, v in aDict.iteritems(): + for k, v in aDict.items(): k = removeAt(k).lower() if k in ('message', 'summary'): @@ -109,7 +109,7 @@ def keyMapping(aDict): returndict[u'details'][u'message'] = v else: if len(v) > 0: - for details_key, details_value in v.iteritems(): + for details_key, details_value in v.items(): returndict[u'details'][details_key] = details_value # custom fields/details as a one off, not in an array diff --git a/mq/plugins/auditdFixup.py b/mq/plugins/auditdFixup.py index 016c8e4d5..c87299b95 100644 --- a/mq/plugins/auditdFixup.py +++ b/mq/plugins/auditdFixup.py @@ -90,7 +90,7 @@ def onMessage(self, message, metadata): if 'ses' in message['details'] and message['details']['ses'] == "4294967295": message['details']['ses'] = '-1' # fix '(null)' string records to fit in a long - for k, v in message['details'].iteritems(): + for k, v in message['details'].items(): if v == '(null)' and 'id' in k: message['details'][k] = -1 diff --git a/tests/alerts/alert_test_suite.py b/tests/alerts/alert_test_suite.py index bae838fd7..c63c21f55 100644 --- a/tests/alerts/alert_test_suite.py +++ b/tests/alerts/alert_test_suite.py @@ -127,7 +127,7 @@ def dict_merge(self, target, *args): obj = args[0] if not isinstance(obj, dict): return obj - for k, v in obj.iteritems(): + for k, v in obj.items(): if k in target and isinstance(target[k], dict): self.dict_merge(target[k], v) else: @@ -217,7 +217,7 @@ def verify_expected_alert(self, found_alert, test_case): assert 
type(found_alert['_source']['events']) == list, 'Alert events field is not a list' # Verify that the alert properties are set correctly - for key, value in test_case.expected_alert.iteritems(): + for key, value in test_case.expected_alert.items(): assert found_alert['_source'][key] == value, u'{0} does not match!\n\tgot: {1}\n\texpected: {2}'.format(key, found_alert['_source'][key], value) def verify_alert_task(self, alert_task, test_case): diff --git a/tests/mozdef_util/query_models/query_test_suite.py b/tests/mozdef_util/query_models/query_test_suite.py index 56a607796..eeb8a36de 100644 --- a/tests/mozdef_util/query_models/query_test_suite.py +++ b/tests/mozdef_util/query_models/query_test_suite.py @@ -24,7 +24,7 @@ def verify_test(self, query_result, positive_test): assert len(query_result['hits']) is 0 def test_query_class(self): - for query, events in self.query_tests().iteritems(): + for query, events in self.query_tests().items(): for event in events: if pytest.config.option.delete_indexes: self.reset_elasticsearch() diff --git a/tests/unit_test_suite.py b/tests/unit_test_suite.py index 31e7069ac..a973ae94e 100644 --- a/tests/unit_test_suite.py +++ b/tests/unit_test_suite.py @@ -106,7 +106,7 @@ def generate_default_event(self): def verify_event(self, event, expected_event): assert sorted(event.keys()) == sorted(expected_event.keys()) - for key, value in expected_event.iteritems(): + for key, value in expected_event.items(): if key in ('receivedtimestamp', 'timestamp', 'utctimestamp'): assert type(event[key]) == unicode else: From 827e99ffa64efa365918a10b511b87b4b2aa3588 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 28 Jun 2019 17:10:27 -0500 Subject: [PATCH 13/63] Convert keys function return to list --- .../mozdef_util/elasticsearch_client.py | 6 +-- mq/esworker_papertrail.py | 2 +- mq/plugins/github_webhooks.py | 2 +- tests/alerts/alert_test_suite.py | 6 +-- .../query_models/test_aggregation.py | 37 ++++++++--------
.../query_models/test_search_query.py | 42 +++++++++---------- .../mozdef_util/test_elasticsearch_client.py | 3 +- tests/mozdef_util/utilities/test_dot_dict.py | 2 +- tests/mq/test_esworker_eventtask.py | 2 +- tests/rest/test_rest_index.py | 6 +-- 10 files changed, 53 insertions(+), 55 deletions(-) diff --git a/mozdef_util/mozdef_util/elasticsearch_client.py b/mozdef_util/mozdef_util/elasticsearch_client.py index 08b07bb63..44e9a3a2c 100644 --- a/mozdef_util/mozdef_util/elasticsearch_client.py +++ b/mozdef_util/mozdef_util/elasticsearch_client.py @@ -53,7 +53,7 @@ def delete_index(self, index_name, ignore_fail=False): self.es_connection.indices.delete(index=index_name, ignore=ignore_codes) def get_indices(self): - return self.es_connection.indices.stats()['indices'].keys() + return list(self.es_connection.indices.stats()['indices'].keys()) def index_exists(self, index_name): return self.es_connection.indices.exists(index_name) @@ -90,10 +90,10 @@ def create_alias_multiple_indices(self, alias_name, indices): self.es_connection.indices.update_aliases(dict(actions=actions)) def get_alias(self, alias_name): - return self.es_connection.indices.get_alias(index='*', name=alias_name).keys() + return list(self.es_connection.indices.get_alias(index='*', name=alias_name).keys()) def get_aliases(self): - return self.es_connection.cat.stats()['indices'].keys() + return list(self.es_connection.cat.stats()['indices'].keys()) def refresh(self, index_name): self.es_connection.indices.refresh(index=index_name) diff --git a/mq/esworker_papertrail.py b/mq/esworker_papertrail.py index b4a6bbc18..7ba9b58b2 100755 --- a/mq/esworker_papertrail.py +++ b/mq/esworker_papertrail.py @@ -98,7 +98,7 @@ def request(self, query, stime, etime): break # cache event ids we return to allow for some duplicate filtering checks # during next run - self._evidcache = self._events.keys() + self._evidcache = list(self._events.keys()) return self._events diff --git a/mq/plugins/github_webhooks.py 
b/mq/plugins/github_webhooks.py index 2b1b903e4..a6bfa8e81 100644 --- a/mq/plugins/github_webhooks.py +++ b/mq/plugins/github_webhooks.py @@ -21,7 +21,7 @@ def __init__(self): mapping_map = f.read() yap = yaml.safe_load(mapping_map) - self.eventtypes = yap.keys() + self.eventtypes = list(yap.keys()) self.yap = yap del(mapping_map) diff --git a/tests/alerts/alert_test_suite.py b/tests/alerts/alert_test_suite.py index c63c21f55..e0dee6db3 100644 --- a/tests/alerts/alert_test_suite.py +++ b/tests/alerts/alert_test_suite.py @@ -193,10 +193,10 @@ def verify_expected_alert(self, found_alert, test_case): assert found_alert['_index'] == self.alert_index_name, 'Alert index not propertly set, got: {}'.format(found_alert['_index']) # Verify that the alert has the right "look to it" - assert found_alert.keys() == ['_score', '_id', '_source', '_index'], 'Alert format is malformed' + assert sorted(found_alert.keys()) == ['_id', '_index', '_score', '_source'], 'Alert format is malformed' - # Verify the alert has an id field that is unicode - assert type(found_alert['_id']) == unicode, 'Alert _id is not an integer' + # Verify the alert has an id field that is str + assert type(found_alert['_id']) == str, 'Alert _id is malformed' # Verify there is a utctimestamp field assert 'utctimestamp' in found_alert['_source'], 'Alert does not have utctimestamp specified' diff --git a/tests/mozdef_util/query_models/test_aggregation.py b/tests/mozdef_util/query_models/test_aggregation.py index 0eee4bdd9..58f8d4e7c 100644 --- a/tests/mozdef_util/query_models/test_aggregation.py +++ b/tests/mozdef_util/query_models/test_aggregation.py @@ -31,11 +31,11 @@ def test_simple_aggregation_source_field(self): search_query.add_aggregation(Aggregation('source')) results = search_query.execute(self.es_client) - assert results['aggregations'].keys() == ['source'] + assert list(results['aggregations'].keys()) == ['source'] - assert results['aggregations']['source'].keys() == ['terms'] + assert 
list(results['aggregations']['source'].keys()) == ['terms'] assert len(results['aggregations']['source']['terms']) == 3 - assert results['aggregations']['source']['terms'][0].keys() == ['count', 'key'] + assert list(results['aggregations']['source']['terms'][0].keys()) == ['count', 'key'] assert results['aggregations']['source']['terms'][0]['count'] == 2 assert results['aggregations']['source']['terms'][0]['key'] == 'anothersource1' @@ -63,11 +63,11 @@ def test_simple_aggregation_note_field(self): search_query.add_aggregation(Aggregation('note')) results = search_query.execute(self.es_client) - assert results['aggregations'].keys() == ['note'] + assert list(results['aggregations'].keys()) == ['note'] - assert results['aggregations']['note'].keys() == ['terms'] + assert list(results['aggregations']['note'].keys()) == ['terms'] assert len(results['aggregations']['note']['terms']) == 3 - assert results['aggregations']['note']['terms'][0].keys() == ['count', 'key'] + assert list(results['aggregations']['note']['terms'][0].keys()) == ['count', 'key'] assert results['aggregations']['note']['terms'][0]['count'] == 2 assert results['aggregations']['note']['terms'][0]['key'] == 'abvc' @@ -95,13 +95,12 @@ def test_multiple_aggregations(self): search_query.add_aggregation(Aggregation('test')) results = search_query.execute(self.es_client) - aggregation_keys = results['aggregations'].keys() - aggregation_keys.sort() + aggregation_keys = sorted(results['aggregations'].keys()) assert aggregation_keys == ['note', 'test'] - assert results['aggregations']['note'].keys() == ['terms'] + assert list(results['aggregations']['note'].keys()) == ['terms'] assert len(results['aggregations']['note']['terms']) == 2 - assert results['aggregations']['note']['terms'][0].keys() == ['count', 'key'] + assert list(results['aggregations']['note']['terms'][0].keys()) == ['count', 'key'] assert results['aggregations']['note']['terms'][0]['count'] == 2 assert 
results['aggregations']['note']['terms'][0]['key'] == 'abvc' @@ -109,9 +108,9 @@ def test_multiple_aggregations(self): assert results['aggregations']['note']['terms'][1]['count'] == 1 assert results['aggregations']['note']['terms'][1]['key'] == 'think' - assert results['aggregations']['test'].keys() == ['terms'] + assert list(results['aggregations']['test'].keys()) == ['terms'] assert len(results['aggregations']['test']['terms']) == 1 - assert results['aggregations']['test']['terms'][0].keys() == ['count', 'key'] + assert list(results['aggregations']['test']['terms'][0].keys()) == ['count', 'key'] assert results['aggregations']['test']['terms'][0]['count'] == 4 assert results['aggregations']['test']['terms'][0]['key'] == 'value' @@ -132,11 +131,11 @@ def test_aggregation_non_existing_term(self): search_query.add_aggregation(Aggregation('example')) results = search_query.execute(self.es_client) - assert results.keys() == ['hits', 'meta', 'aggregations'] + assert list(results.keys()) == ['hits', 'meta', 'aggregations'] assert len(results['hits']) == 4 - assert results['aggregations'].keys() == ['example'] + assert list(results['aggregations'].keys()) == ['example'] - assert results['aggregations']['example'].keys() == ['terms'] + assert list(results['aggregations']['example'].keys()) == ['terms'] assert results['aggregations']['example']['terms'] == [] def test_aggregation_multiple_layers(self): @@ -164,8 +163,8 @@ def test_aggregation_multiple_layers(self): search_query.add_aggregation(Aggregation('details.ip')) results = search_query.execute(self.es_client) - assert results['aggregations'].keys() == ['details.ip'] - assert results['aggregations']['details.ip'].keys() == ['terms'] + assert list(results['aggregations'].keys()) == ['details.ip'] + assert list(results['aggregations']['details.ip'].keys()) == ['terms'] assert len(results['aggregations']['details.ip']['terms']) == 2 assert results['aggregations']['details.ip']['terms'][0]['count'] == 2 @@ -190,8 +189,8 
@@ def test_aggregation_non_existing_layers_term(self): search_query.add_aggregation(Aggregation('details.ipinformation')) results = search_query.execute(self.es_client) - assert results['aggregations'].keys() == ['details.ipinformation'] - assert results['aggregations']['details.ipinformation'].keys() == ['terms'] + assert list(results['aggregations'].keys()) == ['details.ipinformation'] + assert list(results['aggregations']['details.ipinformation'].keys()) == ['terms'] assert len(results['aggregations']['details.ipinformation']['terms']) == 0 def test_aggregation_with_default_size(self): diff --git a/tests/mozdef_util/query_models/test_search_query.py b/tests/mozdef_util/query_models/test_search_query.py index 86ddd5e39..0bbf4e216 100644 --- a/tests/mozdef_util/query_models/test_search_query.py +++ b/tests/mozdef_util/query_models/test_search_query.py @@ -141,15 +141,15 @@ def test_complex_aggregation_query_execute(self): self.refresh(self.event_index_name) results = query.execute(self.es_client) - assert results.keys() == ['hits', 'meta', 'aggregations'] - assert results['meta'].keys() == ['timed_out'] + assert list(results.keys()) == ['hits', 'meta', 'aggregations'] + assert list(results['meta'].keys()) == ['timed_out'] assert results['meta']['timed_out'] is False sorted_hits = sorted(results['hits'], key=lambda k: k['_source']['ip']) assert len(sorted_hits) == 3 - assert sorted_hits[0].keys() == ['_score', '_id', '_source', '_index'] + assert list(sorted_hits[0].keys()) == ['_score', '_id', '_source', '_index'] assert type(sorted_hits[0]['_id']) == unicode assert sorted_hits[0]['_index'] == datetime.now().strftime("events-%Y%m%d") @@ -158,10 +158,10 @@ def test_complex_aggregation_query_execute(self): assert sorted_hits[0]['_source']['summary'] == 'Test Summary' assert sorted_hits[1]['_source']['type'] == 'event' - assert sorted_hits[0]['_source']['details'].keys() == ['information'] + assert list(sorted_hits[0]['_source']['details'].keys()) == ['information'] 
assert sorted_hits[0]['_source']['details']['information'] == 'Example information' - assert sorted_hits[1].keys() == ['_score', '_id', '_source', '_index'] + assert list(sorted_hits[1].keys()) == ['_score', '_id', '_source', '_index'] assert type(sorted_hits[1]['_id']) == unicode assert sorted_hits[1]['_index'] == datetime.now().strftime("events-%Y%m%d") @@ -170,7 +170,7 @@ def test_complex_aggregation_query_execute(self): assert sorted_hits[1]['_source']['summary'] == 'Test Summary' assert sorted_hits[1]['_source']['type'] == 'event' - assert sorted_hits[1]['_source']['details'].keys() == ['information'] + assert list(sorted_hits[1]['_source']['details'].keys()) == ['information'] assert sorted_hits[1]['_source']['details']['information'] == 'Example information' assert type(sorted_hits[2]['_id']) == unicode @@ -181,12 +181,12 @@ def test_complex_aggregation_query_execute(self): assert sorted_hits[2]['_source']['summary'] == 'Test Summary' assert sorted_hits[2]['_source']['type'] == 'event' - assert sorted_hits[2]['_source']['details'].keys() == ['information'] + assert list(sorted_hits[2]['_source']['details'].keys()) == ['information'] assert sorted_hits[2]['_source']['details']['information'] == 'Example information' - assert results['aggregations'].keys() == ['ip'] + assert list(results['aggregations'].keys()) == ['ip'] - assert results['aggregations']['ip'].keys() == ['terms'] + assert list(results['aggregations']['ip'].keys()) == ['terms'] assert len(results['aggregations']['ip']['terms']) == 2 @@ -221,13 +221,13 @@ def test_aggregation_query_execute(self): self.refresh(self.event_index_name) results = query.execute(self.es_client) - assert results.keys() == ['hits', 'meta', 'aggregations'] - assert results['meta'].keys() == ['timed_out'] + assert list(results.keys()) == ['hits', 'meta', 'aggregations'] + assert list(results['meta'].keys()) == ['timed_out'] assert results['meta']['timed_out'] is False assert len(results['hits']) == 2 - assert 
results['hits'][0].keys() == ['_score', '_id', '_source', '_index'] + assert list(results['hits'][0].keys()) == ['_score', '_id', '_source', '_index'] assert type(results['hits'][0]['_id']) == unicode assert results['hits'][0]['_index'] == datetime.now().strftime("events-%Y%m%d") @@ -236,10 +236,10 @@ def test_aggregation_query_execute(self): assert results['hits'][0]['_source']['summary'] == 'Test Summary' assert results['hits'][0]['_source']['type'] == 'event' - assert results['hits'][0]['_source']['details'].keys() == ['information'] + assert list(results['hits'][0]['_source']['details'].keys()) == ['information'] assert results['hits'][0]['_source']['details']['information'] == 'Example information' - assert results['hits'][1].keys() == ['_score', '_id', '_source', '_index'] + assert list(results['hits'][1].keys()) == ['_score', '_id', '_source', '_index'] assert type(results['hits'][1]['_id']) == unicode assert results['hits'][1]['_index'] == datetime.now().strftime("events-%Y%m%d") @@ -248,12 +248,12 @@ def test_aggregation_query_execute(self): assert results['hits'][1]['_source']['summary'] == 'Test Summary' assert results['hits'][1]['_source']['type'] == 'event' - assert results['hits'][1]['_source']['details'].keys() == ['information'] + assert list(results['hits'][1]['_source']['details'].keys()) == ['information'] assert results['hits'][1]['_source']['details']['information'] == 'Example information' - assert results['aggregations'].keys() == ['note'] + assert list(results['aggregations'].keys()) == ['note'] - assert results['aggregations']['note'].keys() == ['terms'] + assert list(results['aggregations']['note'].keys()) == ['terms'] assert len(results['aggregations']['note']['terms']) == 1 @@ -271,12 +271,12 @@ def test_simple_query_execute(self): results = query.execute(self.es_client) - assert results.keys() == ['hits', 'meta'] - assert results['meta'].keys() == ['timed_out'] + assert list(results.keys()) == ['hits', 'meta'] + assert 
list(results['meta'].keys()) == ['timed_out'] assert results['meta']['timed_out'] is False assert len(results['hits']) == 1 - assert results['hits'][0].keys() == ['_score', '_id', '_source', '_index'] + assert list(results['hits'][0].keys()) == ['_score', '_id', '_source', '_index'] assert type(results['hits'][0]['_id']) == unicode assert results['hits'][0]['_index'] == datetime.now().strftime("events-%Y%m%d") @@ -285,7 +285,7 @@ def test_simple_query_execute(self): assert results['hits'][0]['_source']['summary'] == 'Test Summary' assert results['hits'][0]['_source']['type'] == 'event' - assert results['hits'][0]['_source']['details'].keys() == ['information'] + assert list(results['hits'][0]['_source']['details'].keys()) == ['information'] assert results['hits'][0]['_source']['details']['information'] == 'Example information' with pytest.raises(KeyError): diff --git a/tests/mozdef_util/test_elasticsearch_client.py b/tests/mozdef_util/test_elasticsearch_client.py index b1464ec3c..52a63fb9e 100644 --- a/tests/mozdef_util/test_elasticsearch_client.py +++ b/tests/mozdef_util/test_elasticsearch_client.py @@ -435,8 +435,7 @@ class TestClusterHealth(ElasticsearchClientTest): def test_cluster_health_results(self): health_results = self.es_client.get_cluster_health() - health_keys = health_results.keys() - health_keys.sort() + health_keys = sorted(health_results.keys()) assert health_keys == ['active_primary_shards', 'active_shards', 'cluster_name', 'initializing_shards', 'number_of_data_nodes', 'number_of_nodes', 'relocating_shards', 'status', 'timed_out', 'unassigned_shards'] assert type(health_results['active_primary_shards']) is int assert type(health_results['active_shards']) is int diff --git a/tests/mozdef_util/utilities/test_dot_dict.py b/tests/mozdef_util/utilities/test_dot_dict.py index 0ba6e450c..7979e6d1e 100644 --- a/tests/mozdef_util/utilities/test_dot_dict.py +++ b/tests/mozdef_util/utilities/test_dot_dict.py @@ -19,7 +19,7 @@ class 
TestDotDict(UnitTestSuite): def test_blank_init(self): dct = DotDict() - assert dct.keys() == [] + assert list(dct.keys()) == [] def test_nonexisting_key(self): dct = DotDict() diff --git a/tests/mq/test_esworker_eventtask.py b/tests/mq/test_esworker_eventtask.py index 07448224a..034531431 100644 --- a/tests/mq/test_esworker_eventtask.py +++ b/tests/mq/test_esworker_eventtask.py @@ -84,7 +84,7 @@ def test_details_nondict(self): } result = self.key_mapping(message) assert result['summary'] == 'example summary' - assert result['details'].keys() == ['message', 'payload'] + assert list(result['details'].keys()) == ['message', 'payload'] assert result['details']['message'] == 'somestring' assert result['details']['payload'] == 'examplepayload' diff --git a/tests/rest/test_rest_index.py b/tests/rest/test_rest_index.py index 97a9c2e0f..7eba99a43 100644 --- a/tests/rest/test_rest_index.py +++ b/tests/rest/test_rest_index.py @@ -260,7 +260,7 @@ def test_route_endpoints(self): json_resp.sort() - assert json_resp[0].keys() == ['username', 'failures', 'begin', 'end', 'success'] + assert list(json_resp[0].keys()) == ['username', 'failures', 'begin', 'end', 'success'] assert json_resp[0]['username'] == 'qwerty@mozillafoundation.org' assert json_resp[0]['failures'] == 8 assert json_resp[0]['success'] == 3 @@ -269,7 +269,7 @@ def test_route_endpoints(self): assert type(json_resp[0]['end']) == unicode assert parse(json_resp[0]['begin']).tzname() == 'UTC' - assert json_resp[1].keys() == ['username', 'failures', 'begin', 'end', 'success'] + assert list(json_resp[1].keys()) == ['username', 'failures', 'begin', 'end', 'success'] assert json_resp[1]['username'] == 'ttester@mozilla.com' assert json_resp[1]['failures'] == 9 assert json_resp[1]['success'] == 7 @@ -278,7 +278,7 @@ def test_route_endpoints(self): assert type(json_resp[1]['end']) == unicode assert parse(json_resp[1]['begin']).tzname() == 'UTC' - assert json_resp[2].keys() == ['username', 'failures', 'begin', 'end', 'success'] 
+ assert list(json_resp[2].keys()) == ['username', 'failures', 'begin', 'end', 'success'] assert json_resp[2]['username'] == 'ttesterson@mozilla.com' assert json_resp[2]['failures'] == 10 assert json_resp[2]['success'] == 5 From 401ea8a6eeb3d6c55f5bd19364d4bce8403114bc Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 28 Jun 2019 17:12:17 -0500 Subject: [PATCH 14/63] Rename xrange function --- tests/alerts/alert_test_suite.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/alerts/alert_test_suite.py b/tests/alerts/alert_test_suite.py index e0dee6db3..dc00c8fa9 100644 --- a/tests/alerts/alert_test_suite.py +++ b/tests/alerts/alert_test_suite.py @@ -241,7 +241,7 @@ def copy(obj): @staticmethod def create_events(default_event, num_events): events = [] - for num in xrange(num_events): + for num in range(num_events): events.append(AlertTestSuite.create_event(default_event)) return events From e30f3f1d69763ff69cdbb3293ddef6768f0e1881 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 28 Jun 2019 17:26:58 -0500 Subject: [PATCH 15/63] Remove call to encode ascii on strings --- alerts/lib/alerttask.py | 14 ++++---------- bot/irc/mozdefbot.py | 2 +- cloudy_mozdef/lambda_layer/build/lib/alerttask.py | 6 +----- cron/google2mozdef.py | 4 ++-- mozdef_util/mozdef_util/utilities/dict2List.py | 8 ++------ mq/plugins/mozilla_location.py | 4 ++-- 6 files changed, 12 insertions(+), 26 deletions(-) diff --git a/alerts/lib/alerttask.py b/alerts/lib/alerttask.py index de523781c..8cbf9a504 100644 --- a/alerts/lib/alerttask.py +++ b/alerts/lib/alerttask.py @@ -344,11 +344,7 @@ def searchEventsAggregated(self, aggregationPath, samplesLimit=5): for i in Counter(aggregationValues).most_common(): idict = {"value": i[0], "count": i[1], "events": [], "allevents": []} for r in results: - if ( - getValueByPath(r["_source"], aggregationPath).encode( - "ascii", "ignore" - ) == i[0] - ): + if getValueByPath(r["_source"], aggregationPath) == i[0]: # copy events 
detail into this aggregation up to our samples limit if len(idict["events"]) < samplesLimit: idict["events"].append(r) @@ -510,11 +506,9 @@ def tagEventsAlert(self, events, alertResultES): event["_source"]["alert_names"] = [] event["_source"]["alert_names"].append(self.determine_alert_classname()) - self.es.save_event( - index=event["_index"], body=event["_source"], doc_id=event["_id"] - ) - # We refresh here to ensure our changes to the events will show up for the next search query results - self.es.refresh(event["_index"]) + self.es.save_event(index=event["_index"], body=event["_source"], doc_id=event["_id"]) + # We refresh here to ensure our changes to the events will show up for the next search query results + self.es.refresh(event["_index"]) except Exception as e: self.log.error("Error while updating events in ES: {0}".format(e)) diff --git a/bot/irc/mozdefbot.py b/bot/irc/mozdefbot.py index ad185f716..e1d37ec97 100755 --- a/bot/irc/mozdefbot.py +++ b/bot/irc/mozdefbot.py @@ -147,7 +147,7 @@ def formatAlert(jsonDictIn): return colorify('{0}: {1} {2}'.format( severity, colors['blue'] + category + colors['normal'], - summary.encode('ascii', 'replace') + summary )) diff --git a/cloudy_mozdef/lambda_layer/build/lib/alerttask.py b/cloudy_mozdef/lambda_layer/build/lib/alerttask.py index 84e9b3223..678443553 100644 --- a/cloudy_mozdef/lambda_layer/build/lib/alerttask.py +++ b/cloudy_mozdef/lambda_layer/build/lib/alerttask.py @@ -347,11 +347,7 @@ def searchEventsAggregated(self, aggregationPath, samplesLimit=5): for i in Counter(aggregationValues).most_common(): idict = {"value": i[0], "count": i[1], "events": [], "allevents": []} for r in results: - if ( - getValueByPath(r["_source"], aggregationPath).encode( - "ascii", "ignore" - ) == i[0] - ): + if getValueByPath(r["_source"], aggregationPath) == i[0]: # copy events detail into this aggregation up to our samples limit if len(idict["events"]) < samplesLimit: idict["events"].append(r) diff --git 
a/cron/google2mozdef.py b/cron/google2mozdef.py index d8a7901be..bad8b4fef 100755 --- a/cron/google2mozdef.py +++ b/cron/google2mozdef.py @@ -66,7 +66,7 @@ def flattenDict(inDict, pre=None, values=True): if isinstance(value, str): yield '.'.join(pre) + '.' + key + '=' + str(value) elif isinstance(value, unicode): - yield '.'.join(pre) + '.' + key + '=' + value.encode('ascii', 'ignore') + yield '.'.join(pre) + '.' + key + '=' + value elif value is None: yield '.'.join(pre) + '.' + key + '=None' else: @@ -76,7 +76,7 @@ def flattenDict(inDict, pre=None, values=True): if isinstance(value, str): yield key + '=' + str(value) elif isinstance(value, unicode): - yield key + '=' + value.encode('ascii', 'ignore') + yield key + '=' + value elif value is None: yield key + '=None' else: diff --git a/mozdef_util/mozdef_util/utilities/dict2List.py b/mozdef_util/mozdef_util/utilities/dict2List.py index af5a7a139..7d9461eb5 100644 --- a/mozdef_util/mozdef_util/utilities/dict2List.py +++ b/mozdef_util/mozdef_util/utilities/dict2List.py @@ -8,23 +8,19 @@ def dict2List(inObj): for d in dict2List(value): yield d elif isinstance(value, list): - yield key.encode('ascii', 'ignore').lower() + yield key.lower() for l in dict2List(value): yield l else: - yield key.encode('ascii', 'ignore').lower() + yield key.lower() if isinstance(value, str): yield value.lower() - elif isinstance(value, unicode): - yield value.encode('ascii', 'ignore').lower() else: yield value elif isinstance(inObj, list): for v in inObj: if isinstance(v, str): yield v.lower() - elif isinstance(v, unicode): - yield v.encode('ascii', 'ignore').lower() elif isinstance(v, list): for l in dict2List(v): yield l diff --git a/mq/plugins/mozilla_location.py b/mq/plugins/mozilla_location.py index 4c64cebb9..60c6c2076 100644 --- a/mq/plugins/mozilla_location.py +++ b/mq/plugins/mozilla_location.py @@ -24,7 +24,7 @@ def __init__(self): def onMessage(self, message, metadata): if 'details' in message and 'hostname' in 
message['details']: - hostnamesplit = str.lower(message['details']['hostname'].encode('ascii', 'ignore')).split('.') + hostnamesplit = str.lower(message['details']['hostname']).split('.') if len(hostnamesplit) == 5: if 'mozilla' == hostnamesplit[-2]: message['details']['site'] = hostnamesplit[-3] @@ -35,7 +35,7 @@ def onMessage(self, message, metadata): else: message['details']['sitetype'] = 'unknown' elif 'hostname' in message: - hostnamesplit = str.lower(message['hostname'].encode('ascii', 'ignore')).split('.') + hostnamesplit = str.lower(message['hostname']).split('.') if len(hostnamesplit) == 5: if 'mozilla' == hostnamesplit[-2]: message['details']['site'] = hostnamesplit[-3] From 3345587f32ece18bbed07d766254a684e0160e64 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 28 Jun 2019 17:35:41 -0500 Subject: [PATCH 16/63] Fixup dashboard geomodel alert action --- alerts/actions/dashboard_geomodel.py | 4 ++-- tests/alerts/actions/test_dashboard_geomodel.py | 14 +++++++------- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/alerts/actions/dashboard_geomodel.py b/alerts/actions/dashboard_geomodel.py index 793bd51d6..74c99006e 100644 --- a/alerts/actions/dashboard_geomodel.py +++ b/alerts/actions/dashboard_geomodel.py @@ -107,8 +107,8 @@ def onMessage(self, message): previous_location_str = u'{0}, {1}'.format(previous_city, previous_country) alert_record = { - 'alert_id': b2a_hex(os.urandom(15)), - 'alert_code': b2a_hex(self.alert_classname), + 'alert_id': b2a_hex(os.urandom(15)).decode(), + 'alert_code': b2a_hex(self.alert_classname.encode()).decode(), 'user_id': auth_full_username, 'risk': self.config['risk'], 'summary': summary, diff --git a/tests/alerts/actions/test_dashboard_geomodel.py b/tests/alerts/actions/test_dashboard_geomodel.py index d79c5a128..33610e715 100644 --- a/tests/alerts/actions/test_dashboard_geomodel.py +++ b/tests/alerts/actions/test_dashboard_geomodel.py @@ -103,7 +103,7 @@ def test_malformed_message_bad(self): assert 
self.test_connect_called is True assert self.test_result_record is None - def test_unicode_location(self): + def test_str_location(self): self.good_message_dict['summary'] = u"ttesterson@mozilla.com NEWCOUNTRY \u0107abcd, \xe4Spain access from 1.2.3.4 (duo) [deviation:12.07010770457331] last activity was from Ottawa, Canada (3763 km away) approx 23.43 hours before" self.good_message_dict['details']['locality_details']['city'] = u'\u0107abcd' self.good_message_dict['details']['locality_details']['country'] = u'\xe4Spain' @@ -112,19 +112,19 @@ def test_unicode_location(self): assert result_message == self.good_message_dict assert self.test_connect_called is True assert self.test_result_record is not None - assert type(result_message['summary']) is unicode - assert type(result_message['details']['locality_details']['city']) is unicode - assert type(result_message['details']['locality_details']['country']) is unicode + assert type(result_message['summary']) is str + assert type(result_message['details']['locality_details']['city']) is str + assert type(result_message['details']['locality_details']['country']) is str - def test_unicode_username(self): + def test_str_username(self): self.good_message_dict['details']['principal'] = u'\xfcttesterson@mozilla.com' assert self.test_result_record is None result_message = self.plugin.onMessage(self.good_message_dict) assert result_message == self.good_message_dict assert self.test_connect_called is True assert self.test_result_record is not None - assert type(result_message['summary']) is unicode - assert type(result_message['details']['principal']) is unicode + assert type(result_message['summary']) is str + assert type(result_message['details']['principal']) is str def test_written_details(self): assert self.test_result_record is None From 3c394a136570f820c311b71c065b427736ce8e1f Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 28 Jun 2019 17:45:21 -0500 Subject: [PATCH 17/63] Update map to list for ipv6 --- 
alerts/plugins/ip_source_enrichment.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/alerts/plugins/ip_source_enrichment.py b/alerts/plugins/ip_source_enrichment.py index 7a44753a0..84d18312f 100644 --- a/alerts/plugins/ip_source_enrichment.py +++ b/alerts/plugins/ip_source_enrichment.py @@ -8,6 +8,7 @@ import os import re +import functools import netaddr @@ -23,10 +24,11 @@ def _find_ip_addresses(string): ipv6_rx = '(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))' ipv4 = re.findall(ipv4_rx, string) - ipv6 = map( + ipv6_map = map( lambda match: match[0] if isinstance(match, tuple) else match, re.findall(ipv6_rx, string)) + ipv6 = [x for x in ipv6_map] return ipv4 + ipv6 @@ -42,11 +44,11 @@ def find_ips(value): if isinstance(value, list) or isinstance(value, tuple): found = [find_ips(item) for item in value] - return reduce(add, found, []) + return functools.reduce(add, found, []) if isinstance(value, dict): found = [find_ips(item) for item in value.values()] - return reduce(add, found, []) + return functools.reduce(add, found, []) return [] From 89073e14a61e6f56f11deba3b13f058c729e1d13 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 28 Jun 2019 17:59:53 -0500 Subject: [PATCH 18/63] Rename configparser import statement --- examples/demo/sampleData2MozDef.py | 4 ++-- rest/plugins/vpc_blackhole.py | 4 ++-- tests/http_test_suite.py | 
2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/examples/demo/sampleData2MozDef.py b/examples/demo/sampleData2MozDef.py index 4ad7c5f9d..761eb2b41 100755 --- a/examples/demo/sampleData2MozDef.py +++ b/examples/demo/sampleData2MozDef.py @@ -17,7 +17,7 @@ import requests import time from configlib import getConfig, OptionParser -import ConfigParser +import configparser import glob from datetime import timedelta @@ -43,7 +43,7 @@ def setConfig(option,value,configfile): """write an option/value pair to our config file""" if os.path.isfile(configfile): - config = ConfigParser.ConfigParser() + config = configparser.ConfigParser() configfp=open(configfile,'r') config.readfp(configfp) configfp.close() diff --git a/rest/plugins/vpc_blackhole.py b/rest/plugins/vpc_blackhole.py index 292b4e279..ec99e1024 100644 --- a/rest/plugins/vpc_blackhole.py +++ b/rest/plugins/vpc_blackhole.py @@ -5,7 +5,7 @@ import os import sys -import ConfigParser +import configparser import netaddr from boto3.session import Session @@ -67,7 +67,7 @@ def __init__(self): self.initConfiguration() def initConfiguration(self): - myparser = ConfigParser.ConfigParser() + myparser = configparser.ConfigParser() myparser.read(self.configfile) cur_sections = myparser.sections() for cur_section in cur_sections: diff --git a/tests/http_test_suite.py b/tests/http_test_suite.py index 885b7486f..403518381 100644 --- a/tests/http_test_suite.py +++ b/tests/http_test_suite.py @@ -16,4 +16,4 @@ def test_route_endpoints(self): for route in self.routes: response = self.response_per_route(route) assert response.status_code == self.status_code - assert response.body == self.body + assert response.text == self.body From ff397a19f42ef103e74317e585e294a82161fb81 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 28 Jun 2019 18:00:05 -0500 Subject: [PATCH 19/63] Fixup ordering of json resp for rest api tests --- tests/rest/test_rest_index.py | 34 +++++++++++++++------------------- 1 file changed, 15 
insertions(+), 19 deletions(-) diff --git a/tests/rest/test_rest_index.py b/tests/rest/test_rest_index.py index 7eba99a43..f4cc8bf88 100644 --- a/tests/rest/test_rest_index.py +++ b/tests/rest/test_rest_index.py @@ -70,8 +70,6 @@ def test_route_endpoints(self): assert type(json_resp) == list assert len(json_resp) == 2 - json_resp.sort() - assert json_resp[1]['id'] == "Example-SSH-Dashboard" assert json_resp[1]['name'] == 'Example SSH Dashboard' @@ -258,33 +256,31 @@ def test_route_endpoints(self): assert type(json_resp) == list assert len(json_resp) == 3 - json_resp.sort() - - assert list(json_resp[0].keys()) == ['username', 'failures', 'begin', 'end', 'success'] - assert json_resp[0]['username'] == 'qwerty@mozillafoundation.org' - assert json_resp[0]['failures'] == 8 - assert json_resp[0]['success'] == 3 - assert type(json_resp[0]['begin']) == unicode + assert sorted(json_resp[0].keys()) == ['begin', 'end', 'failures', 'success', 'username'] + assert json_resp[0]['username'] == 'ttesterson@mozilla.com' + assert json_resp[0]['failures'] == 10 + assert json_resp[0]['success'] == 5 + assert type(json_resp[0]['begin']) == str assert parse(json_resp[0]['begin']).tzname() == 'UTC' - assert type(json_resp[0]['end']) == unicode + assert type(json_resp[0]['end']) == str assert parse(json_resp[0]['begin']).tzname() == 'UTC' - assert list(json_resp[1].keys()) == ['username', 'failures', 'begin', 'end', 'success'] + assert sorted(json_resp[1].keys()) == ['begin', 'end', 'failures', 'success', 'username'] assert json_resp[1]['username'] == 'ttester@mozilla.com' assert json_resp[1]['failures'] == 9 assert json_resp[1]['success'] == 7 - assert type(json_resp[1]['begin']) == unicode + assert type(json_resp[1]['begin']) == str assert parse(json_resp[1]['begin']).tzname() == 'UTC' - assert type(json_resp[1]['end']) == unicode + assert type(json_resp[1]['end']) == str assert parse(json_resp[1]['begin']).tzname() == 'UTC' - assert list(json_resp[2].keys()) == ['username', 'failures', 
'begin', 'end', 'success'] - assert json_resp[2]['username'] == 'ttesterson@mozilla.com' - assert json_resp[2]['failures'] == 10 - assert json_resp[2]['success'] == 5 - assert type(json_resp[2]['begin']) == unicode + assert sorted(json_resp[2].keys()) == ['begin', 'end', 'failures', 'success', 'username'] + assert json_resp[2]['username'] == 'qwerty@mozillafoundation.org' + assert json_resp[2]['failures'] == 8 + assert json_resp[2]['success'] == 3 + assert type(json_resp[2]['begin']) == str assert parse(json_resp[2]['begin']).tzname() == 'UTC' - assert type(json_resp[2]['end']) == unicode + assert type(json_resp[2]['end']) == str assert parse(json_resp[2]['begin']).tzname() == 'UTC' # Routes left need to have unit tests written for: From d8404b55090d054991f7fb91f65fed93237ec92b Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 28 Jun 2019 18:07:03 -0500 Subject: [PATCH 20/63] Fixup to_unicode file --- mozdef_util/mozdef_util/utilities/to_unicode.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/mozdef_util/mozdef_util/utilities/to_unicode.py b/mozdef_util/mozdef_util/utilities/to_unicode.py index b8122adf6..7fd0321db 100644 --- a/mozdef_util/mozdef_util/utilities/to_unicode.py +++ b/mozdef_util/mozdef_util/utilities/to_unicode.py @@ -1,8 +1,4 @@ def toUnicode(obj, encoding='utf-8'): - if type(obj) in [int, long, float, complex]: - # likely a number, convert it to string to get to unicode - obj = str(obj) - if isinstance(obj, basestring): - if not isinstance(obj, unicode): - obj = unicode(obj, encoding) + if not isinstance(obj, str): + obj = str(obj, encoding) return obj From 9e736c3b80d2f29c6a764dfe82dbc70abbdacbf6 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 28 Jun 2019 18:12:04 -0500 Subject: [PATCH 21/63] Fixup mq tests --- mq/plugins/large_strings.py | 8 ++++---- mq/plugins/vulnerability.py | 4 ++-- tests/mq/test_esworker_eventtask.py | 2 +- tests/unit_test_suite.py | 4 ++-- 4 files changed, 9 insertions(+), 9 
deletions(-) diff --git a/mq/plugins/large_strings.py b/mq/plugins/large_strings.py index fb926149a..7d9eecd5a 100644 --- a/mq/plugins/large_strings.py +++ b/mq/plugins/large_strings.py @@ -13,25 +13,25 @@ def __init__(self): def onMessage(self, message, metadata): if 'details' in message: if 'message' in message['details']: - if type(message['details']['message']) in (str, unicode) \ + if type(message['details']['message']) is str \ and len(message['details']['message']) > self.MAX_STRING_LENGTH: message['details']['message'] = message['details']['message'][:self.MAX_STRING_LENGTH] message['details']['message'] += ' ...' if 'cmdline' in message['details']: - if type(message['details']['cmdline']) in (str, unicode) \ + if type(message['details']['cmdline']) is str \ and len(message['details']['cmdline']) > self.MAX_STRING_LENGTH: message['details']['cmdline'] = message['details']['cmdline'][:self.MAX_STRING_LENGTH] message['details']['cmdline'] += ' ...' if 'pr_body' in message['details']: - if type(message['details']['pr_body']) in (str, unicode) \ + if type(message['details']['pr_body']) is str \ and len(message['details']['pr_body']) > self.MAX_STRING_LENGTH: message['details']['pr_body'] = message['details']['pr_body'][:self.MAX_STRING_LENGTH] message['details']['pr_body'] += ' ...' if 'summary' in message: - if type(message['summary']) in (str, unicode) \ + if type(message['summary']) is str \ and len(message['summary']) > self.MAX_STRING_LENGTH: message['summary'] = message['summary'][:self.MAX_STRING_LENGTH] message['summary'] += ' ...' 
diff --git a/mq/plugins/vulnerability.py b/mq/plugins/vulnerability.py index 73eaa2ef1..54ef00643 100644 --- a/mq/plugins/vulnerability.py +++ b/mq/plugins/vulnerability.py @@ -67,14 +67,14 @@ def calculate_id_v1(self, message): s = '{0}|{1}|{2}'.format( message['asset']['assetid'], message['vuln']['vulnid'], message['sourcename']) - return hashlib.md5(s).hexdigest() + return hashlib.md5(s.encode()).hexdigest() def calculate_id_v2(self, message): s = '{0}|{1}|{2}|{3}'.format( message['zone'], message['sourcename'], message['asset']['hostname'], message['asset']['ipaddress']) - return hashlib.md5(s).hexdigest() + return hashlib.md5(s.encode()).hexdigest() def onMessage(self, message, metadata): if 'type' not in message or message['type'] != 'vulnerability': diff --git a/tests/mq/test_esworker_eventtask.py b/tests/mq/test_esworker_eventtask.py index 034531431..bb642a9d8 100644 --- a/tests/mq/test_esworker_eventtask.py +++ b/tests/mq/test_esworker_eventtask.py @@ -84,7 +84,7 @@ def test_details_nondict(self): } result = self.key_mapping(message) assert result['summary'] == 'example summary' - assert list(result['details'].keys()) == ['message', 'payload'] + assert sorted(result['details'].keys()) == ['message', 'payload'] assert result['details']['message'] == 'somestring' assert result['details']['payload'] == 'examplepayload' diff --git a/tests/unit_test_suite.py b/tests/unit_test_suite.py index a973ae94e..28c2a8ec1 100644 --- a/tests/unit_test_suite.py +++ b/tests/unit_test_suite.py @@ -106,9 +106,9 @@ def generate_default_event(self): def verify_event(self, event, expected_event): assert sorted(event.keys()) == sorted(expected_event.keys()) - for key, value in expected_event.tems(): + for key, value in expected_event.items(): if key in ('receivedtimestamp', 'timestamp', 'utctimestamp'): - assert type(event[key]) == unicode + assert type(event[key]) == str else: assert event[key] == value, 'Incorrect match for {0}, expected: {1}'.format(key, value) From 
390a3feef25af15db0484fa37d1a79ffec737bbd Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 28 Jun 2019 18:21:48 -0500 Subject: [PATCH 22/63] Rename unicode type to str --- alerts/alert_actions_worker.py | 2 +- bot/irc/mozdefbot.py | 2 +- bot/slack/mozdefbot.py | 2 +- cron/auth02mozdef.py | 4 ++-- cron/google2mozdef.py | 4 ---- mq/esworker_cloudtrail.py | 8 ++++---- mq/esworker_eventtask.py | 8 ++++---- mq/esworker_papertrail.py | 8 ++++---- mq/esworker_sqs.py | 8 ++++---- mq/plugins/customDocType.py | 3 +-- mq/plugins/fxaFixup.py | 2 +- rest/index.py | 2 +- tests/conftest.py | 2 +- tests/mozdef_util/query_models/test_exists_match.py | 8 ++++---- tests/mozdef_util/query_models/test_search_query.py | 12 ++++++------ tests/mozdef_util/test_elasticsearch_client.py | 4 ++-- 16 files changed, 37 insertions(+), 42 deletions(-) diff --git a/alerts/alert_actions_worker.py b/alerts/alert_actions_worker.py index 4bf7422c1..206b8164c 100644 --- a/alerts/alert_actions_worker.py +++ b/alerts/alert_actions_worker.py @@ -47,7 +47,7 @@ def on_message(self, body, message): # just to be safe..check what we were sent. if isinstance(body, dict): bodyDict = body - elif isinstance(body, str) or isinstance(body, unicode): + elif isinstance(body, str): try: bodyDict = json.loads(body) # lets assume it's json except ValueError as e: diff --git a/bot/irc/mozdefbot.py b/bot/irc/mozdefbot.py index e1d37ec97..3fcb20d60 100755 --- a/bot/irc/mozdefbot.py +++ b/bot/irc/mozdefbot.py @@ -290,7 +290,7 @@ def on_message(self, body, message): # just to be safe..check what we were sent. 
if isinstance(body, dict): bodyDict = body - elif isinstance(body, str) or isinstance(body, unicode): + elif isinstance(body, str): try: bodyDict = json.loads(body) # lets assume it's json except ValueError as e: diff --git a/bot/slack/mozdefbot.py b/bot/slack/mozdefbot.py index 1d36169c7..b442d5577 100644 --- a/bot/slack/mozdefbot.py +++ b/bot/slack/mozdefbot.py @@ -49,7 +49,7 @@ def on_message(self, body, message): # just to be safe..check what we were sent. if isinstance(body, dict): body_dict = body - elif isinstance(body, str) or isinstance(body, unicode): + elif isinstance(body, str): try: body_dict = json.loads(body) # lets assume it's json except ValueError as e: diff --git a/cron/auth02mozdef.py b/cron/auth02mozdef.py index 96be095e1..12e54608a 100644 --- a/cron/auth02mozdef.py +++ b/cron/auth02mozdef.py @@ -252,8 +252,8 @@ def byteify(input): return {byteify(key): byteify(value) for key, value in input.items()} elif isinstance(input, list): return [byteify(element) for element in input] - elif isinstance(input, unicode): - return input.encode("utf-8") + elif not isinstance(input, str): + return input.encode() else: return input diff --git a/cron/google2mozdef.py b/cron/google2mozdef.py index bad8b4fef..15d914cac 100755 --- a/cron/google2mozdef.py +++ b/cron/google2mozdef.py @@ -65,8 +65,6 @@ def flattenDict(inDict, pre=None, values=True): if values: if isinstance(value, str): yield '.'.join(pre) + '.' + key + '=' + str(value) - elif isinstance(value, unicode): - yield '.'.join(pre) + '.' + key + '=' + value elif value is None: yield '.'.join(pre) + '.' 
+ key + '=None' else: @@ -75,8 +73,6 @@ def flattenDict(inDict, pre=None, values=True): if values: if isinstance(value, str): yield key + '=' + str(value) - elif isinstance(value, unicode): - yield key + '=' + value elif value is None: yield key + '=None' else: diff --git a/mq/esworker_cloudtrail.py b/mq/esworker_cloudtrail.py index 7356fdd57..5dab31c2a 100755 --- a/mq/esworker_cloudtrail.py +++ b/mq/esworker_cloudtrail.py @@ -246,11 +246,11 @@ def keyMapping(aDict): # we let them dictate the data type with field_datatype # convention if newName.endswith('_int'): - returndict[u'details'][unicode(newName)] = int(v) + returndict[u'details'][str(newName)] = int(v) elif newName.endswith('_float'): - returndict[u'details'][unicode(newName)] = float(v) + returndict[u'details'][str(newName)] = float(v) else: - returndict[u'details'][unicode(newName)] = toUnicode(v) + returndict[u'details'][str(newName)] = toUnicode(v) else: returndict[u'details'][k] = v @@ -375,7 +375,7 @@ def on_message(self, body): # just to be safe..check what we were sent. 
if isinstance(body, dict): bodyDict = body - elif isinstance(body, str) or isinstance(body, unicode): + elif isinstance(body, str): try: bodyDict = json.loads(body) # lets assume it's json except ValueError as e: diff --git a/mq/esworker_eventtask.py b/mq/esworker_eventtask.py index 817181328..98b9298a2 100755 --- a/mq/esworker_eventtask.py +++ b/mq/esworker_eventtask.py @@ -127,11 +127,11 @@ def keyMapping(aDict): # we let them dictate the data type with field_datatype # convention if newName.endswith('_int'): - returndict[u'details'][unicode(newName)] = int(v) + returndict[u'details'][str(newName)] = int(v) elif newName.endswith('_float'): - returndict[u'details'][unicode(newName)] = float(v) + returndict[u'details'][str(newName)] = float(v) else: - returndict[u'details'][unicode(newName)] = toUnicode(v) + returndict[u'details'][str(newName)] = toUnicode(v) # nxlog windows log handling if 'Domain' in aDict and 'SourceModuleType' in aDict: @@ -190,7 +190,7 @@ def on_message(self, body, message): # just to be safe..check what we were sent. 
if isinstance(body, dict): bodyDict = body - elif isinstance(body, str) or isinstance(body, unicode): + elif isinstance(body, str): try: bodyDict = json.loads(body) # lets assume it's json except ValueError as e: diff --git a/mq/esworker_papertrail.py b/mq/esworker_papertrail.py index 7ba9b58b2..3d907bd55 100755 --- a/mq/esworker_papertrail.py +++ b/mq/esworker_papertrail.py @@ -185,11 +185,11 @@ def keyMapping(aDict): # we let them dictate the data type with field_datatype # convention if newName.endswith('_int'): - returndict[u'details'][unicode(newName)] = int(v) + returndict[u'details'][str(newName)] = int(v) elif newName.endswith('_float'): - returndict[u'details'][unicode(newName)] = float(v) + returndict[u'details'][str(newName)] = float(v) else: - returndict[u'details'][unicode(newName)] = toUnicode(v) + returndict[u'details'][str(newName)] = toUnicode(v) # nxlog windows log handling if 'Domain' in aDict and 'SourceModuleType' in aDict: @@ -279,7 +279,7 @@ def on_message(self, body, message): # just to be safe..check what we were sent. 
if isinstance(body, dict): bodyDict = body - elif isinstance(body, str) or isinstance(body, unicode): + elif isinstance(body, str): try: bodyDict = json.loads(body) # lets assume it's json except ValueError as e: diff --git a/mq/esworker_sqs.py b/mq/esworker_sqs.py index 7564c13de..c00973837 100755 --- a/mq/esworker_sqs.py +++ b/mq/esworker_sqs.py @@ -124,11 +124,11 @@ def keyMapping(aDict): # we let them dictate the data type with field_datatype # convention if newName.endswith('_int'): - returndict[u'details'][unicode(newName)] = int(v) + returndict[u'details'][str(newName)] = int(v) elif newName.endswith('_float'): - returndict[u'details'][unicode(newName)] = float(v) + returndict[u'details'][str(newName)] = float(v) else: - returndict[u'details'][unicode(newName)] = toUnicode(v) + returndict[u'details'][str(newName)] = toUnicode(v) # nxlog windows log handling if 'Domain' in aDict and 'SourceModuleType' in aDict: @@ -249,7 +249,7 @@ def on_message(self, body, message): # just to be safe..check what we were sent. 
if isinstance(body, dict): bodyDict = body - elif isinstance(body, str) or isinstance(body, unicode): + elif isinstance(body, str): try: bodyDict = json.loads(body) # lets assume it's json except ValueError as e: diff --git a/mq/plugins/customDocType.py b/mq/plugins/customDocType.py index f076e2bd3..dccf12f2d 100644 --- a/mq/plugins/customDocType.py +++ b/mq/plugins/customDocType.py @@ -18,7 +18,6 @@ def onMessage(self, message, metadata): # set the type field for sub-categorical filtering if 'endpoint' in message and 'customendpoint' in message: if message['customendpoint']: - if isinstance(message['endpoint'], str) or \ - isinstance(message['endpoint'], unicode): + if isinstance(message['endpoint'], str): message['type'] = message['endpoint'] return (message, metadata) diff --git a/mq/plugins/fxaFixup.py b/mq/plugins/fxaFixup.py index f037d6592..2f6ee7d57 100644 --- a/mq/plugins/fxaFixup.py +++ b/mq/plugins/fxaFixup.py @@ -89,7 +89,7 @@ def onMessage(self, message, metadata): # handle the case of an escaped list: # "remoteaddresschain": "[\"1.2.3.4\",\"5.6.7.8\",\"127.0.0.1\"]" - if (isinstance(message['details']['remoteaddresschain'], unicode) and + if (isinstance(message['details']['remoteaddresschain'], str) and message['details']['remoteaddresschain'][0] == '[' and message['details']['remoteaddresschain'][-1] == ']'): # remove the brackets and double quotes diff --git a/rest/index.py b/rest/index.py index 2903060bb..1ca918dad 100644 --- a/rest/index.py +++ b/rest/index.py @@ -346,7 +346,7 @@ def createIncident(): return response # Validating Incident phase type - if (type(incident['phase']) not in (str, unicode) or + if (type(incident['phase']) is not str or incident['phase'] not in validIncidentPhases): response.status = 500 diff --git a/tests/conftest.py b/tests/conftest.py index 2ba78f23d..59459833b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -42,4 +42,4 @@ def pytest_configure(config): warning_text += "Continuing the unit test execution in 
10 seconds...CANCEL ME IF YOU DO NOT WANT PREVIOUS QUEUES PURGED!!! **\n" print(warning_text) - time.sleep(10) + # time.sleep(10) diff --git a/tests/mozdef_util/query_models/test_exists_match.py b/tests/mozdef_util/query_models/test_exists_match.py index 220341691..be6ed7ee6 100644 --- a/tests/mozdef_util/query_models/test_exists_match.py +++ b/tests/mozdef_util/query_models/test_exists_match.py @@ -6,7 +6,7 @@ class TestExistsMatchPositiveTestSuite(PositiveTestSuite): def query_tests(self): - tests = { + tests = [ ExistsMatch('summary'): [ {'summary': 'test'}, {'summary': 'example test summary'}, @@ -16,13 +16,13 @@ def query_tests(self): 'details': {'note': 'test'} }, ] - } + ] return tests class TestExistsMatchNegativeTestSuite(NegativeTestSuite): def query_tests(self): - tests = { + tests = [ ExistsMatch('summary'): [ {'note': 'example note'}, {'sum': 'example sum'}, @@ -39,5 +39,5 @@ def query_tests(self): # todo: fix code to handle this elegantly # }, ] - } + ] return tests diff --git a/tests/mozdef_util/query_models/test_search_query.py b/tests/mozdef_util/query_models/test_search_query.py index 0bbf4e216..24d8c8a66 100644 --- a/tests/mozdef_util/query_models/test_search_query.py +++ b/tests/mozdef_util/query_models/test_search_query.py @@ -150,7 +150,7 @@ def test_complex_aggregation_query_execute(self): assert len(sorted_hits) == 3 assert list(sorted_hits[0].keys()) == ['_score', '_id', '_source', '_index'] - assert type(sorted_hits[0]['_id']) == unicode + assert type(sorted_hits[0]['_id']) == str assert sorted_hits[0]['_index'] == datetime.now().strftime("events-%Y%m%d") @@ -162,7 +162,7 @@ def test_complex_aggregation_query_execute(self): assert sorted_hits[0]['_source']['details']['information'] == 'Example information' assert list(sorted_hits[1].keys()) == ['_score', '_id', '_source', '_index'] - assert type(sorted_hits[1]['_id']) == unicode + assert type(sorted_hits[1]['_id']) == str assert sorted_hits[1]['_index'] == 
datetime.now().strftime("events-%Y%m%d") @@ -173,7 +173,7 @@ def test_complex_aggregation_query_execute(self): assert list(sorted_hits[1]['_source']['details'].keys()) == ['information'] assert sorted_hits[1]['_source']['details']['information'] == 'Example information' - assert type(sorted_hits[2]['_id']) == unicode + assert type(sorted_hits[2]['_id']) == str assert sorted_hits[2]['_index'] == datetime.now().strftime("events-%Y%m%d") @@ -228,7 +228,7 @@ def test_aggregation_query_execute(self): assert len(results['hits']) == 2 assert list(results['hits'][0].keys()) == ['_score', '_id', '_source', '_index'] - assert type(results['hits'][0]['_id']) == unicode + assert type(results['hits'][0]['_id']) == str assert results['hits'][0]['_index'] == datetime.now().strftime("events-%Y%m%d") @@ -240,7 +240,7 @@ def test_aggregation_query_execute(self): assert results['hits'][0]['_source']['details']['information'] == 'Example information' assert list(results['hits'][1].keys()) == ['_score', '_id', '_source', '_index'] - assert type(results['hits'][1]['_id']) == unicode + assert type(results['hits'][1]['_id']) == str assert results['hits'][1]['_index'] == datetime.now().strftime("events-%Y%m%d") @@ -277,7 +277,7 @@ def test_simple_query_execute(self): assert len(results['hits']) == 1 assert list(results['hits'][0].keys()) == ['_score', '_id', '_source', '_index'] - assert type(results['hits'][0]['_id']) == unicode + assert type(results['hits'][0]['_id']) == str assert results['hits'][0]['_index'] == datetime.now().strftime("events-%Y%m%d") diff --git a/tests/mozdef_util/test_elasticsearch_client.py b/tests/mozdef_util/test_elasticsearch_client.py index 52a63fb9e..937d5fcb4 100644 --- a/tests/mozdef_util/test_elasticsearch_client.py +++ b/tests/mozdef_util/test_elasticsearch_client.py @@ -439,12 +439,12 @@ def test_cluster_health_results(self): assert health_keys == ['active_primary_shards', 'active_shards', 'cluster_name', 'initializing_shards', 'number_of_data_nodes', 
'number_of_nodes', 'relocating_shards', 'status', 'timed_out', 'unassigned_shards'] assert type(health_results['active_primary_shards']) is int assert type(health_results['active_shards']) is int - assert type(health_results['cluster_name']) is unicode + assert type(health_results['cluster_name']) is str assert type(health_results['initializing_shards']) is int assert type(health_results['number_of_data_nodes']) is int assert type(health_results['number_of_nodes']) is int assert type(health_results['relocating_shards']) is int - assert type(health_results['status']) is unicode + assert type(health_results['status']) is str assert type(health_results['timed_out']) is bool assert type(health_results['unassigned_shards']) is int From d421dbb33f9f43303a73f818558b53ec13075add Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 28 Jun 2019 18:49:38 -0500 Subject: [PATCH 23/63] Fixup mozdef_util query_model tests --- mq/esworker_cloudtrail.py | 2 +- .../query_models/query_test_suite.py | 4 +- .../query_models/test_aggregation.py | 2 +- .../query_models/test_exists_match.py | 45 ++--- .../query_models/test_less_than_match.py | 42 +++-- .../query_models/test_phrase_match.py | 62 ++++--- .../query_models/test_query_string_match.py | 164 ++++++++++-------- .../query_models/test_range_match.py | 30 ++-- .../query_models/test_search_query.py | 25 ++- .../query_models/test_term_match.py | 152 +++++++++------- .../query_models/test_terms_match.py | 51 +++--- .../query_models/test_wildcard_match.py | 68 ++++---- 12 files changed, 359 insertions(+), 288 deletions(-) diff --git a/mq/esworker_cloudtrail.py b/mq/esworker_cloudtrail.py index 5dab31c2a..0cd2cf7f7 100755 --- a/mq/esworker_cloudtrail.py +++ b/mq/esworker_cloudtrail.py @@ -57,7 +57,7 @@ def __init__(self, region_name='us-east-1', aws_access_key_id=None, aws_secret_a region_name, self.aws_access_key_id, self.aws_secret_access_key)) - except Exception, e: + except Exception as e: logger.error("Unable to connect to STS 
due to exception {0}".format(e)) raise diff --git a/tests/mozdef_util/query_models/query_test_suite.py b/tests/mozdef_util/query_models/query_test_suite.py index eeb8a36de..28e2b642a 100644 --- a/tests/mozdef_util/query_models/query_test_suite.py +++ b/tests/mozdef_util/query_models/query_test_suite.py @@ -24,7 +24,9 @@ def verify_test(self, query_result, positive_test): assert len(query_result['hits']) is 0 def test_query_class(self): - for query, events in self.query_tests().items(): + for testcase in self.query_tests(): + query = testcase[0] + events = testcase[1] for event in events: if pytest.config.option.delete_indexes: self.reset_elasticsearch() diff --git a/tests/mozdef_util/query_models/test_aggregation.py b/tests/mozdef_util/query_models/test_aggregation.py index 58f8d4e7c..328d1cf26 100644 --- a/tests/mozdef_util/query_models/test_aggregation.py +++ b/tests/mozdef_util/query_models/test_aggregation.py @@ -131,7 +131,7 @@ def test_aggregation_non_existing_term(self): search_query.add_aggregation(Aggregation('example')) results = search_query.execute(self.es_client) - assert list(results.keys()) == ['hits', 'meta', 'aggregations'] + assert sorted(results.keys()) == ['aggregations', 'hits', 'meta'] assert len(results['hits']) == 4 assert list(results['aggregations'].keys()) == ['example'] diff --git a/tests/mozdef_util/query_models/test_exists_match.py b/tests/mozdef_util/query_models/test_exists_match.py index be6ed7ee6..f2fed9bfc 100644 --- a/tests/mozdef_util/query_models/test_exists_match.py +++ b/tests/mozdef_util/query_models/test_exists_match.py @@ -7,14 +7,21 @@ class TestExistsMatchPositiveTestSuite(PositiveTestSuite): def query_tests(self): tests = [ - ExistsMatch('summary'): [ - {'summary': 'test'}, - {'summary': 'example test summary'}, + [ + ExistsMatch('summary'), [ + {'summary': 'test'}, + {'summary': 'example test summary'}, + ] ], - ExistsMatch('details.note'): [ - {'summary': 'garbage summary', - 'details': {'note': 'test'} - }, + [ + 
ExistsMatch('details.note'), [ + { + 'summary': 'garbage summary', + 'details': { + 'note': 'test' + } + }, + ] ] ] return tests @@ -23,21 +30,17 @@ def query_tests(self): class TestExistsMatchNegativeTestSuite(NegativeTestSuite): def query_tests(self): tests = [ - ExistsMatch('summary'): [ - {'note': 'example note'}, - {'sum': 'example sum'}, - {'details': { - 'note': 'example note'}, - } + [ + ExistsMatch('summary'), [ + {'note': 'example note'}, + {'sum': 'example sum'}, + {'details': {'note': 'example note'}}, + ] ], - ExistsMatch('details.note'): [ - {'summary': 'garbage summary', - 'details': {'ipaddress': 'test'} - }, - # { - # 'details': 'no details', # Currently, this throws MapperParsingException - # todo: fix code to handle this elegantly - # }, + [ + ExistsMatch('details.note'), [ + {'summary': 'garbage summary','details': {'ipaddress': 'test'}}, + ] ] ] return tests diff --git a/tests/mozdef_util/query_models/test_less_than_match.py b/tests/mozdef_util/query_models/test_less_than_match.py index 9010e74c9..e5427395e 100644 --- a/tests/mozdef_util/query_models/test_less_than_match.py +++ b/tests/mozdef_util/query_models/test_less_than_match.py @@ -7,30 +7,34 @@ class TestLessThanMatchPositiveTestSuite(PositiveTestSuite): def query_tests(self): boundry_date = "2016-08-12T21:07:12.316450+00:00" - tests = { - LessThanMatch('utctimestamp', boundry_date): [ - {'utctimestamp': '2015-08-12T21:07:12.316450+00:00'}, - {'utctimestamp': '2016-02-12T21:07:12.316450+00:00'}, - {'utctimestamp': '2016-08-11T21:07:12.316450+00:00'}, - {'utctimestamp': '2016-08-12T20:07:12.316450+00:00'}, - ], - } + tests = [ + [ + LessThanMatch('utctimestamp', boundry_date), [ + {'utctimestamp': '2015-08-12T21:07:12.316450+00:00'}, + {'utctimestamp': '2016-02-12T21:07:12.316450+00:00'}, + {'utctimestamp': '2016-08-11T21:07:12.316450+00:00'}, + {'utctimestamp': '2016-08-12T20:07:12.316450+00:00'}, + ], + ] + ] return tests class TestLessThanMatchNegativeTestSuite(NegativeTestSuite): def 
query_tests(self): boundry_date = "2016-08-12T21:07:12.316450+00:00" - tests = { - LessThanMatch('utctimestamp', boundry_date): [ - {'utctimestamp': '2017-08-12T21:07:12.316450+00:00'}, - {'utctimestamp': '2016-09-12T21:07:12.316450+00:00'}, - {'utctimestamp': '2016-08-14T21:07:12.316450+00:00'}, - {'utctimestamp': '2016-08-12T23:07:12.316450+00:00'}, - {'utctimestamp': '2016-08-12T21:08:12.316450+00:00'}, - {'utctimestamp': '2016-08-12T21:07:13.316450+00:00'}, - {'utctimestamp': '2016-08-12T21:07:12.416450+00:00'}, - {'utctimestamp': '2016-08-12T21:07:12.316450+00:00'}, + tests = [ + [ + LessThanMatch('utctimestamp', boundry_date), [ + {'utctimestamp': '2017-08-12T21:07:12.316450+00:00'}, + {'utctimestamp': '2016-09-12T21:07:12.316450+00:00'}, + {'utctimestamp': '2016-08-14T21:07:12.316450+00:00'}, + {'utctimestamp': '2016-08-12T23:07:12.316450+00:00'}, + {'utctimestamp': '2016-08-12T21:08:12.316450+00:00'}, + {'utctimestamp': '2016-08-12T21:07:13.316450+00:00'}, + {'utctimestamp': '2016-08-12T21:07:12.416450+00:00'}, + {'utctimestamp': '2016-08-12T21:07:12.316450+00:00'}, + ] ], - } + ] return tests diff --git a/tests/mozdef_util/query_models/test_phrase_match.py b/tests/mozdef_util/query_models/test_phrase_match.py index 368579687..9a19bfe23 100644 --- a/tests/mozdef_util/query_models/test_phrase_match.py +++ b/tests/mozdef_util/query_models/test_phrase_match.py @@ -6,40 +6,52 @@ class TestPhraseMatchPositiveTestSuite(PositiveTestSuite): def query_tests(self): - tests = { - PhraseMatch('summary', 'test run'): [ - {'summary': 'test run'}, - {'summary': 'this is test run source'}, - {'summary': 'this is test run'}, + tests = [ + [ + PhraseMatch('summary', 'test run'), [ + {'summary': 'test run'}, + {'summary': 'this is test run source'}, + {'summary': 'this is test run'}, + ], ], - PhraseMatch('summary', 'test'): [ - {'summary': 'test here'}, - {'summary': 'we are test here source'}, - {'summary': 'this is test'}, + [ + PhraseMatch('summary', 'test'), [ + 
{'summary': 'test here'}, + {'summary': 'we are test here source'}, + {'summary': 'this is test'}, + ] ], - PhraseMatch('summary', '/test/abc'): [ - {'summary': '/test/abc'}, - {'summary': '/test/abc/def'}, - {'summary': 'path /test/abc'}, + [ + PhraseMatch('summary', '/test/abc'), [ + {'summary': '/test/abc'}, + {'summary': '/test/abc/def'}, + {'summary': 'path /test/abc'}, + ] ], - } + ] return tests class TestPhraseMatchNegativeTestSuite(NegativeTestSuite): def query_tests(self): - tests = { - PhraseMatch('summary', 'test run'): [ - {'summary': 'test sample run'}, - {'notes': 'test run'}, - {'summary': 'example test running'}, + tests = [ + [ + PhraseMatch('summary', 'test run'), [ + {'summary': 'test sample run'}, + {'notes': 'test run'}, + {'summary': 'example test running'}, + ] ], - PhraseMatch('summary', 'test abc'): [ - {'summary': 'example summary test'}, - {'notes': 'we are test here source'}, + [ + PhraseMatch('summary', 'test abc'), [ + {'summary': 'example summary test'}, + {'notes': 'we are test here source'}, + ] ], - PhraseMatch('summary', 'test'): [ - {'summary': 'we are testing'}, + [ + PhraseMatch('summary', 'test'), [ + {'summary': 'we are testing'}, + ] ], - } + ] return tests diff --git a/tests/mozdef_util/query_models/test_query_string_match.py b/tests/mozdef_util/query_models/test_query_string_match.py index 54b9fd7d6..afeb5a788 100644 --- a/tests/mozdef_util/query_models/test_query_string_match.py +++ b/tests/mozdef_util/query_models/test_query_string_match.py @@ -13,94 +13,106 @@ class TestQueryStringMatchPositiveTestSuite(PositiveTestSuite): def query_tests(self): - tests = { - QueryStringMatch('summary: test'): [ - {'summary': 'test'}, + tests = [ + [ + QueryStringMatch('summary: test'), [ + {'summary': 'test'}, + ] ], - - QueryStringMatch('summary: test conf'): [ - {'summary': 'test'}, - {'summary': 'conf'}, - {'summary': 'test conf'}, + [ + QueryStringMatch('summary: test conf'), [ + {'summary': 'test'}, + {'summary': 'conf'}, + 
{'summary': 'test conf'}, + ] ], - - QueryStringMatch(hostname_test_regex): [ - {'hostname': 'host.groupa.test.def.subdomain.company.com'}, - {'hostname': 'host.groupa.test.def.subdomain.company.com'}, - {'hostname': 'host.groupa.subdomain.domain.company.com'}, - {'hostname': 'host.groupa.subdomain.domain1.company.com'}, - {'hostname': 'host.groupa.subdomain.company.com'}, - {'hostname': 'host1.groupa.subdomain.company.com'}, - {'hostname': 'host1.groupa.test.subdomain.company.com'}, - {'hostname': 'host-1.groupa.test.subdomain.domain.company.com'}, - {'hostname': 'host-v2-test6.groupa.test.subdomain.domain.company.com'}, - {'hostname': 'host1.groupa.subdomain.domain.company.com'}, - {'hostname': 'someotherhost1.hgi.groupa.subdomain.domain1.company.com'}, - {'hostname': 'host2.groupb.subdomain.domain.company.com'}, + [ + QueryStringMatch(hostname_test_regex), [ + {'hostname': 'host.groupa.test.def.subdomain.company.com'}, + {'hostname': 'host.groupa.test.def.subdomain.company.com'}, + {'hostname': 'host.groupa.subdomain.domain.company.com'}, + {'hostname': 'host.groupa.subdomain.domain1.company.com'}, + {'hostname': 'host.groupa.subdomain.company.com'}, + {'hostname': 'host1.groupa.subdomain.company.com'}, + {'hostname': 'host1.groupa.test.subdomain.company.com'}, + {'hostname': 'host-1.groupa.test.subdomain.domain.company.com'}, + {'hostname': 'host-v2-test6.groupa.test.subdomain.domain.company.com'}, + {'hostname': 'host1.groupa.subdomain.domain.company.com'}, + {'hostname': 'someotherhost1.hgi.groupa.subdomain.domain1.company.com'}, + {'hostname': 'host2.groupb.subdomain.domain.company.com'}, + ] ], - - QueryStringMatch(filename_matcher): [ - {'summary': 'test.exe'}, - {'summary': 'test.sh'}, + [ + QueryStringMatch(filename_matcher), [ + {'summary': 'test.exe'}, + {'summary': 'test.sh'}, + ] ], - - QueryStringMatch(ip_matcher): [ - {'destination': 'http://1.2.3.4/somepath'}, - {'destination': 'https://1.2.3.4/somepath'}, - {'destination': '1.2.3.4/somepath'}, - 
{'destination': '1.2.3.4/somepath'}, - {'destination': '1.2.3.4:443'}, - {'destination': '1.2.3.4:80'}, - # Over-match examples (which need to be validated further in alerts) - {'destination': 'https://foo.bar.baz.com/somepath'}, - {'destination': 'foo.bar.baz.com:80'}, - ] - } + [ + QueryStringMatch(ip_matcher), [ + {'destination': 'http://1.2.3.4/somepath'}, + {'destination': 'https://1.2.3.4/somepath'}, + {'destination': '1.2.3.4/somepath'}, + {'destination': '1.2.3.4/somepath'}, + {'destination': '1.2.3.4:443'}, + {'destination': '1.2.3.4:80'}, + # Over-match examples (which need to be validated further in alerts) + {'destination': 'https://foo.bar.baz.com/somepath'}, + {'destination': 'foo.bar.baz.com:80'}, + ] + ], + ] return tests class TestQueryStringMatchNegativeTestSuite(NegativeTestSuite): def query_tests(self): - tests = { - QueryStringMatch('summary: test'): [ - {'summary': 'example summary'}, - {'summary': 'example summary tes'}, - {'summary': 'testing'}, - {'note': 'test'}, + tests = [ + [ + QueryStringMatch('summary: test'), [ + {'summary': 'example summary'}, + {'summary': 'example summary tes'}, + {'summary': 'testing'}, + {'note': 'test'}, + ] ], - - QueryStringMatch('summary: test conf'): [ - {'summary': 'testing'}, - {'summary': 'configuration'}, - {'summary': 'testing configuration'}, + [ + QueryStringMatch('summary: test conf'), [ + {'summary': 'testing'}, + {'summary': 'configuration'}, + {'summary': 'testing configuration'}, + ] ], - - QueryStringMatch(hostname_test_regex): [ - {'hostname': ''}, - {'hostname': 'host.subdomain.company.com'}, - {'hostname': 'host.subdomain.domain1.company.com'}, - {'hostname': 'groupa.abc.company.com'}, - {'hostname': 'asub.subdomain.company.com'}, - {'hostname': 'example.com'}, - {'hostname': 'abc.company.com'}, - {'hostname': 'host1.groupa.asubdomain.company.com'}, - {'hostname': 'host1.groupa.subdomaina.company.com'}, - {'hostname': 'host1.groupaa.subdomain.company.com'}, - {'hostname': 
'host1.agroupb.subdomain.company.com'}, + [ + QueryStringMatch(hostname_test_regex), [ + {'hostname': ''}, + {'hostname': 'host.subdomain.company.com'}, + {'hostname': 'host.subdomain.domain1.company.com'}, + {'hostname': 'groupa.abc.company.com'}, + {'hostname': 'asub.subdomain.company.com'}, + {'hostname': 'example.com'}, + {'hostname': 'abc.company.com'}, + {'hostname': 'host1.groupa.asubdomain.company.com'}, + {'hostname': 'host1.groupa.subdomaina.company.com'}, + {'hostname': 'host1.groupaa.subdomain.company.com'}, + {'hostname': 'host1.agroupb.subdomain.company.com'}, + ] ], - - QueryStringMatch(filename_matcher): [ - {'summary': 'test.exe.abcd'}, - {'summary': 'testexe'}, - {'summary': 'test.1234'}, - {'summary': '.exe.test'}, + [ + QueryStringMatch(filename_matcher), [ + {'summary': 'test.exe.abcd'}, + {'summary': 'testexe'}, + {'summary': 'test.1234'}, + {'summary': '.exe.test'}, + ] ], - - QueryStringMatch(ip_matcher): [ - {'destination': 'https://foo.bar.mozilla.com/somepath'}, - {'destination': 'foo.bar.mozilla.com:80'}, - {'destination': 'http://example.com/somepath'}, - {'destination': 'example.com:443'} + [ + QueryStringMatch(ip_matcher), [ + {'destination': 'https://foo.bar.mozilla.com/somepath'}, + {'destination': 'foo.bar.mozilla.com:80'}, + {'destination': 'http://example.com/somepath'}, + {'destination': 'example.com:443'} + ] ], - } + ] return tests diff --git a/tests/mozdef_util/query_models/test_range_match.py b/tests/mozdef_util/query_models/test_range_match.py index d8061188b..f711ba71a 100644 --- a/tests/mozdef_util/query_models/test_range_match.py +++ b/tests/mozdef_util/query_models/test_range_match.py @@ -8,15 +8,17 @@ class TestRangeMatchPositiveTestSuite(PositiveTestSuite): def query_tests(self): begin_date = "2016-08-12T21:07:12.316450+00:00" end_date = "2016-08-13T21:07:12.316450+00:00" - tests = { - RangeMatch('utctimestamp', begin_date, end_date): [ - {'utctimestamp': '2016-08-12T21:07:12.316450+00:00'}, - {'utctimestamp': 
'2016-08-12T21:07:13.316450+00:00'}, - {'utctimestamp': '2016-08-12T23:04:12.316450+00:00'}, - {'utctimestamp': '2016-08-13T21:07:11.316450+00:00'}, - {'utctimestamp': '2016-08-13T21:07:12.316450+00:00'}, + tests = [ + [ + RangeMatch('utctimestamp', begin_date, end_date), [ + {'utctimestamp': '2016-08-12T21:07:12.316450+00:00'}, + {'utctimestamp': '2016-08-12T21:07:13.316450+00:00'}, + {'utctimestamp': '2016-08-12T23:04:12.316450+00:00'}, + {'utctimestamp': '2016-08-13T21:07:11.316450+00:00'}, + {'utctimestamp': '2016-08-13T21:07:12.316450+00:00'}, + ] ], - } + ] return tests @@ -24,10 +26,12 @@ class TestRangeMatchNegativeTestSuite(NegativeTestSuite): def query_tests(self): begin_date = "2016-08-12T21:07:12.316450+00:00" end_date = "2016-08-13T21:07:12.316450+00:00" - tests = { - RangeMatch('utctimestamp', begin_date, end_date): [ - {'utctimestamp': '2016-08-12T21:07:11.316450+00:00'}, - {'utctimestamp': '2016-08-13T21:07:13.316450+00:00'}, + tests = [ + [ + RangeMatch('utctimestamp', begin_date, end_date), [ + {'utctimestamp': '2016-08-12T21:07:11.316450+00:00'}, + {'utctimestamp': '2016-08-13T21:07:13.316450+00:00'}, + ] ], - } + ] return tests diff --git a/tests/mozdef_util/query_models/test_search_query.py b/tests/mozdef_util/query_models/test_search_query.py index 24d8c8a66..5da3d5ba3 100644 --- a/tests/mozdef_util/query_models/test_search_query.py +++ b/tests/mozdef_util/query_models/test_search_query.py @@ -141,7 +141,7 @@ def test_complex_aggregation_query_execute(self): self.refresh(self.event_index_name) results = query.execute(self.es_client) - assert list(results.keys()) == ['hits', 'meta', 'aggregations'] + assert sorted(results.keys()) == ['aggregations', 'hits', 'meta'] assert list(results['meta'].keys()) == ['timed_out'] assert results['meta']['timed_out'] is False @@ -149,7 +149,7 @@ def test_complex_aggregation_query_execute(self): assert len(sorted_hits) == 3 - assert list(sorted_hits[0].keys()) == ['_score', '_id', '_source', '_index'] + assert 
sorted(sorted_hits[0].keys()) == ['_id', '_index', '_score', '_source'] assert type(sorted_hits[0]['_id']) == str assert sorted_hits[0]['_index'] == datetime.now().strftime("events-%Y%m%d") @@ -161,7 +161,7 @@ def test_complex_aggregation_query_execute(self): assert list(sorted_hits[0]['_source']['details'].keys()) == ['information'] assert sorted_hits[0]['_source']['details']['information'] == 'Example information' - assert list(sorted_hits[1].keys()) == ['_score', '_id', '_source', '_index'] + assert sorted(sorted_hits[1].keys()) == ['_id', '_index', '_score', '_source'] assert type(sorted_hits[1]['_id']) == str assert sorted_hits[1]['_index'] == datetime.now().strftime("events-%Y%m%d") @@ -190,12 +190,11 @@ def test_complex_aggregation_query_execute(self): assert len(results['aggregations']['ip']['terms']) == 2 - results['aggregations']['ip']['terms'].sort() - assert results['aggregations']['ip']['terms'][0]['count'] == 1 - assert results['aggregations']['ip']['terms'][0]['key'] == '127.0.0.1' + assert results['aggregations']['ip']['terms'][0]['count'] == 2 + assert results['aggregations']['ip']['terms'][0]['key'] == '1.2.3.4' - assert results['aggregations']['ip']['terms'][1]['count'] == 2 - assert results['aggregations']['ip']['terms'][1]['key'] == '1.2.3.4' + assert results['aggregations']['ip']['terms'][1]['count'] == 1 + assert results['aggregations']['ip']['terms'][1]['key'] == '127.0.0.1' def test_aggregation_without_must_fields(self): event = self.generate_default_event() @@ -221,13 +220,13 @@ def test_aggregation_query_execute(self): self.refresh(self.event_index_name) results = query.execute(self.es_client) - assert list(results.keys()) == ['hits', 'meta', 'aggregations'] + assert sorted(results.keys()) == ['aggregations', 'hits', 'meta'] assert list(results['meta'].keys()) == ['timed_out'] assert results['meta']['timed_out'] is False assert len(results['hits']) == 2 - assert list(results['hits'][0].keys()) == ['_score', '_id', '_source', '_index'] + 
assert sorted(results['hits'][0].keys()) == ['_id', '_index', '_score', '_source'] assert type(results['hits'][0]['_id']) == str assert results['hits'][0]['_index'] == datetime.now().strftime("events-%Y%m%d") @@ -239,7 +238,7 @@ def test_aggregation_query_execute(self): assert list(results['hits'][0]['_source']['details'].keys()) == ['information'] assert results['hits'][0]['_source']['details']['information'] == 'Example information' - assert list(results['hits'][1].keys()) == ['_score', '_id', '_source', '_index'] + assert sorted(results['hits'][1].keys()) == ['_id', '_index', '_score', '_source'] assert type(results['hits'][1]['_id']) == str assert results['hits'][1]['_index'] == datetime.now().strftime("events-%Y%m%d") @@ -271,12 +270,12 @@ def test_simple_query_execute(self): results = query.execute(self.es_client) - assert list(results.keys()) == ['hits', 'meta'] + assert sorted(results.keys()) == ['hits', 'meta'] assert list(results['meta'].keys()) == ['timed_out'] assert results['meta']['timed_out'] is False assert len(results['hits']) == 1 - assert list(results['hits'][0].keys()) == ['_score', '_id', '_source', '_index'] + assert sorted(results['hits'][0].keys()) == ['_id', '_index', '_score', '_source'] assert type(results['hits'][0]['_id']) == str assert results['hits'][0]['_index'] == datetime.now().strftime("events-%Y%m%d") diff --git a/tests/mozdef_util/query_models/test_term_match.py b/tests/mozdef_util/query_models/test_term_match.py index 71e451c5a..9825af741 100644 --- a/tests/mozdef_util/query_models/test_term_match.py +++ b/tests/mozdef_util/query_models/test_term_match.py @@ -6,90 +6,110 @@ class TestTermMatchPositiveTestSuite(PositiveTestSuite): def query_tests(self): - tests = { - TermMatch('summary', 'test'): [ - {'summary': 'test'}, - {'summary': 'Test'}, - {'summary': 'test summary'}, - {'summary': 'example test summary'}, - {'summary': 'example summary test'}, + tests = [ + [ + TermMatch('summary', 'test'), [ + {'summary': 'test'}, + 
{'summary': 'Test'}, + {'summary': 'test summary'}, + {'summary': 'example test summary'}, + {'summary': 'example summary test'}, + ] ], - - TermMatch('summary', 'ldap'): [ - {'summary': 'LDAP'}, - {'summary': 'lDaP'}, - {'summary': 'ldap'}, + [ + TermMatch('summary', 'ldap'), [ + {'summary': 'LDAP'}, + {'summary': 'lDaP'}, + {'summary': 'ldap'}, + ] ], - - TermMatch('summary', 'LDAP'): [ - {'summary': 'LDAP'}, - {'summary': 'lDaP'}, - {'summary': 'ldap'}, + [ + TermMatch('summary', 'LDAP'), [ + {'summary': 'LDAP'}, + {'summary': 'lDaP'}, + {'summary': 'ldap'}, + ] ], - - TermMatch('summary', 'LDAP_INVALID_CREDENTIALS'): [ - {'summary': 'LDaP_InVaLID_CREDeNTiALS'}, + [ + TermMatch('summary', 'LDAP_INVALID_CREDENTIALS'), [ + {'summary': 'LDaP_InVaLID_CREDeNTiALS'}, + ] ], - - TermMatch('details.results', 'LDAP_INVALID_CREDENTIALS'): [ - { - 'details': { - "results": "LDAP_INVALID_CREDENTIALS", + [ + TermMatch('details.results', 'LDAP_INVALID_CREDENTIALS'), [ + { + 'details': { + "results": "LDAP_INVALID_CREDENTIALS", + } } - } + ] ], - - TermMatch('hostname', 'hostname.domain.com'): [ - {'hostname': 'hostname.domain.com'}, + [ + TermMatch('hostname', 'hostname.domain.com'), [ + {'hostname': 'hostname.domain.com'}, + ] ], - - TermMatch('somekey', 'tag'): [ - {'somekey': ['tag', 'other']}, + [ + TermMatch('somekey', 'tag'), [ + {'somekey': ['tag', 'other']}, + ] ], - } + ] return tests class TestTermMatchNegativeTestSuite(NegativeTestSuite): def query_tests(self): - tests = { - TermMatch('details.resultss', 'ldap'): [ - { - 'details': { - "resultss": "LDAP", + tests = [ + [ + TermMatch('details.resultss', 'ldap'), [ + { + 'details': { + "resultss": "LDAP", + } } - } + ] ], - TermMatch('summary', 'test'): [ - {'summary': 'example summary'}, - {'summary': 'example summary tes'}, - {'summary': 'testing'}, - {'summary': 'test.mozilla.domain'}, - {'summary': 'mozilla.test.domain'}, - {'summary': 'mozilla.test'}, + [ + TermMatch('summary', 'test'), [ + {'summary': 'example 
summary'}, + {'summary': 'example summary tes'}, + {'summary': 'testing'}, + {'summary': 'test.mozilla.domain'}, + {'summary': 'mozilla.test.domain'}, + {'summary': 'mozilla.test'}, + ] ], - TermMatch('note', 'test'): [ - {'note': 'example note'}, - {'note': 'example note tes'}, - {'note': 'testing'}, - {'summnoteary': 'test.mozilla.domain'}, - {'note': 'mozilla.test.domain'}, - {'note': 'mozilla.test'}, + [ + TermMatch('note', 'test'), [ + {'note': 'example note'}, + {'note': 'example note tes'}, + {'note': 'testing'}, + {'summnoteary': 'test.mozilla.domain'}, + {'note': 'mozilla.test.domain'}, + {'note': 'mozilla.test'}, + ] ], - TermMatch('summary', 'sum'): [ - {'summary': 'example test summary'}, - {'summary': 'example summary'}, - {'summary': 'summary test'}, - {'summary': 'summary'}, + [ + TermMatch('summary', 'sum'), [ + {'summary': 'example test summary'}, + {'summary': 'example summary'}, + {'summary': 'summary test'}, + {'summary': 'summary'}, + ] ], - TermMatch('hostname', 'hostname.domain.com'): [ - {'hostname': 'sub.hostname.domain.com'}, - {'hostname': 'hostnames.domain.com'}, - {'hostname': 'domain.com'}, - {'hostname': 'com'}, + [ + TermMatch('hostname', 'hostname.domain.com'), [ + {'hostname': 'sub.hostname.domain.com'}, + {'hostname': 'hostnames.domain.com'}, + {'hostname': 'domain.com'}, + {'hostname': 'com'}, + ] ], - TermMatch('somekey', 'tag'): [ - {'somekey': ['atag', 'tagging']}, + [ + TermMatch('somekey', 'tag'), [ + {'somekey': ['atag', 'tagging']}, + ] ], - } + ] return tests diff --git a/tests/mozdef_util/query_models/test_terms_match.py b/tests/mozdef_util/query_models/test_terms_match.py index 5bc3ce573..2b93d5d05 100644 --- a/tests/mozdef_util/query_models/test_terms_match.py +++ b/tests/mozdef_util/query_models/test_terms_match.py @@ -6,35 +6,42 @@ class TestTermsMatchPositiveTestSuite(PositiveTestSuite): def query_tests(self): - tests = { - TermsMatch('summary', ['test']): [ - {'summary': 'test'}, - {'summary': 'test summary'}, - 
{'summary': 'example test summary'}, - {'summary': 'example summary test'}, + tests = [ + [ + TermsMatch('summary', ['test']), [ + {'summary': 'test'}, + {'summary': 'test summary'}, + {'summary': 'example test summary'}, + {'summary': 'example summary test'}, + ] ], - - TermsMatch('summary', ['test', 'redfred']): [ - {'summary': 'test'}, - {'summary': 'redfred'}, - {'summary': 'test summary'}, - {'summary': 'example test summary'}, - {'summary': 'example redfred summary test'}, + [ + TermsMatch('summary', ['test', 'redfred']), [ + {'summary': 'test'}, + {'summary': 'redfred'}, + {'summary': 'test summary'}, + {'summary': 'example test summary'}, + {'summary': 'example redfred summary test'}, + ] ], - } + ] return tests class TestTermsMatchNegativeTestSuite(NegativeTestSuite): def query_tests(self): - tests = { - TermsMatch('summary', ['test']): [ - {'summary': 'example summary'}, - {'summary': 'example summary tes'}, + tests = [ + [ + TermsMatch('summary', ['test']), [ + {'summary': 'example summary'}, + {'summary': 'example summary tes'}, + ] ], - TermsMatch('summary', ['test', 'exam']): [ - {'summary': 'example summary'}, - {'summary': 'example summary tes'}, + [ + TermsMatch('summary', ['test', 'exam']), [ + {'summary': 'example summary'}, + {'summary': 'example summary tes'}, + ] ] - } + ] return tests diff --git a/tests/mozdef_util/query_models/test_wildcard_match.py b/tests/mozdef_util/query_models/test_wildcard_match.py index 75e142f26..ae87b81f5 100644 --- a/tests/mozdef_util/query_models/test_wildcard_match.py +++ b/tests/mozdef_util/query_models/test_wildcard_match.py @@ -6,45 +6,53 @@ class TestWildcardMatchPositiveTestSuite(PositiveTestSuite): def query_tests(self): - tests = { - WildcardMatch('summary', 'te*'): [ - {'summary': 'test'}, - {'summary': 'test summary'}, - {'summary': 'example test summary'}, - {'summary': 'example summary test'}, + tests = [ + [ + WildcardMatch('summary', 'te*'), [ + {'summary': 'test'}, + {'summary': 'test summary'}, + 
{'summary': 'example test summary'}, + {'summary': 'example summary test'}, + ] ], - - WildcardMatch('summary', '*te*'): [ - {'summary': 'abcteabc'}, - {'summary': 'abc te abc'}, - {'summary': 'abc te'}, + [ + WildcardMatch('summary', '*te*'), [ + {'summary': 'abcteabc'}, + {'summary': 'abc te abc'}, + {'summary': 'abc te'}, + ] ], - - WildcardMatch('details.ip', '19*'): [ - {'details': {'ip': '192.168.1.1'}}, - {'details': {'ip': '19.168.1.1'}}, + [ + WildcardMatch('details.ip', '19*'), [ + {'details': {'ip': '192.168.1.1'}}, + {'details': {'ip': '19.168.1.1'}}, + ] ], - - WildcardMatch('details.ip', '*1.0'): [ - {'details': {'ip': '192.168.1.0'}}, + [ + WildcardMatch('details.ip', '*1.0'), [ + {'details': {'ip': '192.168.1.0'}}, + ] ], - } + ] return tests class TestWildcardMatchNegativeTestSuite(NegativeTestSuite): def query_tests(self): - tests = { - WildcardMatch('summary', 'te*'): [ - {'summary': 'example summary'}, - {'summary': 'tabs 4 spaces'}, + tests = [ + [ + WildcardMatch('summary', 'te*'), [ + {'summary': 'example summary'}, + {'summary': 'tabs 4 spaces'}, + ] ], - - WildcardMatch('details.ip', '19*'): [ - {'details': {'ip': '10.1.1.1'}}, - {'details': {'ip': '2.168.1.192'}}, - {'details': {'ip': '10.19.1.1'}}, - {'details': {'ipaddress': '10.19.1.1'}}, + [ + WildcardMatch('details.ip', '19*'), [ + {'details': {'ip': '10.1.1.1'}}, + {'details': {'ip': '2.168.1.192'}}, + {'details': {'ip': '10.19.1.1'}}, + {'details': {'ipaddress': '10.19.1.1'}}, + ] ], - } + ] return tests From ba1726396b89ea098e71b9f5b2c226afff0592ee Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 28 Jun 2019 18:53:34 -0500 Subject: [PATCH 24/63] Revert time commented out --- tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 59459833b..2ba78f23d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -42,4 +42,4 @@ def pytest_configure(config): warning_text += "Continuing the unit test execution 
in 10 seconds...CANCEL ME IF YOU DO NOT WANT PREVIOUS QUEUES PURGED!!! **\n" print(warning_text) - # time.sleep(10) + time.sleep(10) From 8506c4eb1a90346ef82304c2923ce83bb23d3e81 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 28 Jun 2019 18:54:05 -0500 Subject: [PATCH 25/63] Update syntax in cloudtrail worker --- mq/esworker_cloudtrail.py | 6 +++--- mq/esworker_eventtask.py | 2 +- mq/esworker_sns_sqs.py | 6 +++--- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/mq/esworker_cloudtrail.py b/mq/esworker_cloudtrail.py index 0cd2cf7f7..754c83685 100755 --- a/mq/esworker_cloudtrail.py +++ b/mq/esworker_cloudtrail.py @@ -66,7 +66,7 @@ def __init__(self, region_name='us-east-1', aws_access_key_id=None, aws_secret_a try: if self.session_credentials is None or self.session_credentials.is_expired(): self.session_credentials = self.local_conn_sts.get_session_token() - except Exception, e: + except Exception as e: logger.error("Unable to get session token due to exception {0}".format(e)) raise try: @@ -76,7 +76,7 @@ def __init__(self, region_name='us-east-1', aws_access_key_id=None, aws_secret_a self.session_credentials.secret_key, self.session_credentials.session_token) if self.session_credentials else {} self.session_conn_sts = boto.sts.connect_to_region(**creds) - except Exception, e: + except Exception as e: logger.error("Unable to connect to STS with session token due to exception {0}".format(e)) raise self.conn_sts = self.session_conn_sts @@ -103,7 +103,7 @@ def assume_role(self, role_session_name=role_session_name, policy=policy).credentials logger.debug("Assumed new role with credential %s" % self.credentials[role_arn].to_dict()) - except Exception, e: + except Exception as e: logger.error("Unable to assume role {0} due to exception {1}".format(role_arn, e)) self.credentials[role_arn] = False return self.credentials[role_arn] diff --git a/mq/esworker_eventtask.py b/mq/esworker_eventtask.py index 98b9298a2..e29fb5995 100755 --- 
a/mq/esworker_eventtask.py +++ b/mq/esworker_eventtask.py @@ -22,7 +22,7 @@ from mozdef_util.utilities.to_unicode import toUnicode from mozdef_util.utilities.remove_at import removeAt -from lib.plugins import sendEventToPlugins, registerPlugins +from .lib.plugins import sendEventToPlugins, registerPlugins # running under uwsgi? diff --git a/mq/esworker_sns_sqs.py b/mq/esworker_sns_sqs.py index bdca5ae8e..9120cfade 100755 --- a/mq/esworker_sns_sqs.py +++ b/mq/esworker_sns_sqs.py @@ -23,9 +23,9 @@ from mozdef_util.utilities.logger import logger, initLogger from mozdef_util.elasticsearch_client import ElasticsearchClient, ElasticsearchBadServer, ElasticsearchInvalidIndex, ElasticsearchException -from lib.aws import get_aws_credentials -from lib.plugins import sendEventToPlugins, registerPlugins -from lib.sqs import connect_sqs +from .lib.aws import get_aws_credentials +from .lib.plugins import sendEventToPlugins, registerPlugins +from .lib.sqs import connect_sqs # running under uwsgi? From db2f337ce84ad91bbeb37d893c63df1df9fb8d4d Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Fri, 28 Jun 2019 19:03:35 -0500 Subject: [PATCH 26/63] Update exception string in test state --- tests/mozdef_util/test_state.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/mozdef_util/test_state.py b/tests/mozdef_util/test_state.py index be9d9bb1e..f029ca162 100644 --- a/tests/mozdef_util/test_state.py +++ b/tests/mozdef_util/test_state.py @@ -23,7 +23,7 @@ def test_bad_state_file(self): with pytest.raises(StateParsingError) as state_exception: State(state_path) expected_message = state_path + " state file found but isn't a recognized json format" - assert state_exception.value.message == expected_message + assert str(state_exception.value) == expected_message class TestStateSave(object): From 4cc481272aee10f884b1c6ed5c540b124133eb31 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Sat, 29 Jun 2019 01:09:03 -0500 Subject: [PATCH 27/63] Build base container 
before all other containers --- Makefile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Makefile b/Makefile index 95f496a5d..fe8c18705 100644 --- a/Makefile +++ b/Makefile @@ -66,6 +66,7 @@ build: build-from-cwd .PHONY: build-from-cwd build-from-cwd: ## Build local MozDef images (use make NO_CACHE=--no-cache build to disable caching) + docker-compose -f docker/compose/docker-compose.yml -p $(NAME) $(BUILD_MODE) $(PARALLEL) $(NO_CACHE) base docker-compose -f docker/compose/docker-compose.yml -p $(NAME) $(BUILD_MODE) $(PARALLEL) $(NO_CACHE) .PHONY: build-from-github @@ -76,6 +77,7 @@ build-from-github: ## Build local MozDef images from the github branch (use mak .PHONY: build-tests build-tests: ## Build end-to-end test environment only + docker-compose -f docker/compose/docker-compose-tests.yml -p test-$(NAME) $(NO_CACHE) $(BUILD_MODE) base docker-compose -f docker/compose/docker-compose-tests.yml -p test-$(NAME) $(NO_CACHE) $(BUILD_MODE) .PHONY: stop From 2bfec1e484d2df69d1a543928d6b7e4f3f7f96e4 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Sat, 29 Jun 2019 01:21:25 -0500 Subject: [PATCH 28/63] Temporarily modify configlib file --- docker/compose/mozdef_base/Dockerfile | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docker/compose/mozdef_base/Dockerfile b/docker/compose/mozdef_base/Dockerfile index 55b567616..624150acb 100644 --- a/docker/compose/mozdef_base/Dockerfile +++ b/docker/compose/mozdef_base/Dockerfile @@ -62,3 +62,7 @@ VOLUME /opt/mozdef/envs/mozdef/data ENV PATH=/opt/mozdef/envs/python/bin:$PATH USER root + +# Remove once https://github.com/jeffbryner/configlib/pull/9 is mergeg +# and a new version of configlib is in place +RUN sed -i 's/from configlib import getConfig/from .configlib import getConfig/g' /opt/mozdef/envs/python/lib/python3.6/site-packages/configlib/__init__.py \ No newline at end of file From c7954eebba119f9da997adc218955a586fe9abed Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Sat, 29 Jun 2019 13:51:40 -0500 
Subject: [PATCH 29/63] Fixup alerttask and deadman test case --- alerts/lib/alerttask.py | 3 +++ alerts/lib/deadman_alerttask.py | 5 +++-- tests/alerts/alert_test_case.py | 1 + 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/alerts/lib/alerttask.py b/alerts/lib/alerttask.py index 8cbf9a504..79faa488a 100644 --- a/alerts/lib/alerttask.py +++ b/alerts/lib/alerttask.py @@ -123,6 +123,9 @@ def parse_config(self, config_filename, config_keys): temp_value = getConfig(config_key, "", config_filename) setattr(self.config, config_key, temp_value) + def close_connections(self): + self.mqConn.release() + def _discover_task_exchange(self): """Use configuration information to understand the message queue protocol. return: amqp, sqs diff --git a/alerts/lib/deadman_alerttask.py b/alerts/lib/deadman_alerttask.py index 4bad48247..c08c6e799 100644 --- a/alerts/lib/deadman_alerttask.py +++ b/alerts/lib/deadman_alerttask.py @@ -1,4 +1,4 @@ -from alerttask import AlertTask +from .alerttask import AlertTask class DeadmanAlertTask(AlertTask): @@ -6,4 +6,5 @@ class DeadmanAlertTask(AlertTask): def executeSearchEventsSimple(self): # We override this method to specify the size as 1 # since we only care about if ANY events are found or not - return self.main_query.execute(self.es, indices=self.event_indices, size=1) + results = self.main_query.execute(self.es, indices=self.event_indices, size=1) + return results diff --git a/tests/alerts/alert_test_case.py b/tests/alerts/alert_test_case.py index c9cb1ecf1..5b4d6022a 100644 --- a/tests/alerts/alert_test_case.py +++ b/tests/alerts/alert_test_case.py @@ -22,4 +22,5 @@ def run(self, alert_filename, alert_classname): alert_task = alert_class_attr() alert_task.run() + alert_task.close_connections() return alert_task From 41f1d77b64c0cdb07f3b1b3dfac930168ba791b8 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Sat, 29 Jun 2019 14:08:03 -0500 Subject: [PATCH 30/63] Fixup plugin set test --- tests/mozdef_util/test_plugin_set.py | 8 
+++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/tests/mozdef_util/test_plugin_set.py b/tests/mozdef_util/test_plugin_set.py index 8bc78a614..48bef3c14 100644 --- a/tests/mozdef_util/test_plugin_set.py +++ b/tests/mozdef_util/test_plugin_set.py @@ -12,9 +12,11 @@ def setup(self): } def test_registered_plugins(self): - total_num_plugins = len([name for name in os.listdir(self.plugin_dir)]) - # We exclude the __init__.py file - assert len(self.plugin_set.enabled_plugins) == total_num_plugins - 1 + total_num_plugins = 0 + for name in os.listdir(self.plugin_dir): + if name.startswith('plugin'): + total_num_plugins += 1 + assert len(self.plugin_set.enabled_plugins) == total_num_plugins def test_registered_plugins_specific_enabled_plugins(self): enabled_plugins = ['plugin1'] From add50f0356756469c1ee1e52f13faee7df85f280 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Sat, 29 Jun 2019 14:57:41 -0500 Subject: [PATCH 31/63] Fix import path for rest plugins --- tests/rest/rest_test_suite.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/rest/rest_test_suite.py b/tests/rest/rest_test_suite.py index 502e78283..10f23bdee 100644 --- a/tests/rest/rest_test_suite.py +++ b/tests/rest/rest_test_suite.py @@ -8,6 +8,7 @@ import mock from configlib import OptionParser +import importlib class RestTestDict(DotDict): @@ -23,6 +24,10 @@ def setup(self): sample_config.configfile = os.path.join(os.path.dirname(__file__), 'index.conf') OptionParser.parse_args = mock.Mock(return_value=(sample_config, {})) + sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../../rest")) + import plugins + importlib.reload(plugins) from rest import index + self.application = index.application super(RestTestSuite, self).setup() From 9a075dcbe0dca56b494da4c2bb2b4a31e6d2c664 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Sat, 29 Jun 2019 15:11:00 -0500 Subject: [PATCH 32/63] Remove unicode-u keyword --- alerts/actions/dashboard_geomodel.py | 16 +- 
alerts/feedback_events.py | 2 +- alerts/lib/alert_plugin_set.py | 2 +- .../build/lib/alert_plugin_set.py | 2 +- docs/source/conf.py | 10 +- mq/esworker_cloudtrail.py | 54 +- mq/esworker_eventtask.py | 48 +- mq/esworker_papertrail.py | 38 +- mq/esworker_sns_sqs.py | 4 +- mq/esworker_sqs.py | 38 +- mq/plugins/broFixup.py | 532 +++++++++--------- mq/plugins/squidFixup.py | 62 +- mq/plugins/suricataFixup.py | 158 +++--- .../alerts/actions/test_dashboard_geomodel.py | 26 +- tests/alerts/alert_test_suite.py | 2 +- tests/alerts/test_feedback_events.py | 38 +- tests/alerts/test_geomodel.py | 28 +- .../mozdef_util/test_elasticsearch_client.py | 4 +- tests/mq/plugins/test_broFixup.py | 42 +- tests/mq/plugins/test_suricataFixup.py | 14 +- tests/mq/test_esworker_eventtask.py | 28 +- tests/mq/test_esworker_sns_sqs.py | 144 ++--- 22 files changed, 646 insertions(+), 646 deletions(-) diff --git a/alerts/actions/dashboard_geomodel.py b/alerts/actions/dashboard_geomodel.py index 74c99006e..390d43f51 100644 --- a/alerts/actions/dashboard_geomodel.py +++ b/alerts/actions/dashboard_geomodel.py @@ -85,26 +85,26 @@ def onMessage(self, message): whois = IPWhois(source_ip).lookup_whois() whois_str = whois['nets'][0]['description'] source_ip_isp = whois_str.replace('\n', ', ').replace('\r', '') - new_ip_info = u'{} ({})'.format(source_ip, source_ip_isp) + new_ip_info = '{} ({})'.format(source_ip, source_ip_isp) except Exception: - new_ip_info = u'{}'.format(source_ip) + new_ip_info = '{}'.format(source_ip) - new_location_str = u"" + new_location_str = "" if city.lower() == 'unknown': - new_location_str += u'{0}'.format(country) + new_location_str += '{0}'.format(country) else: - new_location_str += u'{0}, {1}'.format(city, country) + new_location_str += '{0}, {1}'.format(city, country) event_timestamp = toUTC(message['events'][0]['documentsource']['details']['event_time']) event_day = event_timestamp.strftime('%B %d, %Y') - summary = u'On {0} (UTC), did you login from {1} 
({2})?'.format(event_day, new_location_str, source_ip) + summary = 'On {0} (UTC), did you login from {1} ({2})?'.format(event_day, new_location_str, source_ip) previous_city = message['details']['previous_locality_details']['city'] previous_country = message['details']['previous_locality_details']['country'] if previous_city.lower() == 'unknown': - previous_location_str = u'{0}'.format(previous_country) + previous_location_str = '{0}'.format(previous_country) else: - previous_location_str = u'{0}, {1}'.format(previous_city, previous_country) + previous_location_str = '{0}, {1}'.format(previous_city, previous_country) alert_record = { 'alert_id': b2a_hex(os.urandom(15)).decode(), diff --git a/alerts/feedback_events.py b/alerts/feedback_events.py index b9152283a..9ec9e39db 100644 --- a/alerts/feedback_events.py +++ b/alerts/feedback_events.py @@ -31,7 +31,7 @@ def onEvent(self, event): user = event['_source']['details']['alert_information']['user_id'] event_summary = event['_source']['summary'] event_date = event['_source']['details']['alert_information']['date'] - summary = u"{} escalated alert within single-sign on (SSO) dashboard. Event Date: {} Summary: \"{}\"".format(user, event_date, event_summary) + summary = "{} escalated alert within single-sign on (SSO) dashboard. 
Event Date: {} Summary: \"{}\"".format(user, event_date, event_summary) for alert_code, tag in self._config.items(): if event['_source']['details']['alert_information']['alert_code'] == alert_code: diff --git a/alerts/lib/alert_plugin_set.py b/alerts/lib/alert_plugin_set.py index aa8a32b92..73f2e765f 100644 --- a/alerts/lib/alert_plugin_set.py +++ b/alerts/lib/alert_plugin_set.py @@ -6,7 +6,7 @@ class AlertPluginSet(PluginSet): def send_message_to_plugin(self, plugin_class, message, metadata=None): if 'utctimestamp' in message and 'summary' in message: - message_log_str = u'{0} received message: ({1}) {2}'.format(plugin_class.__module__, message['utctimestamp'], message['summary']) + message_log_str = '{0} received message: ({1}) {2}'.format(plugin_class.__module__, message['utctimestamp'], message['summary']) logger.info(message_log_str) return plugin_class.onMessage(message), metadata diff --git a/cloudy_mozdef/lambda_layer/build/lib/alert_plugin_set.py b/cloudy_mozdef/lambda_layer/build/lib/alert_plugin_set.py index aa8a32b92..73f2e765f 100644 --- a/cloudy_mozdef/lambda_layer/build/lib/alert_plugin_set.py +++ b/cloudy_mozdef/lambda_layer/build/lib/alert_plugin_set.py @@ -6,7 +6,7 @@ class AlertPluginSet(PluginSet): def send_message_to_plugin(self, plugin_class, message, metadata=None): if 'utctimestamp' in message and 'summary' in message: - message_log_str = u'{0} received message: ({1}) {2}'.format(plugin_class.__module__, message['utctimestamp'], message['summary']) + message_log_str = '{0} received message: ({1}) {2}'.format(plugin_class.__module__, message['utctimestamp'], message['summary']) logger.info(message_log_str) return plugin_class.onMessage(message), metadata diff --git a/docs/source/conf.py b/docs/source/conf.py index 61df2e7eb..22ebc9b73 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -42,8 +42,8 @@ master_doc = 'index' # General information about the project. 
-project = u'MozDef' -copyright = u'2014, Mozilla' +project = 'MozDef' +copyright = '2014, Mozilla' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -202,7 +202,7 @@ def setup(app): # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - ('index', 'MozDef.tex', u'MozDef Documentation', u'Mozilla', 'manual'), + ('index', 'MozDef.tex', 'MozDef Documentation', 'Mozilla', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of @@ -231,7 +231,7 @@ def setup(app): # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - ('index', 'mozdef', u'MozDef Documentation', [u'Mozilla'], 1) + ('index', 'mozdef', 'MozDef Documentation', ['Mozilla'], 1) ] # If true, show URL addresses after external links. @@ -244,7 +244,7 @@ def setup(app): # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - ('index', 'MozDef', u'MozDef Documentation', u'Mozilla', 'MozDef', 'MozDef: The Mozilla Defense Platform.', 'Miscellaneous'), + ('index', 'MozDef', 'MozDef Documentation', 'Mozilla', 'MozDef', 'MozDef: The Mozilla Defense Platform.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. 
diff --git a/mq/esworker_cloudtrail.py b/mq/esworker_cloudtrail.py index 754c83685..1625d0f43 100755 --- a/mq/esworker_cloudtrail.py +++ b/mq/esworker_cloudtrail.py @@ -164,72 +164,72 @@ def keyMapping(aDict): k = removeAt(k).lower() if k == 'sourceip': - returndict[u'details']['sourceipaddress'] = v + returndict['details']['sourceipaddress'] = v elif k == 'sourceipaddress': - returndict[u'details']['sourceipaddress'] = v + returndict['details']['sourceipaddress'] = v elif k in ('facility', 'source'): - returndict[u'source'] = v + returndict['source'] = v elif k in ('eventsource'): - returndict[u'hostname'] = v + returndict['hostname'] = v elif k in ('message', 'summary'): - returndict[u'summary'] = toUnicode(v) + returndict['summary'] = toUnicode(v) elif k in ('payload') and 'summary' not in aDict: # special case for heka if it sends payload as well as a summary, keep both but move payload to the details section. - returndict[u'summary'] = toUnicode(v) + returndict['summary'] = toUnicode(v) elif k in ('payload'): - returndict[u'details']['payload'] = toUnicode(v) + returndict['details']['payload'] = toUnicode(v) elif k in ('eventtime', 'timestamp', 'utctimestamp', 'date'): - returndict[u'utctimestamp'] = toUTC(v).isoformat() - returndict[u'timestamp'] = toUTC(v).isoformat() + returndict['utctimestamp'] = toUTC(v).isoformat() + returndict['timestamp'] = toUTC(v).isoformat() elif k in ('hostname', 'source_host', 'host'): - returndict[u'hostname'] = toUnicode(v) + returndict['hostname'] = toUnicode(v) elif k in ('tags'): if 'tags' not in returndict: - returndict[u'tags'] = [] + returndict['tags'] = [] if type(v) == list: - returndict[u'tags'] += v + returndict['tags'] += v else: if len(v) > 0: - returndict[u'tags'].append(v) + returndict['tags'].append(v) # nxlog keeps the severity name in syslogseverity,everyone else should use severity or level. 
elif k in ('syslogseverity', 'severity', 'severityvalue', 'level', 'priority'): - returndict[u'severity'] = toUnicode(v).upper() + returndict['severity'] = toUnicode(v).upper() elif k in ('facility', 'syslogfacility'): - returndict[u'facility'] = toUnicode(v) + returndict['facility'] = toUnicode(v) elif k in ('pid', 'processid'): - returndict[u'processid'] = toUnicode(v) + returndict['processid'] = toUnicode(v) # nxlog sets sourcename to the processname (i.e. sshd), everyone else should call it process name or pname elif k in ('pname', 'processname', 'sourcename', 'program'): - returndict[u'processname'] = toUnicode(v) + returndict['processname'] = toUnicode(v) # the file, or source elif k in ('path', 'logger', 'file'): - returndict[u'eventsource'] = toUnicode(v) + returndict['eventsource'] = toUnicode(v) elif k in ('type', 'eventtype', 'category'): - returndict[u'category'] = toUnicode(v) - returndict[u'type'] = 'cloudtrail' + returndict['category'] = toUnicode(v) + returndict['type'] = 'cloudtrail' # custom fields as a list/array elif k in ('fields', 'details'): if type(v) is not dict: - returndict[u'details'][u'message'] = v + returndict['details']['message'] = v else: if len(v) > 0: for details_key, details_value in v.items(): - returndict[u'details'][details_key] = details_value + returndict['details'][details_key] = details_value # custom fields/details as a one off, not in an array # i.e. 
fields.something=value or details.something=value @@ -239,20 +239,20 @@ def keyMapping(aDict): newName = newName.lower().replace('details.', '') # add a dict to hold the details if it doesn't exist if 'details' not in returndict: - returndict[u'details'] = dict() + returndict['details'] = dict() # add field with a special case for shippers that # don't send details # in an array as int/floats/strings # we let them dictate the data type with field_datatype # convention if newName.endswith('_int'): - returndict[u'details'][str(newName)] = int(v) + returndict['details'][str(newName)] = int(v) elif newName.endswith('_float'): - returndict[u'details'][str(newName)] = float(v) + returndict['details'][str(newName)] = float(v) else: - returndict[u'details'][str(newName)] = toUnicode(v) + returndict['details'][str(newName)] = toUnicode(v) else: - returndict[u'details'][k] = v + returndict['details'][k] = v if 'utctimestamp' not in returndict: # default in case we don't find a reasonable timestamp diff --git a/mq/esworker_eventtask.py b/mq/esworker_eventtask.py index e29fb5995..a142a07fc 100755 --- a/mq/esworker_eventtask.py +++ b/mq/esworker_eventtask.py @@ -49,71 +49,71 @@ def keyMapping(aDict): # set the timestamp when we received it, i.e. now returndict['receivedtimestamp'] = toUTC(datetime.now()).isoformat() returndict['mozdefhostname'] = options.mozdefhostname - returndict[u'details'] = {} + returndict['details'] = {} try: for k, v in aDict.items(): k = removeAt(k).lower() if k == 'sourceip': - returndict[u'details']['eventsourceipaddress'] = v + returndict['details']['eventsourceipaddress'] = v if k in ('facility', 'source'): - returndict[u'source'] = v + returndict['source'] = v if k in ('message', 'summary'): - returndict[u'summary'] = toUnicode(v) + returndict['summary'] = toUnicode(v) if k in ('payload') and 'summary' not in aDict: # special case for heka if it sends payload as well as a summary, keep both but move payload to the details section. 
- returndict[u'summary'] = toUnicode(v) + returndict['summary'] = toUnicode(v) elif k in ('payload'): - returndict[u'details']['payload'] = toUnicode(v) + returndict['details']['payload'] = toUnicode(v) if k in ('eventtime', 'timestamp', 'utctimestamp', 'date'): - returndict[u'utctimestamp'] = toUTC(v).isoformat() - returndict[u'timestamp'] = toUTC(v).isoformat() + returndict['utctimestamp'] = toUTC(v).isoformat() + returndict['timestamp'] = toUTC(v).isoformat() if k in ('hostname', 'source_host', 'host'): - returndict[u'hostname'] = toUnicode(v) + returndict['hostname'] = toUnicode(v) if k in ('tags'): if 'tags' not in returndict: - returndict[u'tags'] = [] + returndict['tags'] = [] if type(v) == list: - returndict[u'tags'] += v + returndict['tags'] += v else: if len(v) > 0: - returndict[u'tags'].append(v) + returndict['tags'].append(v) # nxlog keeps the severity name in syslogseverity,everyone else should use severity or level. if k in ('syslogseverity', 'severity', 'severityvalue', 'level', 'priority'): - returndict[u'severity'] = toUnicode(v).upper() + returndict['severity'] = toUnicode(v).upper() if k in ('facility', 'syslogfacility'): - returndict[u'facility'] = toUnicode(v) + returndict['facility'] = toUnicode(v) if k in ('pid', 'processid'): - returndict[u'processid'] = toUnicode(v) + returndict['processid'] = toUnicode(v) # nxlog sets sourcename to the processname (i.e. 
sshd), everyone else should call it process name or pname if k in ('pname', 'processname', 'sourcename', 'program'): - returndict[u'processname'] = toUnicode(v) + returndict['processname'] = toUnicode(v) # the file, or source if k in ('path', 'logger', 'file'): - returndict[u'eventsource'] = toUnicode(v) + returndict['eventsource'] = toUnicode(v) if k in ('type', 'eventtype', 'category'): - returndict[u'category'] = toUnicode(v) + returndict['category'] = toUnicode(v) # custom fields as a list/array if k in ('fields', 'details'): if type(v) is not dict: - returndict[u'details'][u'message'] = v + returndict['details']['message'] = v else: if len(v) > 0: for details_key, details_value in v.iteritems(): - returndict[u'details'][details_key] = details_value + returndict['details'][details_key] = details_value # custom fields/details as a one off, not in an array # i.e. fields.something=value or details.something=value @@ -127,17 +127,17 @@ def keyMapping(aDict): # we let them dictate the data type with field_datatype # convention if newName.endswith('_int'): - returndict[u'details'][str(newName)] = int(v) + returndict['details'][str(newName)] = int(v) elif newName.endswith('_float'): - returndict[u'details'][str(newName)] = float(v) + returndict['details'][str(newName)] = float(v) else: - returndict[u'details'][str(newName)] = toUnicode(v) + returndict['details'][str(newName)] = toUnicode(v) # nxlog windows log handling if 'Domain' in aDict and 'SourceModuleType' in aDict: # nxlog parses all windows event fields very well # copy all fields to details - returndict[u'details'][k] = v + returndict['details'][k] = v if 'utctimestamp' not in returndict: # default in case we don't find a reasonable timestamp diff --git a/mq/esworker_papertrail.py b/mq/esworker_papertrail.py index 3d907bd55..f2b0f731e 100755 --- a/mq/esworker_papertrail.py +++ b/mq/esworker_papertrail.py @@ -124,54 +124,54 @@ def keyMapping(aDict): k = removeAt(k).lower() if k in ('message', 'summary'): - 
returndict[u'summary'] = toUnicode(v) + returndict['summary'] = toUnicode(v) if k in ('payload') and 'summary' not in aDict: # special case for heka if it sends payload as well as a summary, keep both but move payload to the details section. - returndict[u'summary'] = toUnicode(v) + returndict['summary'] = toUnicode(v) elif k in ('payload'): - returndict[u'details']['payload'] = toUnicode(v) + returndict['details']['payload'] = toUnicode(v) if k in ('eventtime', 'timestamp', 'utctimestamp'): - returndict[u'utctimestamp'] = toUTC(v).isoformat() - returndict[u'timestamp'] = toUTC(v).isoformat() + returndict['utctimestamp'] = toUTC(v).isoformat() + returndict['timestamp'] = toUTC(v).isoformat() if k in ('hostname', 'source_host', 'host'): - returndict[u'hostname'] = toUnicode(v) + returndict['hostname'] = toUnicode(v) if k in ('tags'): if len(v) > 0: - returndict[u'tags'] = v + returndict['tags'] = v # nxlog keeps the severity name in syslogseverity,everyone else should use severity or level. if k in ('syslogseverity', 'severity', 'severityvalue', 'level'): - returndict[u'severity'] = toUnicode(v).upper() + returndict['severity'] = toUnicode(v).upper() if k in ('facility', 'syslogfacility','source'): - returndict[u'source'] = toUnicode(v) + returndict['source'] = toUnicode(v) if k in ('pid', 'processid'): - returndict[u'processid'] = toUnicode(v) + returndict['processid'] = toUnicode(v) # nxlog sets sourcename to the processname (i.e. 
sshd), everyone else should call it process name or pname if k in ('pname', 'processname', 'sourcename'): - returndict[u'processname'] = toUnicode(v) + returndict['processname'] = toUnicode(v) # the file, or source if k in ('path', 'logger', 'file'): - returndict[u'eventsource'] = toUnicode(v) + returndict['eventsource'] = toUnicode(v) if k in ('type', 'eventtype', 'category'): - returndict[u'category'] = toUnicode(v) + returndict['category'] = toUnicode(v) # custom fields as a list/array if k in ('fields', 'details'): if type(v) is not dict: - returndict[u'details'][u'message'] = v + returndict['details']['message'] = v else: if len(v) > 0: for details_key, details_value in v.items(): - returndict[u'details'][details_key] = details_value + returndict['details'][details_key] = details_value # custom fields/details as a one off, not in an array # i.e. fields.something=value or details.something=value @@ -185,17 +185,17 @@ def keyMapping(aDict): # we let them dictate the data type with field_datatype # convention if newName.endswith('_int'): - returndict[u'details'][str(newName)] = int(v) + returndict['details'][str(newName)] = int(v) elif newName.endswith('_float'): - returndict[u'details'][str(newName)] = float(v) + returndict['details'][str(newName)] = float(v) else: - returndict[u'details'][str(newName)] = toUnicode(v) + returndict['details'][str(newName)] = toUnicode(v) # nxlog windows log handling if 'Domain' in aDict and 'SourceModuleType' in aDict: # nxlog parses all windows event fields very well # copy all fields to details - returndict[u'details'][k] = v + returndict['details'][k] = v if 'utctimestamp' not in returndict: # default in case we don't find a reasonable timestamp diff --git a/mq/esworker_sns_sqs.py b/mq/esworker_sns_sqs.py index 9120cfade..4e4739b58 100755 --- a/mq/esworker_sns_sqs.py +++ b/mq/esworker_sns_sqs.py @@ -128,11 +128,11 @@ def on_message(self, message): event['source'] = inside_message_value elif inside_message_key in ('fields', 
'details'): if type(inside_message_value) is not dict: - event[u'details'][u'message'] = inside_message_value + event['details']['message'] = inside_message_value else: if len(inside_message_value) > 0: for details_key, details_value in inside_message_value.items(): - event[u'details'][details_key] = details_value + event['details'][details_key] = details_value else: event['details'][inside_message_key] = inside_message_value except ValueError: diff --git a/mq/esworker_sqs.py b/mq/esworker_sqs.py index c00973837..4c66df401 100755 --- a/mq/esworker_sqs.py +++ b/mq/esworker_sqs.py @@ -63,54 +63,54 @@ def keyMapping(aDict): k = removeAt(k).lower() if k in ('message', 'summary'): - returndict[u'summary'] = toUnicode(v) + returndict['summary'] = toUnicode(v) if k in ('payload') and 'summary' not in aDict: # special case for heka if it sends payload as well as a summary, keep both but move payload to the details section. - returndict[u'summary'] = toUnicode(v) + returndict['summary'] = toUnicode(v) elif k in ('payload'): - returndict[u'details']['payload'] = toUnicode(v) + returndict['details']['payload'] = toUnicode(v) if k in ('eventtime', 'timestamp', 'utctimestamp'): - returndict[u'utctimestamp'] = toUTC(v).isoformat() - returndict[u'timestamp'] = toUTC(v).isoformat() + returndict['utctimestamp'] = toUTC(v).isoformat() + returndict['timestamp'] = toUTC(v).isoformat() if k in ('hostname', 'source_host', 'host'): - returndict[u'hostname'] = toUnicode(v) + returndict['hostname'] = toUnicode(v) if k in ('tags'): if len(v) > 0: - returndict[u'tags'] = v + returndict['tags'] = v # nxlog keeps the severity name in syslogseverity,everyone else should use severity or level. 
if k in ('syslogseverity', 'severity', 'severityvalue', 'level'): - returndict[u'severity'] = toUnicode(v).upper() + returndict['severity'] = toUnicode(v).upper() if k in ('facility', 'syslogfacility','source'): - returndict[u'source'] = toUnicode(v) + returndict['source'] = toUnicode(v) if k in ('pid', 'processid'): - returndict[u'processid'] = toUnicode(v) + returndict['processid'] = toUnicode(v) # nxlog sets sourcename to the processname (i.e. sshd), everyone else should call it process name or pname if k in ('pname', 'processname', 'sourcename'): - returndict[u'processname'] = toUnicode(v) + returndict['processname'] = toUnicode(v) # the file, or source if k in ('path', 'logger', 'file'): - returndict[u'eventsource'] = toUnicode(v) + returndict['eventsource'] = toUnicode(v) if k in ('type', 'eventtype', 'category'): - returndict[u'category'] = toUnicode(v) + returndict['category'] = toUnicode(v) # custom fields as a list/array if k in ('fields', 'details'): if type(v) is not dict: - returndict[u'details'][u'message'] = v + returndict['details']['message'] = v else: if len(v) > 0: for details_key, details_value in v.items(): - returndict[u'details'][details_key] = details_value + returndict['details'][details_key] = details_value # custom fields/details as a one off, not in an array # i.e. 
fields.something=value or details.something=value @@ -124,17 +124,17 @@ def keyMapping(aDict): # we let them dictate the data type with field_datatype # convention if newName.endswith('_int'): - returndict[u'details'][str(newName)] = int(v) + returndict['details'][str(newName)] = int(v) elif newName.endswith('_float'): - returndict[u'details'][str(newName)] = float(v) + returndict['details'][str(newName)] = float(v) else: - returndict[u'details'][str(newName)] = toUnicode(v) + returndict['details'][str(newName)] = toUnicode(v) # nxlog windows log handling if 'Domain' in aDict and 'SourceModuleType' in aDict: # nxlog parses all windows event fields very well # copy all fields to details - returndict[u'details'][k] = v + returndict['details'][k] = v if 'utctimestamp' not in returndict: # default in case we don't find a reasonable timestamp diff --git a/mq/plugins/broFixup.py b/mq/plugins/broFixup.py index 9944e1bf0..73883b387 100644 --- a/mq/plugins/broFixup.py +++ b/mq/plugins/broFixup.py @@ -50,7 +50,7 @@ def __init__(self): self.registration = ['bro'] self.priority = 5 try: - self.mozdefhostname = u'{0}'.format(node()) + self.mozdefhostname = '{0}'.format(node()) except: self.mozdefhostname = 'failed to fetch mozdefhostname' pass @@ -59,11 +59,11 @@ def onMessage(self, message, metadata): # make sure I really wanted to see this message # bail out early if not - if u'customendpoint' not in message: + if 'customendpoint' not in message: return message, metadata - if u'category' not in message: + if 'category' not in message: return message, metadata - if u'SOURCE' not in message: + if 'SOURCE' not in message: return message, metadata if message['category'] != 'bro': return message, metadata @@ -97,30 +97,30 @@ def onMessage(self, message, metadata): # add mandatory fields if 'ts' in newmessage['details']: - newmessage[u'utctimestamp'] = toUTC(float(newmessage['details']['ts'])).isoformat() - newmessage[u'timestamp'] = 
toUTC(float(newmessage['details']['ts'])).isoformat() + newmessage['utctimestamp'] = toUTC(float(newmessage['details']['ts'])).isoformat() + newmessage['timestamp'] = toUTC(float(newmessage['details']['ts'])).isoformat() # del(newmessage['details']['ts']) else: # a malformed message somehow managed to crawl to us, let's put it somewhat together - newmessage[u'utctimestamp'] = toUTC(datetime.now()).isoformat() - newmessage[u'timestamp'] = toUTC(datetime.now()).isoformat() + newmessage['utctimestamp'] = toUTC(datetime.now()).isoformat() + newmessage['timestamp'] = toUTC(datetime.now()).isoformat() - newmessage[u'receivedtimestamp'] = toUTC(datetime.now()).isoformat() - newmessage[u'eventsource'] = u'nsm' - newmessage[u'severity'] = u'INFO' - newmessage[u'mozdefhostname'] = self.mozdefhostname + newmessage['receivedtimestamp'] = toUTC(datetime.now()).isoformat() + newmessage['eventsource'] = 'nsm' + newmessage['severity'] = 'INFO' + newmessage['mozdefhostname'] = self.mozdefhostname if 'id.orig_h' in newmessage['details']: - newmessage[u'details'][u'sourceipaddress'] = newmessage['details']['id.orig_h'] + newmessage['details']['sourceipaddress'] = newmessage['details']['id.orig_h'] del(newmessage['details']['id.orig_h']) if 'id.orig_p' in newmessage['details']: - newmessage[u'details'][u'sourceport'] = newmessage['details']['id.orig_p'] + newmessage['details']['sourceport'] = newmessage['details']['id.orig_p'] del(newmessage['details']['id.orig_p']) if 'id.resp_h' in newmessage['details']: - newmessage[u'details'][u'destinationipaddress'] = newmessage['details']['id.resp_h'] + newmessage['details']['destinationipaddress'] = newmessage['details']['id.resp_h'] del(newmessage['details']['id.resp_h']) if 'id.resp_p' in newmessage['details']: - newmessage[u'details'][u'destinationport'] = newmessage['details']['id.resp_p'] + newmessage['details']['destinationport'] = newmessage['details']['id.resp_p'] del(newmessage['details']['id.resp_p']) if 'details' in newmessage: @@ 
-134,100 +134,100 @@ def onMessage(self, message, metadata): # All Bro logs need special treatment, so we provide it # Not a known log source? Mark it as such and return if 'source' not in newmessage: - newmessage['source'] = u'unknown' + newmessage['source'] = 'unknown' return newmessage, metadata else: logtype = newmessage['source'] if logtype == 'conn': - newmessage[u'details'][u'originipbytes'] = newmessage['details']['orig_ip_bytes'] - newmessage[u'details'][u'responseipbytes'] = newmessage['details']['resp_ip_bytes'] + newmessage['details']['originipbytes'] = newmessage['details']['orig_ip_bytes'] + newmessage['details']['responseipbytes'] = newmessage['details']['resp_ip_bytes'] del(newmessage['details']['orig_ip_bytes']) del(newmessage['details']['resp_ip_bytes']) if 'history' not in newmessage['details']: - newmessage['details'][u'history'] = '' - newmessage[u'summary'] = ( - u'{sourceipaddress}:'+ - u'{sourceport} -> '+ - u'{destinationipaddress}:' - u'{destinationport} '+ - u'{history} '+ - u'{originipbytes} bytes / ' - u'{responseipbytes} bytes' + newmessage['details']['history'] = '' + newmessage['summary'] = ( + '{sourceipaddress}:'+ + '{sourceport} -> '+ + '{destinationipaddress}:' + '{destinationport} '+ + '{history} '+ + '{originipbytes} bytes / ' + '{responseipbytes} bytes' ).format(**newmessage['details']) return (newmessage, metadata) if logtype == 'files': if 'rx_hosts' in newmessage['details']: - newmessage['details'][u'sourceipaddress'] = u'{0}'.format(newmessage['details']['rx_hosts'][0]) + newmessage['details']['sourceipaddress'] = '{0}'.format(newmessage['details']['rx_hosts'][0]) if 'tx_hosts' in newmessage['details']: - newmessage['details'][u'destinationipaddress'] = u'{0}'.format(newmessage['details']['tx_hosts'][0]) + newmessage['details']['destinationipaddress'] = '{0}'.format(newmessage['details']['tx_hosts'][0]) if 'mime_type' not in newmessage['details']: - newmessage['details'][u'mime_type'] = u'unknown' + 
newmessage['details']['mime_type'] = 'unknown' if 'filename' not in newmessage['details']: - newmessage['details'][u'filename'] = u'unknown' + newmessage['details']['filename'] = 'unknown' if 'total_bytes' not in newmessage['details']: - newmessage['details'][u'total_bytes'] = u'0' + newmessage['details']['total_bytes'] = '0' if 'md5' not in newmessage['details']: - newmessage['details'][u'md5'] = u'None' + newmessage['details']['md5'] = 'None' if 'filesource' not in newmessage['details']: - newmessage['details'][u'filesource'] = u'None' - newmessage[u'summary'] = ( - u'{rx_hosts[0]} ' - u'downloaded (MD5) ' - u'{md5} ' - u'MIME {mime_type} ' - u'({total_bytes} bytes) ' - u'from {tx_hosts[0]} ' - u'via {filesource}' + newmessage['details']['filesource'] = 'None' + newmessage['summary'] = ( + '{rx_hosts[0]} ' + 'downloaded (MD5) ' + '{md5} ' + 'MIME {mime_type} ' + '({total_bytes} bytes) ' + 'from {tx_hosts[0]} ' + 'via {filesource}' ).format(**newmessage['details']) return (newmessage, metadata) if logtype == 'dns': if 'qtype_name' not in newmessage['details']: - newmessage['details'][u'qtype_name'] = u'unknown' + newmessage['details']['qtype_name'] = 'unknown' if 'query' not in newmessage['details']: - newmessage['details'][u'query'] = u'' + newmessage['details']['query'] = '' if 'rcode_name' not in newmessage['details']: - newmessage['details'][u'rcode_name'] = u'' - newmessage[u'summary'] = ( - u'DNS {qtype_name} type query ' - u'{sourceipaddress} -> ' - u'{destinationipaddress}:{destinationport}' + newmessage['details']['rcode_name'] = '' + newmessage['summary'] = ( + 'DNS {qtype_name} type query ' + '{sourceipaddress} -> ' + '{destinationipaddress}:{destinationport}' ).format(**newmessage['details']) return (newmessage, metadata) if logtype == 'http': if 'method' not in newmessage['details']: - newmessage['details'][u'method'] = u'' + newmessage['details']['method'] = '' if 'host' not in newmessage['details']: - newmessage['details'][u'host'] = u'' + 
newmessage['details']['host'] = '' if 'uri' not in newmessage['details']: - newmessage['details'][u'uri'] = u'' - newmessage['details'][u'uri'] = newmessage['details'][u'uri'][:1024] + newmessage['details']['uri'] = '' + newmessage['details']['uri'] = newmessage['details']['uri'][:1024] if 'status_code' not in newmessage['details']: - newmessage['details'][u'status_code'] = u'' - newmessage[u'summary'] = ( - u'HTTP {method} ' - u'{sourceipaddress} -> ' - u'{destinationipaddress}:' - u'{destinationport}' + newmessage['details']['status_code'] = '' + newmessage['summary'] = ( + 'HTTP {method} ' + '{sourceipaddress} -> ' + '{destinationipaddress}:' + '{destinationport}' ).format(**newmessage['details']) return (newmessage, metadata) if logtype == 'ssl': if 'server_name' not in newmessage['details']: # fake it till you make it - newmessage['details'][u'server_name'] = newmessage['details']['destinationipaddress'] - newmessage[u'summary'] = ( - u'SSL: {sourceipaddress} -> ' - u'{destinationipaddress}:' - u'{destinationport}' + newmessage['details']['server_name'] = newmessage['details']['destinationipaddress'] + newmessage['summary'] = ( + 'SSL: {sourceipaddress} -> ' + '{destinationipaddress}:' + '{destinationport}' ).format(**newmessage['details']) return (newmessage, metadata) if logtype == 'dhcp': - newmessage[u'summary'] = ( + newmessage['summary'] = ( '{assigned_ip} assigned to ' '{mac}' ).format(**newmessage['details']) @@ -235,13 +235,13 @@ def onMessage(self, message, metadata): if logtype == 'ftp': if 'command' not in newmessage['details']: - newmessage['details'][u'command'] = u'' + newmessage['details']['command'] = '' if 'user' not in newmessage['details']: - newmessage['details'][u'user'] = u'' - newmessage[u'summary'] = ( - u'FTP: {sourceipaddress} -> ' - u'{destinationipaddress}:' - u'{destinationport}' + newmessage['details']['user'] = '' + newmessage['summary'] = ( + 'FTP: {sourceipaddress} -> ' + '{destinationipaddress}:' + '{destinationport}' 
).format(**newmessage['details']) return (newmessage, metadata) @@ -250,42 +250,42 @@ def onMessage(self, message, metadata): newmessage['details']['os'] = '' if 'subsystem' not in newmessage['details']: newmessage['details']['subsystem'] = '' - newmessage[u'summary'] = ( - u'PE file: {os} ' - u'{subsystem}' + newmessage['summary'] = ( + 'PE file: {os} ' + '{subsystem}' ).format(**newmessage['details']) return (newmessage, metadata) if logtype == 'smtp': - newmessage[u'summary'] = ( - u'SMTP: {sourceipaddress} -> ' - u'{destinationipaddress}:' - u'{destinationport}' + newmessage['summary'] = ( + 'SMTP: {sourceipaddress} -> ' + '{destinationipaddress}:' + '{destinationport}' ).format(**newmessage['details']) return (newmessage, metadata) if logtype == 'ssh': summary = ( - u'SSH: {sourceipaddress} -> ' - u'{destinationipaddress}:' - u'{destinationport}' + 'SSH: {sourceipaddress} -> ' + '{destinationipaddress}:' + '{destinationport}' ).format(**newmessage['details']) if 'auth_success' in newmessage['details']: - summary += u' success {0}'.format(newmessage['details']['auth_success']) - newmessage[u'summary'] = summary + summary += ' success {0}'.format(newmessage['details']['auth_success']) + newmessage['summary'] = summary return (newmessage, metadata) if logtype == 'tunnel': if 'tunnel_type' not in newmessage['details']: - newmessage['details'][u'tunnel_type'] = u'' + newmessage['details']['tunnel_type'] = '' if 'action' not in newmessage['details']: - newmessage['details'][u'action'] = u'' - newmessage[u'summary'] = ( - u'{sourceipaddress} -> ' - u'{destinationipaddress}:' - u'{destinationport} ' - u'{tunnel_type} ' - u'{action}' + newmessage['details']['action'] = '' + newmessage['summary'] = ( + '{sourceipaddress} -> ' + '{destinationipaddress}:' + '{destinationport} ' + '{tunnel_type} ' + '{action}' ).format(**newmessage['details']) return (newmessage, metadata) @@ -294,50 +294,50 @@ def onMessage(self, message, metadata): newmessage['details']['seenindicator'] 
= newmessage['details']['seen.indicator'] del(newmessage['details']['seen.indicator']) else: - newmessage['details'][u'seenindicator'] = u'' + newmessage['details']['seenindicator'] = '' if 'seen.node' in newmessage['details']: - newmessage['details'][u'seennode'] = newmessage['details']['seen.node'] + newmessage['details']['seennode'] = newmessage['details']['seen.node'] del(newmessage['details']['seen.node']) if 'seen.where' in newmessage['details']: - newmessage['details'][u'seenwhere'] = newmessage['details']['seen.where'] + newmessage['details']['seenwhere'] = newmessage['details']['seen.where'] del(newmessage['details']['seen.where']) if 'seen.indicator_type' in newmessage['details']: - newmessage['details'][u'seenindicatortype'] = newmessage['details']['seen.indicator_type'] + newmessage['details']['seenindicatortype'] = newmessage['details']['seen.indicator_type'] del(newmessage['details']['seen.indicator_type']) - newmessage[u'summary'] = ( - u'Bro intel match ' - u'of {seenindicatortype} ' - u'in {seenwhere}' + newmessage['summary'] = ( + 'Bro intel match ' + 'of {seenindicatortype} ' + 'in {seenwhere}' ).format(**newmessage['details']) return (newmessage, metadata) if logtype == 'known_certs': if 'serial' not in newmessage['details']: - newmessage['details'][u'serial'] = u'0' - newmessage[u'summary'] = ( - u'Certificate X509 seen from: ' - u'{host}:' - u'{port_num}' + newmessage['details']['serial'] = '0' + newmessage['summary'] = ( + 'Certificate X509 seen from: ' + '{host}:' + '{port_num}' ).format(**newmessage['details']) return (newmessage, metadata) if logtype == 'known_devices': if 'mac' not in newmessage['details']: - newmessage['details'][u'mac'] = u'' + newmessage['details']['mac'] = '' if 'dhcp_host_name' not in newmessage['details']: - newmessage['details'][u'dhcp_host_name'] = u'' - newmessage[u'summary'] = ( - u'New host: ' - u'{mac}' + newmessage['details']['dhcp_host_name'] = '' + newmessage['summary'] = ( + 'New host: ' + '{mac}' 
).format(**newmessage['details']) return (newmessage, metadata) if logtype == 'known_hosts': if 'host' not in newmessage['details']: - newmessage['details'][u'host'] = u'' - newmessage[u'summary'] = ( - u'New host: ' - u'{host}' + newmessage['details']['host'] = '' + newmessage['summary'] = ( + 'New host: ' + '{host}' ).format(**newmessage['details']) return (newmessage, metadata) @@ -345,31 +345,31 @@ def onMessage(self, message, metadata): if 'service' not in newmessage['details']: newmessage['details']['service'] = [] if not newmessage['details']['service']: - newmessage['details'][u'service'] = [u'Unknown'] + newmessage['details']['service'] = ['Unknown'] if 'host' not in newmessage['details']: - newmessage['details'][u'host'] = u'unknown' + newmessage['details']['host'] = 'unknown' if 'port_num' not in newmessage['details']: - newmessage['details'][u'port_num'] = u'0' + newmessage['details']['port_num'] = '0' if 'port_proto' not in newmessage['details']: - newmessage['details'][u'port_proto'] = u'' - newmessage[u'summary'] = ( - u'New service: ' - u'{service[0]} ' - u'on host ' - u'{host}:' - u'{port_num} / ' - u'{port_proto}' + newmessage['details']['port_proto'] = '' + newmessage['summary'] = ( + 'New service: ' + '{service[0]} ' + 'on host ' + '{host}:' + '{port_num} / ' + '{port_proto}' ).format(**newmessage['details']) return (newmessage, metadata) if logtype == 'notice': - newmessage['details'][u'indicators'] = [] + newmessage['details']['indicators'] = [] if 'sub' not in newmessage['details']: - newmessage['details'][u'sub'] = u'' + newmessage['details']['sub'] = '' if 'msg' not in newmessage['details']: - newmessage['details'][u'msg'] = u'' + newmessage['details']['msg'] = '' if 'note' not in newmessage['details']: - newmessage['details'][u'note'] = u'' + newmessage['details']['note'] = '' # clean up the action notice IP addresses if 'actions' in newmessage['details']: if newmessage['details']['actions'] == "Notice::ACTION_LOG": @@ -379,82 +379,82 @@ 
def onMessage(self, message, metadata): # remove the details.src field and add it to indicators # as it may not be the actual source. if 'src' in newmessage['details']: - if isIPv4(newmessage[u'details'][u'src']): - newmessage[u'details'][u'indicators'].append(newmessage[u'details'][u'src']) + if isIPv4(newmessage['details']['src']): + newmessage['details']['indicators'].append(newmessage['details']['src']) # If details.src is present overwrite the source IP address with it - newmessage[u'details'][u'sourceipaddress'] = newmessage[u'details'][u'src'] - newmessage[u'details'][u'sourceipv4address'] = newmessage[u'details'][u'src'] - if isIPv6(newmessage[u'details'][u'src']): - newmessage[u'details'][u'indicators'].append(newmessage[u'details'][u'src']) + newmessage['details']['sourceipaddress'] = newmessage['details']['src'] + newmessage['details']['sourceipv4address'] = newmessage['details']['src'] + if isIPv6(newmessage['details']['src']): + newmessage['details']['indicators'].append(newmessage['details']['src']) # If details.src is present overwrite the source IP address with it - newmessage[u'details'][u'sourceipv6address'] = newmessage[u'details'][u'src'] - del newmessage[u'details'][u'src'] + newmessage['details']['sourceipv6address'] = newmessage['details']['src'] + del newmessage['details']['src'] sumstruct = {} - sumstruct['note'] = newmessage['details'][u'note'] + sumstruct['note'] = newmessage['details']['note'] if 'sourceipv6address' in newmessage['details']: sumstruct['src'] = newmessage['details']['sourceipv6address'] else: if 'sourceipv4address' in newmessage['details']: sumstruct['src'] = newmessage['details']['sourceipv4address'] else: - sumstruct['src'] = u'unknown' + sumstruct['src'] = 'unknown' if 'dst' in newmessage['details']: sumstruct['dst'] = newmessage['details']['dst'] - del(newmessage[u'details'][u'dst']) - if isIPv4(sumstruct[u'dst']): - newmessage['details'][u'destinationipaddress'] = sumstruct['dst'] - 
newmessage['details'][u'destinationipv4address'] = sumstruct['dst'] - if isIPv6(sumstruct[u'dst']): - newmessage['details'][u'destinationipv6address'] = sumstruct['dst'] + del(newmessage['details']['dst']) + if isIPv4(sumstruct['dst']): + newmessage['details']['destinationipaddress'] = sumstruct['dst'] + newmessage['details']['destinationipv4address'] = sumstruct['dst'] + if isIPv6(sumstruct['dst']): + newmessage['details']['destinationipv6address'] = sumstruct['dst'] else: - sumstruct['dst'] = u'unknown' + sumstruct['dst'] = 'unknown' if 'p' in newmessage['details']: sumstruct['p'] = newmessage['details']['p'] else: - sumstruct['p'] = u'unknown' - newmessage[u'summary'] = ( - u'{note} ' - u'source {src} ' - u'destination {dst} ' - u'port {p}' + sumstruct['p'] = 'unknown' + newmessage['summary'] = ( + '{note} ' + 'source {src} ' + 'destination {dst} ' + 'port {p}' ).format(**sumstruct) # Thank you for your service return (newmessage, metadata) if logtype == 'rdp': if 'cookie' not in newmessage['details']: - newmessage['details'][u'cookie'] = u'unknown' - newmessage[u'summary'] = ( - u'RDP: {sourceipaddress} -> ' - u'{destinationipaddress}:' - u'{destinationport}' + newmessage['details']['cookie'] = 'unknown' + newmessage['summary'] = ( + 'RDP: {sourceipaddress} -> ' + '{destinationipaddress}:' + '{destinationport}' ).format(**newmessage['details']) return (newmessage, metadata) if logtype == 'sip': if 'status_msg' not in newmessage['details']: - newmessage['details'][u'status_msg'] = u'unknown' + newmessage['details']['status_msg'] = 'unknown' if 'uri' not in newmessage['details']: - newmessage['details'][u'uri'] = u'unknown' + newmessage['details']['uri'] = 'unknown' if 'method' not in newmessage['details']: - newmessage['details'][u'method'] = u'unknown' - newmessage[u'summary'] = ( - u'SIP: {sourceipaddress} -> ' - u'{destinationipaddress}:' - u'{destinationport} ' - u'method {method} ' - u'status {status_msg}' + newmessage['details']['method'] = 'unknown' + 
newmessage['summary'] = ( + 'SIP: {sourceipaddress} -> ' + '{destinationipaddress}:' + '{destinationport} ' + 'method {method} ' + 'status {status_msg}' ).format(**newmessage['details']) return (newmessage, metadata) if logtype == 'software': newmessage['details']['parsed_version'] = {} if 'name' not in newmessage['details']: - newmessage['details'][u'name'] = u'unparsed' + newmessage['details']['name'] = 'unparsed' if 'software_type' not in newmessage['details']: - newmessage['details'][u'software_type'] = u'unknown' + newmessage['details']['software_type'] = 'unknown' if 'host' not in newmessage['details']: - newmessage['details'] = u'' + newmessage['details'] = '' if 'version.addl' in newmessage['details']: newmessage['details']['parsed_version']['addl'] = newmessage['details']['version.addl'] del(newmessage['details']['version.addl']) @@ -470,96 +470,96 @@ def onMessage(self, message, metadata): if 'version.minor3' in newmessage['details']: newmessage['details']['parsed_version']['minor3'] = newmessage['details']['version.minor3'] del(newmessage['details']['version.minor3']) - newmessage[u'summary'] = ( - u'Found {software_type} software ' - u'on {host}' + newmessage['summary'] = ( + 'Found {software_type} software ' + 'on {host}' ).format(**newmessage['details']) return (newmessage, metadata) if logtype == 'socks': if 'version' not in newmessage['details']: - newmessage['details'][u'version'] = u'0' + newmessage['details']['version'] = '0' if 'status' not in newmessage['details']: - newmessage['details'][u'status'] = u'unknown' - newmessage[u'summary'] = ( - u'SOCKSv{version}: ' - u'{sourceipaddress} -> ' - u'{destinationipaddress}:' - u'{destinationport} ' - u'status {status}' + newmessage['details']['status'] = 'unknown' + newmessage['summary'] = ( + 'SOCKSv{version}: ' + '{sourceipaddress} -> ' + '{destinationipaddress}:' + '{destinationport} ' + 'status {status}' ).format(**newmessage['details']) return (newmessage, metadata) if logtype == 'dce_rpc': if 
'endpoint' not in newmessage['details']: - newmessage['details'][u'endpoint'] = u'unknown' + newmessage['details']['endpoint'] = 'unknown' if 'operation' not in newmessage['details']: - newmessage['details'][u'operation'] = u'unknown' - newmessage[u'summary'] = ( - u'DCERPC: {sourceipaddress} -> ' - u'{destinationipaddress}:' - u'{destinationport}' + newmessage['details']['operation'] = 'unknown' + newmessage['summary'] = ( + 'DCERPC: {sourceipaddress} -> ' + '{destinationipaddress}:' + '{destinationport}' ).format(**newmessage['details']) return (newmessage, metadata) if logtype == 'kerberos': if 'request_type' not in newmessage['details']: - newmessage['details'][u'request_type'] = u'unknown' + newmessage['details']['request_type'] = 'unknown' if 'client' not in newmessage['details']: - newmessage['details'][u'client'] = u'unknown' + newmessage['details']['client'] = 'unknown' if 'service' not in newmessage['details']: - newmessage['details'][u'service'] = u'unknown' + newmessage['details']['service'] = 'unknown' if 'success' not in newmessage['details']: - newmessage['details'][u'success'] = u'unknown' + newmessage['details']['success'] = 'unknown' if 'error_msg' not in newmessage['details']: - newmessage['details'][u'error_msg'] = u'' - newmessage[u'summary'] = ( - u'{sourceipaddress} -> ' - u'{destinationipaddress}:' - u'{destinationport} ' - u'request {request_type} ' - u'success {success}' + newmessage['details']['error_msg'] = '' + newmessage['summary'] = ( + '{sourceipaddress} -> ' + '{destinationipaddress}:' + '{destinationport} ' + 'request {request_type} ' + 'success {success}' ).format(**newmessage['details']) return (newmessage, metadata) if logtype == 'ntlm': - newmessage['details'][u'ntlm'] = {} + newmessage['details']['ntlm'] = {} if 'domainname' in newmessage['details']: - newmessage['details'][u'ntlm'][u'domainname'] = newmessage['details']['domainname'] + newmessage['details']['ntlm']['domainname'] = newmessage['details']['domainname'] 
del(newmessage['details']['domainname']) else: - newmessage['details'][u'ntlm'][u'domainname'] = u'unknown' + newmessage['details']['ntlm']['domainname'] = 'unknown' if 'hostname' in newmessage['details']: - newmessage['details'][u'ntlm'][u'hostname'] = newmessage['details']['hostname'] + newmessage['details']['ntlm']['hostname'] = newmessage['details']['hostname'] del(newmessage['details']['hostname']) else: - newmessage['details'][u'ntlm'][u'hostname'] = u'unknown' + newmessage['details']['ntlm']['hostname'] = 'unknown' if 'username' in newmessage['details']: - newmessage['details'][u'ntlm'][u'username'] = newmessage['details']['username'] + newmessage['details']['ntlm']['username'] = newmessage['details']['username'] del(newmessage['details']['username']) else: - newmessage['details'][u'ntlm'][u'username'] = u'unknown' + newmessage['details']['ntlm']['username'] = 'unknown' if 'success' not in newmessage['details']: - newmessage['details'][u'success'] = u'unknown' + newmessage['details']['success'] = 'unknown' if 'status' not in newmessage['details']: - newmessage['details'][u'status'] = u'unknown' - newmessage[u'summary'] = ( - u'NTLM: {sourceipaddress} -> ' - u'{destinationipaddress}:' - u'{destinationport} ' - u'success {success} ' - u'status {status}' + newmessage['details']['status'] = 'unknown' + newmessage['summary'] = ( + 'NTLM: {sourceipaddress} -> ' + '{destinationipaddress}:' + '{destinationport} ' + 'success {success} ' + 'status {status}' ).format(**newmessage['details']) return (newmessage, metadata) if logtype == 'smb_files': newmessage['details']['smbtimes'] = {} if 'path' not in newmessage['details']: - newmessage['details'][u'path'] = u'' + newmessage['details']['path'] = '' if 'name' not in newmessage['details']: - newmessage['details'][u'name'] = u'' + newmessage['details']['name'] = '' if 'action' not in newmessage['details']: - newmessage['details'][u'action'] = u'' + newmessage['details']['action'] = '' if 'times.accessed' in 
newmessage['details']: newmessage['details']['smbtimes']['accessed'] = toUTC(float(newmessage['details']['times.accessed'])).isoformat() del(newmessage['details']['times.accessed']) @@ -572,32 +572,32 @@ def onMessage(self, message, metadata): if 'times.modified' in newmessage['details']: newmessage['details']['smbtimes']['modified'] = toUTC(float(newmessage['details']['times.modified'])).isoformat() del(newmessage['details']['times.modified']) - newmessage[u'summary'] = ( + newmessage['summary'] = ( 'SMB file: ' - u'{sourceipaddress} -> ' - u'{destinationipaddress}:' - u'{destinationport} ' - u'{action}' + '{sourceipaddress} -> ' + '{destinationipaddress}:' + '{destinationport} ' + '{action}' ).format(**newmessage['details']) return(newmessage, metadata) if logtype == 'smb_mapping': if 'share_type' not in newmessage['details']: - newmessage['details'][u'share_type'] = u'' + newmessage['details']['share_type'] = '' if 'path' not in newmessage['details']: - newmessage['details'][u'path'] = u'' - newmessage[u'summary'] = ( + newmessage['details']['path'] = '' + newmessage['summary'] = ( 'SMB mapping: ' - u'{sourceipaddress} -> ' - u'{destinationipaddress}:' - u'{destinationport} ' - u'{share_type}' + '{sourceipaddress} -> ' + '{destinationipaddress}:' + '{destinationport} ' + '{share_type}' ).format(**newmessage['details']) return(newmessage, metadata) if logtype == 'snmp': if 'version' not in newmessage['details']: - newmessage['details'][u'version'] = u'Unknown' + newmessage['details']['version'] = 'Unknown' if 'get_bulk_requests' not in newmessage['details']: newmessage['details']['get_bulk_requests'] = 0 if 'get_requests' not in newmessage['details']: @@ -606,62 +606,62 @@ def onMessage(self, message, metadata): newmessage['details']['set_requests'] = 0 if 'get_responses' not in newmessage['details']: newmessage['details']['get_responses'] = 0 - newmessage['details']['getreqestssum'] = u'{0}'.format(newmessage['details']['get_bulk_requests'] + 
newmessage['details']['get_requests']) - newmessage[u'summary'] = ( - u'SNMPv{version}: ' - u'{sourceipaddress} -> ' - u'{destinationipaddress}:' - u'{destinationport} ' - u'({getreqestssum} get / ' - u'{set_requests} set requests ' - u'{get_responses} get responses)' + newmessage['details']['getreqestssum'] = '{0}'.format(newmessage['details']['get_bulk_requests'] + newmessage['details']['get_requests']) + newmessage['summary'] = ( + 'SNMPv{version}: ' + '{sourceipaddress} -> ' + '{destinationipaddress}:' + '{destinationport} ' + '({getreqestssum} get / ' + '{set_requests} set requests ' + '{get_responses} get responses)' ).format(**newmessage['details']) return (newmessage, metadata) if logtype == 'x509': - newmessage['details'][u'certificate'] = {} + newmessage['details']['certificate'] = {} if 'basic_constraints.ca' in newmessage['details']: - newmessage['details'][u'certificate'][u'basic_constraints_ca'] = newmessage['details'][u'basic_constraints.ca'] - del(newmessage['details'][u'basic_constraints.ca']) + newmessage['details']['certificate']['basic_constraints_ca'] = newmessage['details']['basic_constraints.ca'] + del(newmessage['details']['basic_constraints.ca']) if 'basic_constraints.path_len' in newmessage['details']: - newmessage['details'][u'certificate'][u'basic_constraints_path_len'] = newmessage['details'][u'basic_constraints.path_len'] - del(newmessage['details'][u'basic_constraints.path_len']) + newmessage['details']['certificate']['basic_constraints_path_len'] = newmessage['details']['basic_constraints.path_len'] + del(newmessage['details']['basic_constraints.path_len']) if 'certificate.exponent' in newmessage['details']: - newmessage['details'][u'certificate'][u'exponent'] = newmessage['details'][u'certificate.exponent'] - del(newmessage['details'][u'certificate.exponent']) + newmessage['details']['certificate']['exponent'] = newmessage['details']['certificate.exponent'] + del(newmessage['details']['certificate.exponent']) if 'certificate.issuer' 
in newmessage['details']: - newmessage['details'][u'certificate'][u'issuer'] = newmessage['details'][u'certificate.issuer'] - del(newmessage['details'][u'certificate.issuer']) + newmessage['details']['certificate']['issuer'] = newmessage['details']['certificate.issuer'] + del(newmessage['details']['certificate.issuer']) if 'certificate.key_alg' in newmessage['details']: - newmessage['details'][u'certificate'][u'key_alg'] = newmessage['details'][u'certificate.key_alg'] - del(newmessage['details'][u'certificate.key_alg']) + newmessage['details']['certificate']['key_alg'] = newmessage['details']['certificate.key_alg'] + del(newmessage['details']['certificate.key_alg']) if 'certificate.key_length' in newmessage['details']: - newmessage['details'][u'certificate'][u'key_length'] = newmessage['details'][u'certificate.key_length'] - del(newmessage['details'][u'certificate.key_length']) + newmessage['details']['certificate']['key_length'] = newmessage['details']['certificate.key_length'] + del(newmessage['details']['certificate.key_length']) if 'certificate.key_type' in newmessage['details']: - newmessage['details'][u'certificate'][u'key_type'] = newmessage['details'][u'certificate.key_type'] - del(newmessage['details'][u'certificate.key_type']) + newmessage['details']['certificate']['key_type'] = newmessage['details']['certificate.key_type'] + del(newmessage['details']['certificate.key_type']) if 'certificate.not_valid_after' in newmessage['details']: - newmessage['details'][u'certificate'][u'not_valid_after'] = toUTC(float(newmessage['details'][u'certificate.not_valid_after'])).isoformat() - del(newmessage['details'][u'certificate.not_valid_after']) + newmessage['details']['certificate']['not_valid_after'] = toUTC(float(newmessage['details']['certificate.not_valid_after'])).isoformat() + del(newmessage['details']['certificate.not_valid_after']) if 'certificate.not_valid_before' in newmessage['details']: - newmessage['details'][u'certificate'][u'not_valid_before'] = 
toUTC(float(newmessage['details'][u'certificate.not_valid_before'])).isoformat() - del(newmessage['details'][u'certificate.not_valid_before']) + newmessage['details']['certificate']['not_valid_before'] = toUTC(float(newmessage['details']['certificate.not_valid_before'])).isoformat() + del(newmessage['details']['certificate.not_valid_before']) if 'certificate.sig_alg' in newmessage['details']: - newmessage['details'][u'certificate'][u'sig_alg'] = newmessage['details'][u'certificate.sig_alg'] - del(newmessage['details'][u'certificate.sig_alg']) + newmessage['details']['certificate']['sig_alg'] = newmessage['details']['certificate.sig_alg'] + del(newmessage['details']['certificate.sig_alg']) if 'certificate.subject' in newmessage['details']: - newmessage['details'][u'certificate'][u'subject'] = newmessage['details'][u'certificate.subject'] - del(newmessage['details'][u'certificate.subject']) + newmessage['details']['certificate']['subject'] = newmessage['details']['certificate.subject'] + del(newmessage['details']['certificate.subject']) if 'certificate.version' in newmessage['details']: - newmessage['details'][u'certificate'][u'version'] = newmessage['details'][u'certificate.version'] - del(newmessage['details'][u'certificate.version']) + newmessage['details']['certificate']['version'] = newmessage['details']['certificate.version'] + del(newmessage['details']['certificate.version']) if 'certificate.serial' in newmessage['details']: - newmessage['details'][u'certificate'][u'serial'] = newmessage['details'][u'certificate.serial'] - del(newmessage['details'][u'certificate.serial']) + newmessage['details']['certificate']['serial'] = newmessage['details']['certificate.serial'] + del(newmessage['details']['certificate.serial']) else: - newmessage['details'][u'certificate'][u'serial'] = u'0' - newmessage[u'summary'] = ( + newmessage['details']['certificate']['serial'] = '0' + newmessage['summary'] = ( 'X509 certificate seen' ).format(**newmessage['details']['certificate']) 
return (newmessage, metadata) diff --git a/mq/plugins/squidFixup.py b/mq/plugins/squidFixup.py index 063258c80..efce50a81 100644 --- a/mq/plugins/squidFixup.py +++ b/mq/plugins/squidFixup.py @@ -24,7 +24,7 @@ def __init__(self): self.registration = ["squid"] self.priority = 5 try: - self.mozdefhostname = u"{0}".format(node()) + self.mozdefhostname = "{0}".format(node()) except: self.mozdefhostname = "failed to fetch mozdefhostname" pass @@ -66,9 +66,9 @@ def onMessage(self, message, metadata): # make sure I really wanted to see this message # bail out early if not - if u"customendpoint" not in message: + if "customendpoint" not in message: return message, metadata - if u"category" not in message: + if "category" not in message: return message, metadata if message["category"] != "proxy": return message, metadata @@ -79,7 +79,7 @@ def onMessage(self, message, metadata): # Set NSM as type for categorical filtering of events. newmessage["type"] = "squid" - newmessage[u"mozdefhostname"] = self.mozdefhostname + newmessage["mozdefhostname"] = self.mozdefhostname newmessage["details"] = {} # move some fields that are expected at the event 'root' where they belong @@ -89,12 +89,12 @@ def onMessage(self, message, metadata): newmessage["tags"] = message["tags"] if "category" in message: newmessage["category"] = message["category"] - newmessage[u"customendpoint"] = message["customendpoint"] - newmessage[u"source"] = u"unknown" + newmessage["customendpoint"] = message["customendpoint"] + newmessage["source"] = "unknown" if "source" in message: - newmessage[u"source"] = message["source"] + newmessage["source"] = message["source"] if "MESSAGE" in message: - newmessage[u"summary"] = message["MESSAGE"] + newmessage["summary"] = message["MESSAGE"] if newmessage["source"] == "access": # http://www.squid-cache.org/Doc/config/logformat/ @@ -103,42 +103,42 @@ def onMessage(self, message, metadata): line = message["MESSAGE"].strip() tokens = line.split() - 
newmessage[u"details"][u"duration"] = float(tokens[1]) / 1000.0 - newmessage[u"details"][u"sourceipaddress"] = tokens[2] - newmessage[u"details"][u"sourceport"] = int(self.create_int(tokens[3])) + newmessage["details"]["duration"] = float(tokens[1]) / 1000.0 + newmessage["details"]["sourceipaddress"] = tokens[2] + newmessage["details"]["sourceport"] = int(self.create_int(tokens[3])) if self.isIPv4(tokens[4]): - newmessage[u"details"][u"destinationipaddress"] = tokens[4] + newmessage["details"]["destinationipaddress"] = tokens[4] else: - newmessage[u"details"][u"destinationipaddress"] = u"0.0.0.0" - newmessage[u"details"][u"proxyaction"] = tokens[6] - if newmessage[u"details"][u"proxyaction"] != "TCP_DENIED": - newmessage[u"details"][u"destinationport"] = int(self.create_int(tokens[5])) - newmessage[u"details"][u"host"] = tokens[13] + newmessage["details"]["destinationipaddress"] = "0.0.0.0" + newmessage["details"]["proxyaction"] = tokens[6] + if newmessage["details"]["proxyaction"] != "TCP_DENIED": + newmessage["details"]["destinationport"] = int(self.create_int(tokens[5])) + newmessage["details"]["host"] = tokens[13] else: (fqdn, dstport) = self.tokenize_url(tokens[11]) - newmessage[u"details"][u"destinationport"] = dstport - newmessage[u"details"][u"host"] = fqdn - newmessage[u"details"][u"status"] = tokens[7] - newmessage[u"details"][u"requestsize"] = int(tokens[8]) - newmessage[u"details"][u"responsesize"] = int(tokens[9]) + newmessage["details"]["destinationport"] = dstport + newmessage["details"]["host"] = fqdn + newmessage["details"]["status"] = tokens[7] + newmessage["details"]["requestsize"] = int(tokens[8]) + newmessage["details"]["responsesize"] = int(tokens[9]) method = tokens[10] - newmessage[u"details"][u"method"] = method - newmessage[u"details"][u"destination"] = tokens[11] + newmessage["details"]["method"] = method + newmessage["details"]["destination"] = tokens[11] proto = tokens[12] if proto == "-" and method == "CONNECT": proto = "ssl" - 
newmessage[u"details"][u"proto"] = proto - newmessage[u"details"][u"mimetype"] = tokens[14] - newmessage[u"utctimestamp"] = ( + newmessage["details"]["proto"] = proto + newmessage["details"]["mimetype"] = tokens[14] + newmessage["utctimestamp"] = ( toUTC(float(tokens[0])) - timedelta(milliseconds=float(tokens[1])) ).isoformat() - newmessage[u"timestamp"] = ( + newmessage["timestamp"] = ( toUTC(float(tokens[0])) - timedelta(milliseconds=float(tokens[1])) ).isoformat() # add mandatory fields - newmessage[u"receivedtimestamp"] = toUTC(datetime.now()).isoformat() - newmessage[u"eventsource"] = u"squid" - newmessage[u"severity"] = u"INFO" + newmessage["receivedtimestamp"] = toUTC(datetime.now()).isoformat() + newmessage["eventsource"] = "squid" + newmessage["severity"] = "INFO" return (newmessage, metadata) diff --git a/mq/plugins/suricataFixup.py b/mq/plugins/suricataFixup.py index fbba2c495..f94fb8dfe 100644 --- a/mq/plugins/suricataFixup.py +++ b/mq/plugins/suricataFixup.py @@ -21,7 +21,7 @@ def __init__(self): self.registration = ['suricata'] self.priority = 5 try: - self.mozdefhostname = u'{0}'.format(node()) + self.mozdefhostname = '{0}'.format(node()) except: self.mozdefhostname = 'failed to fetch mozdefhostname' pass @@ -30,9 +30,9 @@ def onMessage(self, message, metadata): # make sure I really wanted to see this message # bail out early if not - if u'customendpoint' not in message: + if 'customendpoint' not in message: return message, metadata - if u'category' not in message: + if 'category' not in message: return message, metadata if message['category'] != 'suricata': return message, metadata @@ -56,49 +56,49 @@ def onMessage(self, message, metadata): newmessage['tags'] = message['tags'] if 'category' in message: newmessage['category'] = message['category'] - newmessage[u'source'] = u'unknown' + newmessage['source'] = 'unknown' if 'source' in message: - newmessage[u'source'] = message['source'] + newmessage['source'] = message['source'] logtype = 
newmessage['source'] - newmessage[u'event_type'] = u'unknown' + newmessage['event_type'] = 'unknown' if 'event_type' in message: - newmessage[u'event_type'] = message['event_type'] + newmessage['event_type'] = message['event_type'] eventtype = newmessage['event_type'] # add mandatory fields if 'flow' in newmessage['details']: if 'start' in newmessage['details']['flow']: - newmessage[u'utctimestamp'] = toUTC(newmessage['details']['flow']['start']).isoformat() - newmessage[u'timestamp'] = toUTC(newmessage['details']['flow']['start']).isoformat() + newmessage['utctimestamp'] = toUTC(newmessage['details']['flow']['start']).isoformat() + newmessage['timestamp'] = toUTC(newmessage['details']['flow']['start']).isoformat() else: # a malformed message somehow managed to crawl to us, let's put it somewhat together - newmessage[u'utctimestamp'] = toUTC(datetime.now()).isoformat() - newmessage[u'timestamp'] = toUTC(datetime.now()).isoformat() + newmessage['utctimestamp'] = toUTC(datetime.now()).isoformat() + newmessage['timestamp'] = toUTC(datetime.now()).isoformat() - newmessage[u'receivedtimestamp'] = toUTC(datetime.now()).isoformat() - newmessage[u'eventsource'] = u'nsm' - newmessage[u'severity'] = u'INFO' - newmessage[u'mozdefhostname'] = self.mozdefhostname + newmessage['receivedtimestamp'] = toUTC(datetime.now()).isoformat() + newmessage['eventsource'] = 'nsm' + newmessage['severity'] = 'INFO' + newmessage['mozdefhostname'] = self.mozdefhostname if 'details' in newmessage: - newmessage[u'details'][u'sourceipaddress'] = "0.0.0.0" - newmessage[u'details'][u'destinationipaddress'] = "0.0.0.0" - newmessage[u'details'][u'sourceport'] = 0 - newmessage[u'details'][u'destinationport'] = 0 - if 'alert' in newmessage[u'details']: - newmessage[u'details'][u'suricata_alert'] = newmessage[u'details'][u'alert'] - del(newmessage[u'details'][u'alert']) + newmessage['details']['sourceipaddress'] = "0.0.0.0" + newmessage['details']['destinationipaddress'] = "0.0.0.0" + 
newmessage['details']['sourceport'] = 0 + newmessage['details']['destinationport'] = 0 + if 'alert' in newmessage['details']: + newmessage['details']['suricata_alert'] = newmessage['details']['alert'] + del(newmessage['details']['alert']) if 'src_ip' in newmessage['details']: - newmessage[u'details'][u'sourceipaddress'] = newmessage['details']['src_ip'] + newmessage['details']['sourceipaddress'] = newmessage['details']['src_ip'] del(newmessage['details']['src_ip']) if 'src_port' in newmessage['details']: - newmessage[u'details'][u'sourceport'] = newmessage['details']['src_port'] + newmessage['details']['sourceport'] = newmessage['details']['src_port'] del(newmessage['details']['src_port']) if 'dest_ip' in newmessage['details']: - newmessage[u'details'][u'destinationipaddress'] = newmessage['details']['dest_ip'] + newmessage['details']['destinationipaddress'] = newmessage['details']['dest_ip'] del(newmessage['details']['dest_ip']) if 'dest_port' in newmessage['details']: - newmessage[u'details'][u'destinationport'] = newmessage['details']['dest_port'] + newmessage['details']['destinationport'] = newmessage['details']['dest_port'] del(newmessage['details']['dest_port']) if 'file_name' in newmessage['details']: @@ -111,59 +111,59 @@ def onMessage(self, message, metadata): if logtype == 'eve-log': if eventtype == 'alert': # Truncate packet, payload and payload_printable to reasonable sizes - if 'packet' in newmessage[u'details']: - newmessage[u'details'][u'packet'] = newmessage[u'details'][u'packet'][0:4095] - if 'payload' in newmessage[u'details']: - newmessage[u'details'][u'payload'] = newmessage[u'details'][u'payload'][0:4095] - if 'payload_printable' in newmessage[u'details']: - newmessage[u'details'][u'payload_printable'] = newmessage[u'details'][u'payload_printable'][0:4095] + if 'packet' in newmessage['details']: + newmessage['details']['packet'] = newmessage['details']['packet'][0:4095] + if 'payload' in newmessage['details']: + newmessage['details']['payload'] 
= newmessage['details']['payload'][0:4095] + if 'payload_printable' in newmessage['details']: + newmessage['details']['payload_printable'] = newmessage['details']['payload_printable'][0:4095] # Match names to Bro - newmessage[u'details'][u'originipbytes'] = 0 - newmessage[u'details'][u'responseipbytes'] = 0 - newmessage[u'details'][u'orig_pkts'] = 0 - newmessage[u'details'][u'resp_pkts'] = 0 - if 'flow' in newmessage[u'details']: - if 'bytes_toserver' in newmessage[u'details'][u'flow']: - newmessage[u'details'][u'originipbytes'] = newmessage['details']['flow']['bytes_toserver'] + newmessage['details']['originipbytes'] = 0 + newmessage['details']['responseipbytes'] = 0 + newmessage['details']['orig_pkts'] = 0 + newmessage['details']['resp_pkts'] = 0 + if 'flow' in newmessage['details']: + if 'bytes_toserver' in newmessage['details']['flow']: + newmessage['details']['originipbytes'] = newmessage['details']['flow']['bytes_toserver'] del(newmessage['details']['flow']['bytes_toserver']) - if 'bytes_toclient' in newmessage[u'details'][u'flow']: - newmessage[u'details'][u'responseipbytes'] = newmessage['details']['flow']['bytes_toclient'] + if 'bytes_toclient' in newmessage['details']['flow']: + newmessage['details']['responseipbytes'] = newmessage['details']['flow']['bytes_toclient'] del(newmessage['details']['flow']['bytes_toclient']) - if 'pkts_toserver' in newmessage[u'details'][u'flow']: - newmessage[u'details'][u'orig_pkts'] = newmessage['details']['flow']['pkts_toserver'] + if 'pkts_toserver' in newmessage['details']['flow']: + newmessage['details']['orig_pkts'] = newmessage['details']['flow']['pkts_toserver'] del(newmessage['details']['flow']['pkts_toserver']) - if 'pkts_toclient' in newmessage[u'details'][u'flow']: - newmessage[u'details'][u'resp_pkts'] = newmessage['details']['flow']['pkts_toclient'] + if 'pkts_toclient' in newmessage['details']['flow']: + newmessage['details']['resp_pkts'] = newmessage['details']['flow']['pkts_toclient'] 
del(newmessage['details']['flow']['pkts_toclient']) - if 'http' in newmessage[u'details']: - if 'hostname' in newmessage[u'details'][u'http']: - newmessage[u'details'][u'host'] = newmessage[u'details'][u'http'][u'hostname'] - del(newmessage[u'details'][u'http'][u'hostname']) - if 'http_method' in newmessage[u'details'][u'http']: - newmessage[u'details'][u'method'] = newmessage[u'details'][u'http'][u'http_method'] - del(newmessage[u'details'][u'http'][u'http_method']) - if 'http_user_agent' in newmessage[u'details'][u'http']: - newmessage[u'details'][u'user_agent'] = newmessage[u'details'][u'http'][u'http_user_agent'] - del(newmessage[u'details'][u'http'][u'http_user_agent']) - if 'status' in newmessage[u'details'][u'http']: - newmessage[u'details'][u'status_code'] = newmessage[u'details'][u'http'][u'status'] - del(newmessage[u'details'][u'http'][u'status']) - if 'url' in newmessage[u'details'][u'http']: - newmessage[u'details'][u'uri'] = newmessage[u'details'][u'http'][u'url'] - del(newmessage[u'details'][u'http'][u'url']) - if 'redirect' in newmessage[u'details'][u'http']: - newmessage[u'details'][u'redirect_dst'] = newmessage[u'details'][u'http'][u'redirect'] - del(newmessage[u'details'][u'http'][u'redirect']) - if 'length' in newmessage[u'details'][u'http']: - newmessage[u'details'][u'request_body_len'] = newmessage[u'details'][u'http'][u'length'] - del(newmessage[u'details'][u'http'][u'length']) - if 'http_response_body' in newmessage[u'details'][u'http']: - newmessage[u'details'][u'http_response_body'] = newmessage[u'details'][u'http'][u'http_response_body'][0:4095] - del(newmessage[u'details'][u'http'][u'http_response_body']) - if 'http_response_body_printable' in newmessage[u'details'][u'http']: - newmessage[u'details'][u'http_response_body_printable'] = newmessage[u'details'][u'http'][u'http_response_body_printable'][0:4095] - del(newmessage[u'details'][u'http'][u'http_response_body_printable']) - if 'app_proto' in newmessage[u'details']: + if 'http' in 
newmessage['details']: + if 'hostname' in newmessage['details']['http']: + newmessage['details']['host'] = newmessage['details']['http']['hostname'] + del(newmessage['details']['http']['hostname']) + if 'http_method' in newmessage['details']['http']: + newmessage['details']['method'] = newmessage['details']['http']['http_method'] + del(newmessage['details']['http']['http_method']) + if 'http_user_agent' in newmessage['details']['http']: + newmessage['details']['user_agent'] = newmessage['details']['http']['http_user_agent'] + del(newmessage['details']['http']['http_user_agent']) + if 'status' in newmessage['details']['http']: + newmessage['details']['status_code'] = newmessage['details']['http']['status'] + del(newmessage['details']['http']['status']) + if 'url' in newmessage['details']['http']: + newmessage['details']['uri'] = newmessage['details']['http']['url'] + del(newmessage['details']['http']['url']) + if 'redirect' in newmessage['details']['http']: + newmessage['details']['redirect_dst'] = newmessage['details']['http']['redirect'] + del(newmessage['details']['http']['redirect']) + if 'length' in newmessage['details']['http']: + newmessage['details']['request_body_len'] = newmessage['details']['http']['length'] + del(newmessage['details']['http']['length']) + if 'http_response_body' in newmessage['details']['http']: + newmessage['details']['http_response_body'] = newmessage['details']['http']['http_response_body'][0:4095] + del(newmessage['details']['http']['http_response_body']) + if 'http_response_body_printable' in newmessage['details']['http']: + newmessage['details']['http_response_body_printable'] = newmessage['details']['http']['http_response_body_printable'][0:4095] + del(newmessage['details']['http']['http_response_body_printable']) + if 'app_proto' in newmessage['details']: newmessage['details']['service'] = newmessage['details']['app_proto'] del(newmessage['details']['app_proto']) # Make sure details.vars.flowbits exceptions are handled @@ -173,11 
+173,11 @@ def onMessage(self, message, metadata): if 'ET.http.javaclient.vulnerable': del(newmessage['details']['vars']['flowbits']['ET.http.javaclient']) newmessage['details']['vars']['flowbits']['ET.http.javaclient.vulnerable'] = "True" - newmessage[u'summary'] = ( - u'{sourceipaddress}:'+ - u'{sourceport} -> '+ - u'{destinationipaddress}:' - u'{destinationport}' + newmessage['summary'] = ( + '{sourceipaddress}:'+ + '{sourceport} -> '+ + '{destinationipaddress}:' + '{destinationport}' ).format(**newmessage['details']) return (newmessage, metadata) diff --git a/tests/alerts/actions/test_dashboard_geomodel.py b/tests/alerts/actions/test_dashboard_geomodel.py index 33610e715..9869b7693 100644 --- a/tests/alerts/actions/test_dashboard_geomodel.py +++ b/tests/alerts/actions/test_dashboard_geomodel.py @@ -25,12 +25,12 @@ def setup(self): self.good_message_dict = { "category": "geomodel", "tags": ['geomodel'], - "summary": u"ttesterson@mozilla.com NEWCOUNTRY Diamond Bar, United States access from 1.2.3.4 (duo) [deviation:12.07010770457331] last activity was from Ottawa, Canada (3763 km away) approx 23.43 hours before", + "summary": "ttesterson@mozilla.com NEWCOUNTRY Diamond Bar, United States access from 1.2.3.4 (duo) [deviation:12.07010770457331] last activity was from Ottawa, Canada (3763 km away) approx 23.43 hours before", "events": [ { - u'documentsource': { - u'details': { - u'event_time': u'2018-08-08T02:11:41.85Z', + 'documentsource': { + 'details': { + 'event_time': '2018-08-08T02:11:41.85Z', } } } @@ -38,8 +38,8 @@ def setup(self): "details": { "category": "NEWCOUNTRY", 'previous_locality_details': { - u'city': u'Oakland', - u'country': u'United States' + 'city': 'Oakland', + 'country': 'United States' }, "locality_details": { "city": "Diamond Bar", @@ -104,9 +104,9 @@ def test_malformed_message_bad(self): assert self.test_result_record is None def test_str_location(self): - self.good_message_dict['summary'] = u"ttesterson@mozilla.com NEWCOUNTRY \u0107abcd, 
\xe4Spain access from 1.2.3.4 (duo) [deviation:12.07010770457331] last activity was from Ottawa, Canada (3763 km away) approx 23.43 hours before" - self.good_message_dict['details']['locality_details']['city'] = u'\u0107abcd' - self.good_message_dict['details']['locality_details']['country'] = u'\xe4Spain' + self.good_message_dict['summary'] = "ttesterson@mozilla.com NEWCOUNTRY \u0107abcd, \xe4Spain access from 1.2.3.4 (duo) [deviation:12.07010770457331] last activity was from Ottawa, Canada (3763 km away) approx 23.43 hours before" + self.good_message_dict['details']['locality_details']['city'] = '\u0107abcd' + self.good_message_dict['details']['locality_details']['country'] = '\xe4Spain' assert self.test_result_record is None result_message = self.plugin.onMessage(self.good_message_dict) assert result_message == self.good_message_dict @@ -117,7 +117,7 @@ def test_str_location(self): assert type(result_message['details']['locality_details']['country']) is str def test_str_username(self): - self.good_message_dict['details']['principal'] = u'\xfcttesterson@mozilla.com' + self.good_message_dict['details']['principal'] = '\xfcttesterson@mozilla.com' assert self.test_result_record is None result_message = self.plugin.onMessage(self.good_message_dict) assert result_message == self.good_message_dict @@ -134,8 +134,8 @@ def test_written_details(self): assert self.test_result_record is not None result_db_entry = self.test_result_record assert result_db_entry['details'] == { - 'New IP': u'1.2.3.4 (APNIC Debogon Project, APNIC Pty Ltd)', - 'New Location': u'Diamond Bar, United States', - 'Previous Location': u'Oakland, United States', + 'New IP': '1.2.3.4 (APNIC Debogon Project, APNIC Pty Ltd)', + 'New Location': 'Diamond Bar, United States', + 'Previous Location': 'Oakland, United States', 'Timestamp': 'Wednesday, August 08 2018 02:11 UTC' } diff --git a/tests/alerts/alert_test_suite.py b/tests/alerts/alert_test_suite.py index dc00c8fa9..4e647dcbb 100644 --- 
a/tests/alerts/alert_test_suite.py +++ b/tests/alerts/alert_test_suite.py @@ -218,7 +218,7 @@ def verify_expected_alert(self, found_alert, test_case): # Verify that the alert properties are set correctly for key, value in test_case.expected_alert.items(): - assert found_alert['_source'][key] == value, u'{0} does not match!\n\tgot: {1}\n\texpected: {2}'.format(key, found_alert['_source'][key], value) + assert found_alert['_source'][key] == value, '{0} does not match!\n\tgot: {1}\n\texpected: {2}'.format(key, found_alert['_source'][key], value) def verify_alert_task(self, alert_task, test_case): assert alert_task.classname() == self.alert_classname, 'Alert classname did not match expected name' diff --git a/tests/alerts/test_feedback_events.py b/tests/alerts/test_feedback_events.py index 8ea481cf3..b21a95b44 100644 --- a/tests/alerts/test_feedback_events.py +++ b/tests/alerts/test_feedback_events.py @@ -31,28 +31,28 @@ class TestAlertFeedbackEvents(AlertTestSuite): } default_event = { "_source": { - 'category': u'user_feedback', + 'category': 'user_feedback', 'details': { - u'action': u'escalate', - u'alert_information': { - u'alert_code': u'123456', - u'alert_id': u'7891011', - u'alert_str_json': json.dumps(inner_alert_dict), - u'date': u'2012-06-15', - u'description': u'This alert is created based on geo ip information about the last login of a user.', - u'duplicate': False, - u'last_update': 1524686938, - u'risk': u'high', - u'state': u'escalate', - u'summary': u'Did you recently login from Montana, Tonga (109.117.1.33)?', - u'url': u'https://www.mozilla.org', - u'url_title': u'Get Help', - u'user_id': u'ad|Mozilla|ttesterson' + 'action': 'escalate', + 'alert_information': { + 'alert_code': '123456', + 'alert_id': '7891011', + 'alert_str_json': json.dumps(inner_alert_dict), + 'date': '2012-06-15', + 'description': 'This alert is created based on geo ip information about the last login of a user.', + 'duplicate': False, + 'last_update': 1524686938, + 'risk': 
'high', + 'state': 'escalate', + 'summary': 'Did you recently login from Montana, Tonga (109.117.1.33)?', + 'url': 'https://www.mozilla.org', + 'url_title': 'Get Help', + 'user_id': 'ad|Mozilla|ttesterson' } }, 'mozdefhostname': 'host1', 'severity': 'INFO', - 'summary': u'Did you recently login from Montana, Tonga (109.117.1.33)?', + 'summary': 'Did you recently login from Montana, Tonga (109.117.1.33)?', 'tags': ['SSODashboardAlertFeedback'] } } @@ -86,9 +86,9 @@ class TestAlertFeedbackEvents(AlertTestSuite): ) unicode_event = AlertTestSuite.create_event(default_event) - unicode_event['_source']['details']['alert_information']['user_id'] = u'\xfctest' + unicode_event['_source']['details']['alert_information']['user_id'] = '\xfctest' unicode_alert = AlertTestSuite.create_alert(default_alert) - unicode_alert['summary'] = u'\xfctest escalated alert within single-sign on (SSO) dashboard. Event Date: 2012-06-15 Summary: "Did you recently login from Montana, Tonga (109.117.1.33)?"' + unicode_alert['summary'] = '\xfctest escalated alert within single-sign on (SSO) dashboard. 
Event Date: 2012-06-15 Summary: "Did you recently login from Montana, Tonga (109.117.1.33)?"' test_cases.append( PositiveAlertTestCase( description="Positive test case with good unicode event", diff --git a/tests/alerts/test_geomodel.py b/tests/alerts/test_geomodel.py index 3cd042ea1..e1bf8aec9 100644 --- a/tests/alerts/test_geomodel.py +++ b/tests/alerts/test_geomodel.py @@ -87,8 +87,8 @@ class TestAlertGeomodel(AlertTestSuite): movement_event = { "_source": { - u'category': u'geomodelnotice', - u'details': { + 'category': 'geomodelnotice', + 'details': { 'category': 'MOVEMENT', }, 'severity': 'NOTICE', @@ -111,33 +111,33 @@ class TestAlertGeomodel(AlertTestSuite): ) unicode_event = AlertTestSuite.create_event(default_event) - unicode_event['_source']['summary'] = u"\xfcttesterson@mozilla.com NEWCOUNTRY \u0107Bar, \u0107United States access from 1.2.3.4 (duo) [deviation:12.07010770457331] last activity was from Ottawa, Canada (3763 km away) approx 23.43 hours before" + unicode_event['_source']['summary'] = "\xfcttesterson@mozilla.com NEWCOUNTRY \u0107Bar, \u0107United States access from 1.2.3.4 (duo) [deviation:12.07010770457331] last activity was from Ottawa, Canada (3763 km away) approx 23.43 hours before" unicode_event['_source']['details']['prev_locality_details'] = { - "city": u"\u0107Toronto", - "country": u"\u0107Canada" + "city": "\u0107Toronto", + "country": "\u0107Canada" } unicode_event['_source']['details']['locality_details'] = { - "city": u"\u0107Bar", - "country": u"\u0107United States" + "city": "\u0107Bar", + "country": "\u0107United States" } - unicode_event['_source']['details']['principal'] = u"\xfcttesterson@mozilla.com" + unicode_event['_source']['details']['principal'] = "\xfcttesterson@mozilla.com" unicode_alert = { "category": "geomodel", "tags": ['geomodel'], "severity": "NOTICE", - "summary": u"\xfcttesterson@mozilla.com NEWCOUNTRY \u0107Bar, \u0107United States access from 1.2.3.4 (duo) [deviation:12.07010770457331] last activity was 
from Ottawa, Canada (3763 km away) approx 23.43 hours before", + "summary": "\xfcttesterson@mozilla.com NEWCOUNTRY \u0107Bar, \u0107United States access from 1.2.3.4 (duo) [deviation:12.07010770457331] last activity was from Ottawa, Canada (3763 km away) approx 23.43 hours before", "details": { "category": "NEWCOUNTRY", "previous_locality_details": { - "city": u"\u0107Toronto", - "country": u"\u0107Canada" + "city": "\u0107Toronto", + "country": "\u0107Canada" }, "locality_details": { - "city": u"\u0107Bar", - "country": u"\u0107United States" + "city": "\u0107Bar", + "country": "\u0107United States" }, - "principal": u"\xfcttesterson@mozilla.com", + "principal": "\xfcttesterson@mozilla.com", "source_ip": "1.2.3.4" }, "url": "https://www.mozilla.org", diff --git a/tests/mozdef_util/test_elasticsearch_client.py b/tests/mozdef_util/test_elasticsearch_client.py index 937d5fcb4..ee7202291 100644 --- a/tests/mozdef_util/test_elasticsearch_client.py +++ b/tests/mozdef_util/test_elasticsearch_client.py @@ -128,7 +128,7 @@ def test_close_index(self): self.es_client.create_index('test_index') time.sleep(1) closed = self.es_client.close_index('test_index') - assert closed == {u'acknowledged': True} + assert closed == {'acknowledged': True} class TestWritingToClosedIndex(ElasticsearchClientTest): @@ -161,7 +161,7 @@ def test_index_open(self): time.sleep(1) self.es_client.close_index('test_index') opened = self.es_client.open_index('test_index') - assert opened == {u'acknowledged': True, u'shards_acknowledged': True} + assert opened == {'acknowledged': True, 'shards_acknowledged': True} class TestWithBadIndex(ElasticsearchClientTest): diff --git a/tests/mq/plugins/test_broFixup.py b/tests/mq/plugins/test_broFixup.py index 4cde3180c..ceeccca16 100644 --- a/tests/mq/plugins/test_broFixup.py +++ b/tests/mq/plugins/test_broFixup.py @@ -736,12 +736,12 @@ def test_smtp_log(self): "mailfrom":"bugzilla-daemon@mozilla.org", "rcptto":["bugmail@firebot.glob.uno"], "date":"Mon, 18 Sep 
2017 02:59:56 +0000", - "from":"\u0022Bugzilla@Mozilla\u0022 ", + "from":"\\u0022Bugzilla@Mozilla\\u0022 ", "to":["bugmail@firebot.glob.uno"], "msg_id":"", - "subject":"[Bug 1400759] New: Debugger script search not working when content type = \u0027image/svg+xml\u0027", - "first_received":"by jobqueue2.bugs.scl3.mozilla.com (Postfix, from userid 0)\u0009id 87345380596; Mon, 18 Sep 2017 02:59:56 +0000 (UTC)", - "second_received":"from jobqueue2.bugs.scl3.mozilla.com (jobqueue2.bugs.scl3.mozilla.com [10.22.82.42])\u0009by mx1.mail.scl3.mozilla.com (Postfix) with ESMTPS id 9EBCBC0A97\u0009for ; Mon, 18 Sep 2017 02:59:56 +0000 (UTC)", + "subject":"[Bug 1400759] New: Debugger script search not working when content type = \\u0027image/svg+xml\\u0027", + "first_received":"by jobqueue2.bugs.scl3.mozilla.com (Postfix, from userid 0)\\u0009id 87345380596; Mon, 18 Sep 2017 02:59:56 +0000 (UTC)", + "second_received":"from jobqueue2.bugs.scl3.mozilla.com (jobqueue2.bugs.scl3.mozilla.com [10.22.82.42])\\u0009by mx1.mail.scl3.mozilla.com (Postfix) with ESMTPS id 9EBCBC0A97\\u0009for ; Mon, 18 Sep 2017 02:59:56 +0000 (UTC)", "last_reply":"250 2.0.0 Ok: queued as 3E1EC13F655", "path":["128.199.139.6","63.245.214.155","127.0.0.1","10.22.82.42"], "tls":'false', @@ -775,9 +775,9 @@ def test_smtp_log2(self): "mailfrom":"bugzilla-daemon@mozilla.org", "rcptto":["bugmail@firebot.glob.uno"], "date":"Mon, 18 Sep 2017 02:59:56 +0000", - "subject":"[Bug 1400759] New: Debugger script search not working when content type = \u0027image/svg+xml\u0027", - "first_received":"by jobqueue2.bugs.scl3.mozilla.com (Postfix, from userid 0)\u0009id 87345380596; Mon, 18 Sep 2017 02:59:56 +0000 (UTC)", - "second_received":"from jobqueue2.bugs.scl3.mozilla.com (jobqueue2.bugs.scl3.mozilla.com [10.22.82.42])\u0009by mx1.mail.scl3.mozilla.com (Postfix) with ESMTPS id 9EBCBC0A97\u0009for ; Mon, 18 Sep 2017 02:59:56 +0000 (UTC)", + "subject":"[Bug 1400759] New: Debugger script search not working when content type 
= \\u0027image/svg+xml\\u0027", + "first_received":"by jobqueue2.bugs.scl3.mozilla.com (Postfix, from userid 0)\\u0009id 87345380596; Mon, 18 Sep 2017 02:59:56 +0000 (UTC)", + "second_received":"from jobqueue2.bugs.scl3.mozilla.com (jobqueue2.bugs.scl3.mozilla.com [10.22.82.42])\\u0009by mx1.mail.scl3.mozilla.com (Postfix) with ESMTPS id 9EBCBC0A97\\u0009for ; Mon, 18 Sep 2017 02:59:56 +0000 (UTC)", "last_reply":"250 2.0.0 Ok: queued as 3E1EC13F655", "path":["128.199.139.6","63.245.214.155","127.0.0.1","10.22.82.42"], "tls":'false', @@ -804,13 +804,13 @@ def test_smtp_unicode(self): } message = { - u'from': u'"Test from field\xe2\x80\x99s here" ', - u'id.orig_h': u'1.2.3.4', - u'id.orig_p': 47311, - u'id.resp_h': u'5.6.7.8', - u'id.resp_p': 25, - u'subject': u'Example subject of email\xe2\x80\x99s', - u'ts': 1531818582.216429, + 'from': '"Test from field\xe2\x80\x99s here" ', + 'id.orig_h': '1.2.3.4', + 'id.orig_p': 47311, + 'id.resp_h': '5.6.7.8', + 'id.resp_p': 25, + 'subject': 'Example subject of email\xe2\x80\x99s', + 'ts': 1531818582.216429, } event['MESSAGE'] = json.dumps(message) @@ -820,8 +820,8 @@ def test_smtp_unicode(self): self.verify_metadata(metadata) assert toUTC(message['ts']).isoformat() == result['utctimestamp'] assert toUTC(message['ts']).isoformat() == result['timestamp'] - assert result['details']['from'] == u'"Test from field\xe2\x80\x99s here" ' - assert result['details']['subject'] == u'Example subject of email\xe2\x80\x99s' + assert result['details']['from'] == '"Test from field\xe2\x80\x99s here" ' + assert result['details']['subject'] == 'Example subject of email\xe2\x80\x99s' def test_ssh_log(self): event = { @@ -1752,7 +1752,7 @@ def test_dcerpc_log(self): "id.resp_h":"10.22.69.21", "id.resp_p":445, "rtt":0.001135, - "named_pipe":"\u005cpipe\u005clsass", + "named_pipe":"\\u005cpipe\\u005clsass", "endpoint":"samr", "operation":"SamrEnumerateDomainsInSamServer" } @@ -1783,7 +1783,7 @@ def test_dcerpc_log2(self): "id.resp_h":"10.22.69.21", 
"id.resp_p":445, "rtt":0.001135, - "named_pipe":"\u005cpipe\u005clsass" + "named_pipe":"\\u005cpipe\\u005clsass" } event['MESSAGE'] = json.dumps(MESSAGE) @@ -1977,8 +1977,8 @@ def test_smbfiles_log(self): "id.resp_h":"10.22.69.21", "id.resp_p":445, "action":"SMB::FILE_OPEN", - "name":"releng.ad.mozilla.com\u005cPolicies\u005c{8614FE9A-333C-47C1-9EFD-856B4DF64883}\u005cMachine\u005cPreferences\u005cScheduledTasks", - "path":"\u005c\u005cDC8.releng.ad.mozilla.com\u005cSysVol", + "name":"releng.ad.mozilla.com\\u005cPolicies\\u005c{8614FE9A-333C-47C1-9EFD-856B4DF64883}\\u005cMachine\\u005cPreferences\\u005cScheduledTasks", + "path":"\\u005c\\u005cDC8.releng.ad.mozilla.com\\u005cSysVol", "size":4096, "times.modified":1401486067.13068, "times.accessed":1401486067.13068, @@ -2058,7 +2058,7 @@ def test_smbmapping_log(self): "id.orig_p":49720, "id.resp_h":"10.22.69.18", "id.resp_p":445, - "path":"\u005c\u005cDC6\u005cSYSVOL", + "path":"\\u005c\\u005cDC6\\u005cSYSVOL", "share_type":"DISK" } event['MESSAGE'] = json.dumps(MESSAGE) diff --git a/tests/mq/plugins/test_suricataFixup.py b/tests/mq/plugins/test_suricataFixup.py index 7d4322440..a6d0eb8d4 100644 --- a/tests/mq/plugins/test_suricataFixup.py +++ b/tests/mq/plugins/test_suricataFixup.py @@ -569,10 +569,10 @@ def test_eve_log_alert_rename(self): self.verify_metadata(metadata) assert 'suricata_alert' in result['details'] assert 'alert' not in result['details'] - assert result['details'][u'suricata_alert'][u'action'] == MESSAGE['alert']['action'] - assert result['details'][u'suricata_alert'][u'gid'] == MESSAGE['alert']['gid'] - assert result['details'][u'suricata_alert'][u'rev'] == MESSAGE['alert']['rev'] - assert result['details'][u'suricata_alert'][u'signature_id'] == MESSAGE['alert']['signature_id'] - assert result['details'][u'suricata_alert'][u'signature'] == MESSAGE['alert']['signature'] - assert result['details'][u'suricata_alert'][u'category'] == MESSAGE['alert']['category'] - assert 
result['details'][u'suricata_alert'][u'severity'] == MESSAGE['alert']['severity'] + assert result['details']['suricata_alert']['action'] == MESSAGE['alert']['action'] + assert result['details']['suricata_alert']['gid'] == MESSAGE['alert']['gid'] + assert result['details']['suricata_alert']['rev'] == MESSAGE['alert']['rev'] + assert result['details']['suricata_alert']['signature_id'] == MESSAGE['alert']['signature_id'] + assert result['details']['suricata_alert']['signature'] == MESSAGE['alert']['signature'] + assert result['details']['suricata_alert']['category'] == MESSAGE['alert']['category'] + assert result['details']['suricata_alert']['severity'] == MESSAGE['alert']['severity'] diff --git a/tests/mq/test_esworker_eventtask.py b/tests/mq/test_esworker_eventtask.py index bb642a9d8..e8d56a673 100644 --- a/tests/mq/test_esworker_eventtask.py +++ b/tests/mq/test_esworker_eventtask.py @@ -37,20 +37,20 @@ def setup(self): def test_syslog_dict(self): syslog_dict = { - u'CATEGORY': 'syslog', - u'DATE': u'Oct 27 14:01:12', - u'FACILITY': u'daemon', - u'HOST': u'ub_server', - u'HOST_FROM': u'10.1.20.139', - u'LEGACY_MSGHDR': u'systemd[1]: ', - u'MESSAGE': u'Stopped Getty on tty1.', - u'PID': u'1', - u'PRIORITY': u'info', - u'PROGRAM': u'systemd', - u'SEQNUM': u'8', - u'SOURCE': u'syslog_tcp', - u'SOURCEIP': u'10.1.20.139', - u'TAGS': u'.source.syslog_tcp' + 'CATEGORY': 'syslog', + 'DATE': 'Oct 27 14:01:12', + 'FACILITY': 'daemon', + 'HOST': 'ub_server', + 'HOST_FROM': '10.1.20.139', + 'LEGACY_MSGHDR': 'systemd[1]: ', + 'MESSAGE': 'Stopped Getty on tty1.', + 'PID': '1', + 'PRIORITY': 'info', + 'PROGRAM': 'systemd', + 'SEQNUM': '8', + 'SOURCE': 'syslog_tcp', + 'SOURCEIP': '10.1.20.139', + 'TAGS': '.source.syslog_tcp' } result = self.key_mapping(syslog_dict) diff --git a/tests/mq/test_esworker_sns_sqs.py b/tests/mq/test_esworker_sns_sqs.py index 95c9c4416..ef009541c 100644 --- a/tests/mq/test_esworker_sns_sqs.py +++ b/tests/mq/test_esworker_sns_sqs.py @@ -59,91 +59,91 @@ def 
test_syslog_event(self): } self.consumer.on_message(event) expected_event = { - u'category': u'syslog', - u'details': {u'logger': u'systemslogs'}, - u'hostname': u'abcdefghostname', - u'mozdefhostname': u'unittest.hostname', - u'processid': u'123', - u'processname': u'dhclient', - u'receivedtimestamp': u'2017-05-26T17:47:17.813876+00:00', - u'severity': u'INFO', - u'source': u'UNKNOWN', - u'summary': u'DHCPREQUEST of 1.2.3.4 on eth0 to 5.6.7.8 port 67 (xid=0x123456)', - u'tags': [u'example-logs-mozdef'], - u'timestamp': u'2017-05-25T07:14:15+00:00', - u'utctimestamp': u'2017-05-25T07:14:15+00:00', - u'plugins': [], - u'type': 'event' + 'category': 'syslog', + 'details': {'logger': 'systemslogs'}, + 'hostname': 'abcdefghostname', + 'mozdefhostname': 'unittest.hostname', + 'processid': '123', + 'processname': 'dhclient', + 'receivedtimestamp': '2017-05-26T17:47:17.813876+00:00', + 'severity': 'INFO', + 'source': 'UNKNOWN', + 'summary': 'DHCPREQUEST of 1.2.3.4 on eth0 to 5.6.7.8 port 67 (xid=0x123456)', + 'tags': ['example-logs-mozdef'], + 'timestamp': '2017-05-25T07:14:15+00:00', + 'utctimestamp': '2017-05-25T07:14:15+00:00', + 'plugins': [], + 'type': 'event' } self.search_and_verify_event(expected_event) def test_sso_event(self): message_dict = { - u'category': u'user_feedback', - u'details': { - u'action': u'escalate', - u'alert_information': { - u'alert_code': u'12345', - u'alert_id': u'abcdefg', - u'alert_str_json': u'{"url": "https://www.mozilla.org/alert", "severity": "NOTICE", "tags": ["geomodel"], "utctimestamp": "1976-09-13T07:43:49+00:00", "category": "geomodel", "summary": "christianherring@gmail.com NEWCOUNTRY New York, Mauritania access from 25.141.235.246", "details": {"locality_details": {"city": "New York", "country": "Mauritania"}, "category": "NEWCOUNTRY", "principal": "christianherring@gmail.com", "source_ip": "25.141.235.246"}}', - u'date': u'1998-06-24', - u'description': u'This alert is created based on geo ip information about the last login 
of a user.', - u'duplicate': False, - u'last_update': 1524700512, - u'risk': u'high', - u'state': u'escalate', - u'summary': u'Did you recently login from New York, Mauritania (25.141.235.246)?', - u'url': u'https://www.mozilla.org', - u'url_title': u'Get Help', - u'user_id': u'ad|Mozilla-LDAP-Dev|ttesterson' + 'category': 'user_feedback', + 'details': { + 'action': 'escalate', + 'alert_information': { + 'alert_code': '12345', + 'alert_id': 'abcdefg', + 'alert_str_json': '{"url": "https://www.mozilla.org/alert", "severity": "NOTICE", "tags": ["geomodel"], "utctimestamp": "1976-09-13T07:43:49+00:00", "category": "geomodel", "summary": "christianherring@gmail.com NEWCOUNTRY New York, Mauritania access from 25.141.235.246", "details": {"locality_details": {"city": "New York", "country": "Mauritania"}, "category": "NEWCOUNTRY", "principal": "christianherring@gmail.com", "source_ip": "25.141.235.246"}}', + 'date': '1998-06-24', + 'description': 'This alert is created based on geo ip information about the last login of a user.', + 'duplicate': False, + 'last_update': 1524700512, + 'risk': 'high', + 'state': 'escalate', + 'summary': 'Did you recently login from New York, Mauritania (25.141.235.246)?', + 'url': 'https://www.mozilla.org', + 'url_title': 'Get Help', + 'user_id': 'ad|Mozilla-LDAP-Dev|ttesterson' } } } event = { - u'Message': json.dumps(message_dict), - u'MessageId': u'123456-248e-5b78-84c5-46ac332ea6cd', - u'Signature': u'abcdefgh', - u'SignatureVersion': u'1', - u'SigningCertURL': u'https://sns.us-west-2.amazonaws.com/SimpleNotificationService-1098765.pem', - u'Subject': u'sso-dashboard-user-feedback', - u'Timestamp': u'2018-04-25T23:55:12.854Z', - u'TopicArn': u'arn:aws:sns:us-west-2:7777777777:SSODashboardAlertFeedback', - u'Type': u'Notification', - u'UnsubscribeURL': u'https://sns.us-west-2.amazonaws.com/?Action=Unsubscribe&SubscriptionArn=arn:aws:sns:us-west-2:7777777777:SSODashboardAlertFeedback:123456-248e-5b78-84c5-46ac332ea6cd' + 'Message': 
json.dumps(message_dict), + 'MessageId': '123456-248e-5b78-84c5-46ac332ea6cd', + 'Signature': 'abcdefgh', + 'SignatureVersion': '1', + 'SigningCertURL': 'https://sns.us-west-2.amazonaws.com/SimpleNotificationService-1098765.pem', + 'Subject': 'sso-dashboard-user-feedback', + 'Timestamp': '2018-04-25T23:55:12.854Z', + 'TopicArn': 'arn:aws:sns:us-west-2:7777777777:SSODashboardAlertFeedback', + 'Type': 'Notification', + 'UnsubscribeURL': 'https://sns.us-west-2.amazonaws.com/?Action=Unsubscribe&SubscriptionArn=arn:aws:sns:us-west-2:7777777777:SSODashboardAlertFeedback:123456-248e-5b78-84c5-46ac332ea6cd' } self.consumer.on_message(event) expected_event = { - u'category': u'user_feedback', - u'details': { - u'action': u'escalate', - u'alert_information': { - u'alert_code': u'12345', - u'alert_id': u'abcdefg', - u'alert_str_json': message_dict['details']['alert_information']['alert_str_json'], - u'date': u'1998-06-24', - u'description': u'This alert is created based on geo ip information about the last login of a user.', - u'duplicate': False, - u'last_update': 1524700512, - u'risk': u'high', - u'state': u'escalate', - u'summary': u'Did you recently login from New York, Mauritania (25.141.235.246)?', - u'url': u'https://www.mozilla.org', - u'url_title': u'Get Help', - u'user_id': u'ad|Mozilla-LDAP-Dev|ttesterson' + 'category': 'user_feedback', + 'details': { + 'action': 'escalate', + 'alert_information': { + 'alert_code': '12345', + 'alert_id': 'abcdefg', + 'alert_str_json': message_dict['details']['alert_information']['alert_str_json'], + 'date': '1998-06-24', + 'description': 'This alert is created based on geo ip information about the last login of a user.', + 'duplicate': False, + 'last_update': 1524700512, + 'risk': 'high', + 'state': 'escalate', + 'summary': 'Did you recently login from New York, Mauritania (25.141.235.246)?', + 'url': 'https://www.mozilla.org', + 'url_title': 'Get Help', + 'user_id': 'ad|Mozilla-LDAP-Dev|ttesterson' } }, - u'hostname': u'UNKNOWN', 
- u'mozdefhostname': u'unittest.hostname', - u'processid': u'UNKNOWN', - u'processname': u'UNKNOWN', - u'receivedtimestamp': u'2018-04-26T00:11:23.479565+00:00', - u'severity': u'INFO', - u'source': u'UNKNOWN', - u'summary': u'UNKNOWN', - u'tags': [u'example-logs-mozdef'], - u'timestamp': u'2018-04-26T00:11:23.479771+00:00', - u'utctimestamp': u'2018-04-26T00:11:23.479771+00:00', - u'plugins': [], - u'type': 'event' + 'hostname': 'UNKNOWN', + 'mozdefhostname': 'unittest.hostname', + 'processid': 'UNKNOWN', + 'processname': 'UNKNOWN', + 'receivedtimestamp': '2018-04-26T00:11:23.479565+00:00', + 'severity': 'INFO', + 'source': 'UNKNOWN', + 'summary': 'UNKNOWN', + 'tags': ['example-logs-mozdef'], + 'timestamp': '2018-04-26T00:11:23.479771+00:00', + 'utctimestamp': '2018-04-26T00:11:23.479771+00:00', + 'plugins': [], + 'type': 'event' } self.search_and_verify_event(expected_event) From ed1d4aa8cf4ad3b8c0a9cc7f92187aae579fc00e Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Sat, 29 Jun 2019 15:45:51 -0500 Subject: [PATCH 33/63] Fixup remaining python3 leftovers --- alerts/celeryconfig.py | 2 +- benchmarking/workers/json2Mozdef.py | 2 +- cron/update_geolite_db.py | 6 ++++-- examples/demo/sampleData2MozDef.py | 4 ++-- mq/esworker_cloudtrail.py | 2 +- mq/esworker_eventtask.py | 2 +- 6 files changed, 10 insertions(+), 8 deletions(-) diff --git a/alerts/celeryconfig.py b/alerts/celeryconfig.py index cdf396c3e..60db6659c 100644 --- a/alerts/celeryconfig.py +++ b/alerts/celeryconfig.py @@ -72,7 +72,7 @@ alert_class = getattr(alert_module, alert_classname) app.register_task(alert_class()) except ImportError as e: - print("Error importing {}").format(alert_namespace) + print("Error importing {}".format(alert_namespace)) print(e) pass except Exception as e: diff --git a/benchmarking/workers/json2Mozdef.py b/benchmarking/workers/json2Mozdef.py index c531b6295..eaf5ed304 100755 --- a/benchmarking/workers/json2Mozdef.py +++ b/benchmarking/workers/json2Mozdef.py @@ -15,7 +15,7 @@ 
from requests_futures.sessions import FuturesSession from multiprocessing import Process, Queue import logging -from Queue import Empty +from queue import Empty from requests.packages.urllib3.exceptions import ClosedPoolError import time diff --git a/cron/update_geolite_db.py b/cron/update_geolite_db.py index a2274b372..a04d34742 100755 --- a/cron/update_geolite_db.py +++ b/cron/update_geolite_db.py @@ -9,7 +9,9 @@ import os from configlib import getConfig, OptionParser -import urllib2 +import urllib.request +import urllib.error +import urllib.parse import tempfile import tarfile @@ -19,7 +21,7 @@ def fetch_db_data(db_download_location): logger.debug('Fetching db data from ' + db_download_location) - response = urllib2.urlopen(db_download_location) + response = urllib.request.urlopen(db_download_location) db_raw_data = response.read() with tempfile.NamedTemporaryFile(mode='wb') as temp: logger.debug('Writing compressed gzip to temp file: ' + temp.name) diff --git a/examples/demo/sampleData2MozDef.py b/examples/demo/sampleData2MozDef.py index 761eb2b41..d1150b1b8 100755 --- a/examples/demo/sampleData2MozDef.py +++ b/examples/demo/sampleData2MozDef.py @@ -13,7 +13,7 @@ from multiprocessing import Process, Queue import random import logging -from Queue import Empty +from queue import Empty import requests import time from configlib import getConfig, OptionParser @@ -66,7 +66,7 @@ def postLogs(logcache): a=httpsession.get_adapter(url) a.max_retries=3 r=httpsession.post(url,data=postdata) - print(r, postdata) + print(r) # append to posts if this is long running and you want # events to try again later. 
# posts.append((r,postdata,url)) diff --git a/mq/esworker_cloudtrail.py b/mq/esworker_cloudtrail.py index 1625d0f43..2c458f1a9 100755 --- a/mq/esworker_cloudtrail.py +++ b/mq/esworker_cloudtrail.py @@ -16,7 +16,7 @@ import boto.s3 from boto.sqs.message import RawMessage import gzip -from StringIO import StringIO +from io import StringIO import re import time import kombu diff --git a/mq/esworker_eventtask.py b/mq/esworker_eventtask.py index a142a07fc..f5c992330 100755 --- a/mq/esworker_eventtask.py +++ b/mq/esworker_eventtask.py @@ -112,7 +112,7 @@ def keyMapping(aDict): returndict['details']['message'] = v else: if len(v) > 0: - for details_key, details_value in v.iteritems(): + for details_key, details_value in v.items(): returndict['details'][details_key] = details_value # custom fields/details as a one off, not in an array From a79e380da9acc1bb3b4d32480f9e4a69b10cca88 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Sat, 29 Jun 2019 16:43:43 -0500 Subject: [PATCH 34/63] Fixup pytest deprecation warnings --- tests/conftest.py | 11 +++++++++ .../query_models/query_test_suite.py | 3 +-- .../mozdef_util/test_elasticsearch_client.py | 24 +++++++++---------- tests/requirements_tests.txt | 2 +- tests/rest/test_rest_index.py | 9 ++++--- tests/unit_test_suite.py | 11 +++++---- 6 files changed, 35 insertions(+), 25 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 2ba78f23d..3f00ff4cb 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -23,6 +23,17 @@ def pytest_addoption(parser): ) +def pytest_generate_tests(metafunc): + ''' just to attach the cmd-line args to a test-class that needs them ''' + delete_indexes = metafunc.config.getoption("delete_indexes") + if delete_indexes and hasattr(metafunc.cls, 'config_delete_indexes'): + metafunc.cls.config_delete_indexes = delete_indexes + + delete_queues = metafunc.config.getoption("delete_queues") + if delete_queues and hasattr(metafunc.cls, 'config_delete_queues'): + 
metafunc.cls.config_delete_queues = delete_queues + + def pytest_configure(config): warning_text = "" if not config.option.delete_indexes: diff --git a/tests/mozdef_util/query_models/query_test_suite.py b/tests/mozdef_util/query_models/query_test_suite.py index 28e2b642a..6548d17e6 100644 --- a/tests/mozdef_util/query_models/query_test_suite.py +++ b/tests/mozdef_util/query_models/query_test_suite.py @@ -7,7 +7,6 @@ import os import sys -import pytest from mozdef_util.query_models import SearchQuery sys.path.append(os.path.join(os.path.dirname(__file__), "../../")) @@ -28,7 +27,7 @@ def test_query_class(self): query = testcase[0] events = testcase[1] for event in events: - if pytest.config.option.delete_indexes: + if self.config_delete_indexes: self.reset_elasticsearch() self.setup_elasticsearch() diff --git a/tests/mozdef_util/test_elasticsearch_client.py b/tests/mozdef_util/test_elasticsearch_client.py index ee7202291..4a62b5f22 100644 --- a/tests/mozdef_util/test_elasticsearch_client.py +++ b/tests/mozdef_util/test_elasticsearch_client.py @@ -120,7 +120,7 @@ class TestCloseIndex(ElasticsearchClientTest): def teardown(self): super(TestCloseIndex, self).teardown() - if pytest.config.option.delete_indexes: + if self.config_delete_indexes: self.es_client.delete_index('test_index') def test_close_index(self): @@ -135,7 +135,7 @@ class TestWritingToClosedIndex(ElasticsearchClientTest): def teardown(self): super(TestWritingToClosedIndex, self).teardown() - if pytest.config.option.delete_indexes: + if self.config_delete_indexes: self.es_client.delete_index('test_index') def test_writing_to_closed_index(self): @@ -152,7 +152,7 @@ class TestOpenIndex(ElasticsearchClientTest): def teardown(self): super(TestOpenIndex, self).teardown() - if pytest.config.option.delete_indexes: + if self.config_delete_indexes: self.es_client.delete_index('test_index') def test_index_open(self): @@ -404,11 +404,11 @@ class TestGetIndices(ElasticsearchClientTest): def teardown(self): 
super(TestGetIndices, self).teardown() - if pytest.config.option.delete_indexes: + if self.config_delete_indexes: self.es_client.delete_index('test_index') def test_get_indices(self): - if pytest.config.option.delete_indexes: + if self.config_delete_indexes: self.es_client.create_index('test_index') time.sleep(1) indices = self.es_client.get_indices() @@ -420,11 +420,11 @@ class TestIndexExists(ElasticsearchClientTest): def teardown(self): super(TestIndexExists, self).teardown() - if pytest.config.option.delete_indexes: + if self.config_delete_indexes: self.es_client.delete_index('test_index') def test_index_exists(self): - if pytest.config.option.delete_indexes: + if self.config_delete_indexes: self.es_client.create_index('test_index') time.sleep(1) indices = self.es_client.index_exists('test_index') @@ -453,20 +453,20 @@ class TestCreatingAlias(ElasticsearchClientTest): def setup(self): super(TestCreatingAlias, self).setup() - if pytest.config.option.delete_indexes: + if self.config_delete_indexes: self.es_client.delete_index('index1', True) self.es_client.delete_index('index2', True) self.es_client.delete_index('alias1', True) def teardown(self): super(TestCreatingAlias, self).teardown() - if pytest.config.option.delete_indexes: + if self.config_delete_indexes: self.es_client.delete_index('index1', True) self.es_client.delete_index('index2', True) self.es_client.delete_index('alias1', True) def test_simple_create_alias(self): - if pytest.config.option.delete_indexes: + if self.config_delete_indexes: self.es_client.create_index('index1') self.es_client.create_alias('alias1', 'index1') alias_indices = self.es_client.get_alias('alias1') @@ -475,7 +475,7 @@ def test_simple_create_alias(self): assert 'index1' in indices def test_alias_multiple_indices(self): - if pytest.config.option.delete_indexes: + if self.config_delete_indexes: self.es_client.create_index('index1') self.es_client.create_index('index2') self.es_client.create_alias('alias1', 'index1') @@ -519,7 
+519,7 @@ def setup(self): # Recreate the test indexes with a custom mapping to throw # parsing errors - if pytest.config.option.delete_indexes: + if self.config_delete_indexes: self.es_client.delete_index("events", True) self.es_client.delete_index(self.event_index_name, True) self.es_client.create_index(self.event_index_name, index_config=mapping) diff --git a/tests/requirements_tests.txt b/tests/requirements_tests.txt index 093d85d68..e84471b89 100644 --- a/tests/requirements_tests.txt +++ b/tests/requirements_tests.txt @@ -2,5 +2,5 @@ freezegun==0.3.9 flake8==3.5.0 flake8-per-file-ignores==0.6 mock==2.0.0 -pytest==3.1.1 +pytest==4.6.4 WebTest==2.0.27 diff --git a/tests/rest/test_rest_index.py b/tests/rest/test_rest_index.py index f4cc8bf88..71a30c871 100644 --- a/tests/rest/test_rest_index.py +++ b/tests/rest/test_rest_index.py @@ -10,7 +10,6 @@ import json import time -import pytest from dateutil.parser import parse from .rest_test_suite import RestTestSuite @@ -46,12 +45,12 @@ def save_dashboard(self, dash_file, dash_name): def teardown(self): super(TestKibanaDashboardsRoute, self).teardown() - if pytest.config.option.delete_indexes: + if self.config_delete_indexes: self.es_client.delete_index('.kibana', True) def setup(self): super(TestKibanaDashboardsRoute, self).setup() - if pytest.config.option.delete_indexes: + if self.config_delete_indexes: self.es_client.delete_index('.kibana', True) self.es_client.create_index('.kibana') @@ -84,14 +83,14 @@ class TestKibanaDashboardsRouteWithoutDashboards(RestTestSuite): def setup(self): super(TestKibanaDashboardsRouteWithoutDashboards, self).setup() - if pytest.config.option.delete_indexes: + if self.config_delete_indexes: self.es_client.delete_index('.kibana', True) self.es_client.create_index('.kibana') time.sleep(0.2) def teardown(self): super(TestKibanaDashboardsRouteWithoutDashboards, self).teardown() - if pytest.config.option.delete_indexes: + if self.config_delete_indexes: 
self.es_client.delete_index('.kibana', True) def test_route_endpoints(self): diff --git a/tests/unit_test_suite.py b/tests/unit_test_suite.py index 28c2a8ec1..04b7783c4 100644 --- a/tests/unit_test_suite.py +++ b/tests/unit_test_suite.py @@ -9,7 +9,6 @@ from dateutil.parser import parse import random -import pytest import sys from mozdef_util.utilities import toUTC @@ -18,6 +17,8 @@ class UnitTestSuite(object): + config_delete_indexes = None + config_delete_queues = None def setup(self): self.options = parse_config_file() @@ -30,20 +31,20 @@ def setup(self): self.previous_event_index_name = (current_date - timedelta(days=1)).strftime("events-%Y%m%d") self.alert_index_name = current_date.strftime("alerts-%Y%m") - if pytest.config.option.delete_indexes: + if self.config_delete_indexes: self.reset_elasticsearch() self.setup_elasticsearch() - if pytest.config.option.delete_queues: + if self.config_delete_queues: self.reset_rabbitmq() def reset_rabbitmq(self): self.rabbitmq_alerts_consumer.channel.queue_purge() def teardown(self): - if pytest.config.option.delete_indexes: + if self.config_delete_indexes: self.reset_elasticsearch() - if pytest.config.option.delete_queues: + if self.config_delete_queues: self.reset_rabbitmq() # Remove any leftover plugin module as a result of loading if 'plugins' in sys.modules: From 417ecf40b69675d83c42aa22ce6ba4fb258003db Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Sun, 30 Jun 2019 16:03:36 -0500 Subject: [PATCH 35/63] Fix local import for alerttask --- alerts/lib/alerttask.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/alerts/lib/alerttask.py b/alerts/lib/alerttask.py index 79faa488a..d687bcdab 100644 --- a/alerts/lib/alerttask.py +++ b/alerts/lib/alerttask.py @@ -18,12 +18,13 @@ from collections import Counter from celery import Task from celery.utils.log import get_task_logger -from config import RABBITMQ, ES, ALERT_PLUGINS from mozdef_util.utilities.toUTC import toUTC from 
mozdef_util.elasticsearch_client import ElasticsearchClient from mozdef_util.query_models import TermMatch, ExistsMatch +from .config import RABBITMQ, ES, ALERT_PLUGINS + sys.path.append(os.path.join(os.path.dirname(__file__), "../../lib")) from lib.alert_plugin_set import AlertPluginSet From e3543a86a66c3d5c671cfe117b7b705bb247a9f1 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Sun, 30 Jun 2019 16:05:21 -0500 Subject: [PATCH 36/63] Fix relative imports for mq lib --- mq/esworker_eventtask.py | 2 +- mq/esworker_sns_sqs.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/mq/esworker_eventtask.py b/mq/esworker_eventtask.py index f5c992330..0f5bccda6 100755 --- a/mq/esworker_eventtask.py +++ b/mq/esworker_eventtask.py @@ -22,7 +22,7 @@ from mozdef_util.utilities.to_unicode import toUnicode from mozdef_util.utilities.remove_at import removeAt -from .lib.plugins import sendEventToPlugins, registerPlugins +from lib.plugins import sendEventToPlugins, registerPlugins # running under uwsgi? diff --git a/mq/esworker_sns_sqs.py b/mq/esworker_sns_sqs.py index 4e4739b58..6702214c5 100755 --- a/mq/esworker_sns_sqs.py +++ b/mq/esworker_sns_sqs.py @@ -23,9 +23,9 @@ from mozdef_util.utilities.logger import logger, initLogger from mozdef_util.elasticsearch_client import ElasticsearchClient, ElasticsearchBadServer, ElasticsearchInvalidIndex, ElasticsearchException -from .lib.aws import get_aws_credentials -from .lib.plugins import sendEventToPlugins, registerPlugins -from .lib.sqs import connect_sqs +from lib.aws import get_aws_credentials +from lib.plugins import sendEventToPlugins, registerPlugins +from lib.sqs import connect_sqs # running under uwsgi? 
From b8e7bc14db144ed4e8778320e769a5b2da99686d Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Sun, 30 Jun 2019 16:25:53 -0500 Subject: [PATCH 37/63] Update healthandstatus hex logic --- cron/healthAndStatus.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/cron/healthAndStatus.py b/cron/healthAndStatus.py index e0198f10f..a554f2693 100755 --- a/cron/healthAndStatus.py +++ b/cron/healthAndStatus.py @@ -23,7 +23,8 @@ def getDocID(servername): # create a hash to use as the ES doc id # hostname plus salt as doctype.latest hash = md5() - hash.update('{0}.mozdefhealth.latest'.format(servername)) + seed = '{0}.mozdefhealth.latest'.format(servername) + hash.update(seed.encode()) return hash.hexdigest() From a30acb2c0b7b042709d79b51fbb9353a97cb579c Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Sun, 30 Jun 2019 16:26:04 -0500 Subject: [PATCH 38/63] Update tounicode method --- mozdef_util/mozdef_util/utilities/to_unicode.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mozdef_util/mozdef_util/utilities/to_unicode.py b/mozdef_util/mozdef_util/utilities/to_unicode.py index 7fd0321db..6a2edef40 100644 --- a/mozdef_util/mozdef_util/utilities/to_unicode.py +++ b/mozdef_util/mozdef_util/utilities/to_unicode.py @@ -1,4 +1,4 @@ -def toUnicode(obj, encoding='utf-8'): +def toUnicode(obj): if not isinstance(obj, str): - obj = str(obj, encoding) + obj = str(obj) return obj From 1cb870968161b3e9eaf7bcde0d49723c67a99cdd Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Sun, 30 Jun 2019 16:52:32 -0500 Subject: [PATCH 39/63] Fix local includes for mq and alerts --- alerts/lib/alerttask.py | 3 ++- mq/esworker_cloudtrail.py | 7 ++++--- mq/esworker_eventtask.py | 4 +++- mq/esworker_papertrail.py | 4 +++- mq/esworker_sns_sqs.py | 8 +++++--- mq/esworker_sqs.py | 8 +++++--- 6 files changed, 22 insertions(+), 12 deletions(-) diff --git a/alerts/lib/alerttask.py b/alerts/lib/alerttask.py index d687bcdab..34bb26a5c 100644 --- 
a/alerts/lib/alerttask.py +++ b/alerts/lib/alerttask.py @@ -23,7 +23,8 @@ from mozdef_util.elasticsearch_client import ElasticsearchClient from mozdef_util.query_models import TermMatch, ExistsMatch -from .config import RABBITMQ, ES, ALERT_PLUGINS +sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../")) +from lib.config import RABBITMQ, ES, ALERT_PLUGINS sys.path.append(os.path.join(os.path.dirname(__file__), "../../lib")) from lib.alert_plugin_set import AlertPluginSet diff --git a/mq/esworker_cloudtrail.py b/mq/esworker_cloudtrail.py index 2c458f1a9..251be9870 100755 --- a/mq/esworker_cloudtrail.py +++ b/mq/esworker_cloudtrail.py @@ -29,9 +29,10 @@ from mozdef_util.utilities.to_unicode import toUnicode from mozdef_util.utilities.remove_at import removeAt -from lib.aws import get_aws_credentials -from lib.plugins import sendEventToPlugins, registerPlugins -from lib.sqs import connect_sqs +sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../")) +from mq.lib.aws import get_aws_credentials +from mq.lib.plugins import sendEventToPlugins, registerPlugins +from mq.lib.sqs import connect_sqs CLOUDTRAIL_VERB_REGEX = re.compile(r'^([A-Z][^A-Z]*)') diff --git a/mq/esworker_eventtask.py b/mq/esworker_eventtask.py index 0f5bccda6..04e4ae06e 100755 --- a/mq/esworker_eventtask.py +++ b/mq/esworker_eventtask.py @@ -9,6 +9,7 @@ import json import kombu import sys +import os import socket from configlib import getConfig, OptionParser from datetime import datetime @@ -22,7 +23,8 @@ from mozdef_util.utilities.to_unicode import toUnicode from mozdef_util.utilities.remove_at import removeAt -from lib.plugins import sendEventToPlugins, registerPlugins +sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../")) +from mq.lib.plugins import sendEventToPlugins, registerPlugins # running under uwsgi? 
diff --git a/mq/esworker_papertrail.py b/mq/esworker_papertrail.py index f2b0f731e..5a04e8a03 100755 --- a/mq/esworker_papertrail.py +++ b/mq/esworker_papertrail.py @@ -12,6 +12,7 @@ import json import kombu import sys +import os import socket import time from configlib import getConfig, OptionParser @@ -26,7 +27,8 @@ from mozdef_util.utilities.remove_at import removeAt from mozdef_util.utilities.logger import logger, initLogger -from lib.plugins import sendEventToPlugins, registerPlugins +sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../")) +from mq.lib.plugins import sendEventToPlugins, registerPlugins # running under uwsgi? diff --git a/mq/esworker_sns_sqs.py b/mq/esworker_sns_sqs.py index 6702214c5..b6c54ced7 100755 --- a/mq/esworker_sns_sqs.py +++ b/mq/esworker_sns_sqs.py @@ -9,6 +9,7 @@ import json import sys +import os import socket import time from configlib import getConfig, OptionParser @@ -23,9 +24,10 @@ from mozdef_util.utilities.logger import logger, initLogger from mozdef_util.elasticsearch_client import ElasticsearchClient, ElasticsearchBadServer, ElasticsearchInvalidIndex, ElasticsearchException -from lib.aws import get_aws_credentials -from lib.plugins import sendEventToPlugins, registerPlugins -from lib.sqs import connect_sqs +sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../")) +from mq.lib.aws import get_aws_credentials +from mq.lib.plugins import sendEventToPlugins, registerPlugins +from mq.lib.sqs import connect_sqs # running under uwsgi? 
diff --git a/mq/esworker_sqs.py b/mq/esworker_sqs.py index 4c66df401..da3423b24 100755 --- a/mq/esworker_sqs.py +++ b/mq/esworker_sqs.py @@ -13,6 +13,7 @@ import json import sys +import os import socket import time from configlib import getConfig, OptionParser @@ -28,9 +29,10 @@ from mozdef_util.utilities.logger import logger, initLogger from mozdef_util.elasticsearch_client import ElasticsearchClient, ElasticsearchBadServer, ElasticsearchInvalidIndex, ElasticsearchException -from lib.aws import get_aws_credentials -from lib.plugins import sendEventToPlugins, registerPlugins -from lib.sqs import connect_sqs +sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../")) +from mq.lib.aws import get_aws_credentials +from mq.lib.plugins import sendEventToPlugins, registerPlugins +from mq.lib.sqs import connect_sqs # running under uwsgi? From 81eebb18b77f5e35892476642ffda760b22e4c3b Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Sun, 30 Jun 2019 17:17:37 -0500 Subject: [PATCH 40/63] Fixup cloudtrail worker to use BytesIO --- mq/esworker_cloudtrail.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mq/esworker_cloudtrail.py b/mq/esworker_cloudtrail.py index 251be9870..c3f0fadab 100755 --- a/mq/esworker_cloudtrail.py +++ b/mq/esworker_cloudtrail.py @@ -16,7 +16,7 @@ import boto.s3 from boto.sqs.message import RawMessage import gzip -from io import StringIO +from io import BytesIO import re import time import kombu @@ -313,7 +313,7 @@ def reauth_timer(self): def process_file(self, s3file): logger.debug("Fetching %s" % s3file.name) compressedData = s3file.read() - databuf = StringIO(compressedData) + databuf = BytesIO(compressedData) gzip_file = gzip.GzipFile(fileobj=databuf) json_logs = json.loads(gzip_file.read()) return json_logs['Records'] From 0e0e92e1433cc600a16b091b07854c2cb17addb4 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Sun, 30 Jun 2019 17:35:19 -0500 Subject: [PATCH 41/63] Update hash.update calls to encode first --- 
cron/correlateUserMacAddress.py | 3 ++- cron/sqs_queue_status.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/cron/correlateUserMacAddress.py b/cron/correlateUserMacAddress.py index 253cfb66d..695a201aa 100755 --- a/cron/correlateUserMacAddress.py +++ b/cron/correlateUserMacAddress.py @@ -19,7 +19,8 @@ def getDocID(usermacaddress): # create a hash to use as the ES doc id hash = md5() - hash.update('{0}.mozdefintel.usernamemacaddress'.format(usermacaddress)) + seed = '{0}.mozdefintel.usernamemacaddress'.format(usermacaddress) + hash.update(seed.encode()) return hash.hexdigest() diff --git a/cron/sqs_queue_status.py b/cron/sqs_queue_status.py index 0a6eaf63c..1690ed5c2 100644 --- a/cron/sqs_queue_status.py +++ b/cron/sqs_queue_status.py @@ -28,7 +28,8 @@ def getDocID(sqsregionidentifier): # create a hash to use as the ES doc id # hostname plus salt as doctype.latest hash = md5() - hash.update('{0}.mozdefhealth.latest'.format(sqsregionidentifier)) + seed = '{0}.mozdefhealth.latest'.format(sqsregionidentifier) + hash.update(seed.encode()) return hash.hexdigest() From 75e5e7c9abb13ae833fd2dff139489ad281adf10 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Mon, 1 Jul 2019 17:12:22 -0500 Subject: [PATCH 42/63] Update references to python2 in docs and yml files --- .../cloudformation/mozdef-alert-developer.yml | 18 ++++++------ cloudy_mozdef/experiments/alert-writer.yml | 18 ++++++------ cloudy_mozdef/lambda_layer/Makefile | 6 ++-- docs/source/installation.rst | 28 +++++++++---------- .../notebooks/ES--Put sample events.ipynb | 8 +++--- 5 files changed, 39 insertions(+), 39 deletions(-) diff --git a/cloudy_mozdef/cloudformation/mozdef-alert-developer.yml b/cloudy_mozdef/cloudformation/mozdef-alert-developer.yml index 513249523..8e3245278 100644 --- a/cloudy_mozdef/cloudformation/mozdef-alert-developer.yml +++ b/cloudy_mozdef/cloudformation/mozdef-alert-developer.yml @@ -23,7 +23,7 @@ Resources: S3Bucket: public.us-west-2.security.allizom.org 
S3Key: mozdef-lambda-layer/layer-latest.zip CompatibleRuntimes: - - python2.7 + - python3.6 LicenseInfo: 'MPL 2.0' LambdalertIAMRole: Type: AWS::IAM::Role @@ -36,16 +36,16 @@ Resources: Service: lambda.amazonaws.com Action: sts:AssumeRole ManagedPolicyArns: - - arn:aws:iam::aws:policy/service-role/AWSLambdaVPCAccessExecutionRole - AlertWritersEnv: + - arn:aws:iam::aws:policy/service-role/AWSLambdaVPCAccessExecutionRole + AlertWritersEnv: Type: "AWS::Lambda::Function" - Properties: + Properties: Handler: "lambdalert.handle" - Role: - Fn::GetAtt: + Role: + Fn::GetAtt: - "LambdalertIAMRole" - "Arn" - Code: + Code: S3Bucket: public.us-west-2.security.allizom.org S3Key: mozdef-lambda-layer/function-latest.zip Layers: @@ -55,9 +55,9 @@ Resources: OPTIONS_ESSERVERS: !Ref ESUrl OPTIONS_MQPROTOCOL: sqs VpcConfig: - SecurityGroupIds: + SecurityGroupIds: - !Ref MozDefSecurityGroup SubnetIds: !Ref PublicSubnetIds ReservedConcurrentExecutions: 1 - Runtime: "python2.7" + Runtime: "python3.6" Timeout: 120 diff --git a/cloudy_mozdef/experiments/alert-writer.yml b/cloudy_mozdef/experiments/alert-writer.yml index 513249523..8e3245278 100644 --- a/cloudy_mozdef/experiments/alert-writer.yml +++ b/cloudy_mozdef/experiments/alert-writer.yml @@ -23,7 +23,7 @@ Resources: S3Bucket: public.us-west-2.security.allizom.org S3Key: mozdef-lambda-layer/layer-latest.zip CompatibleRuntimes: - - python2.7 + - python3.6 LicenseInfo: 'MPL 2.0' LambdalertIAMRole: Type: AWS::IAM::Role @@ -36,16 +36,16 @@ Resources: Service: lambda.amazonaws.com Action: sts:AssumeRole ManagedPolicyArns: - - arn:aws:iam::aws:policy/service-role/AWSLambdaVPCAccessExecutionRole - AlertWritersEnv: + - arn:aws:iam::aws:policy/service-role/AWSLambdaVPCAccessExecutionRole + AlertWritersEnv: Type: "AWS::Lambda::Function" - Properties: + Properties: Handler: "lambdalert.handle" - Role: - Fn::GetAtt: + Role: + Fn::GetAtt: - "LambdalertIAMRole" - "Arn" - Code: + Code: S3Bucket: public.us-west-2.security.allizom.org S3Key: 
mozdef-lambda-layer/function-latest.zip Layers: @@ -55,9 +55,9 @@ Resources: OPTIONS_ESSERVERS: !Ref ESUrl OPTIONS_MQPROTOCOL: sqs VpcConfig: - SecurityGroupIds: + SecurityGroupIds: - !Ref MozDefSecurityGroup SubnetIds: !Ref PublicSubnetIds ReservedConcurrentExecutions: 1 - Runtime: "python2.7" + Runtime: "python3.6" Timeout: 120 diff --git a/cloudy_mozdef/lambda_layer/Makefile b/cloudy_mozdef/lambda_layer/Makefile index 167a4a5cb..552f9275a 100644 --- a/cloudy_mozdef/lambda_layer/Makefile +++ b/cloudy_mozdef/lambda_layer/Makefile @@ -13,7 +13,7 @@ clean: rm -rf $(ROOT_DIR)/build/* .PHONY: deploy-shell -deploy-shell: +deploy-shell: docker run -ti -v ~/.aws:/root/.aws -v ${PWD}:/var/task mozdef/mozdef_base:latest .PHONY: package-layer @@ -34,13 +34,13 @@ package-function: clean zip -r /var/task/cloudy_mozdef/lambda_layer/function-latest.zip ." .PHONY: upload-s3 -upload-s3: +upload-s3: aws s3 cp ${PWD}/cloudy_mozdef/lambda_layer/layer-latest.zip s3://public.us-west-2.security.allizom.org/mozdef-lambda-layer/layer-latest.zip aws s3 cp ${PWD}/cloudy_mozdef/lambda_layer/function-latest.zip s3://public.us-west-2.security.allizom.org/mozdef-lambda-layer/function-latest.zip .PHONY: publish-layer publish-layer: upload-s3 aws lambda publish-layer-version \ - --layer-name mozdef --compatible-runtimes python2.7 \ + --layer-name mozdef --compatible-runtimes python3.6 \ --content S3Bucket=public.us-west-2.security.allizom.org,S3Key=mozdef-lambda-layer/layer-latest.zip diff --git a/docs/source/installation.rst b/docs/source/installation.rst index ed202d6b3..b562d7a01 100644 --- a/docs/source/installation.rst +++ b/docs/source/installation.rst @@ -66,7 +66,7 @@ Create a mozdef user:: chown mozdef: .bash* chown -R mozdef: * -We need to install a python2.7 virtualenv. +We need to install a python3.6 virtualenv. 
On Yum-based systems:: @@ -79,23 +79,23 @@ On APT-based systems:: Then:: sudo -i -u mozdef -g mozdef - mkdir /opt/mozdef/python2.7 - wget https://www.python.org/ftp/python/2.7.11/Python-2.7.11.tgz - tar xvzf Python-2.7.11.tgz - cd Python-2.7.11 - ./configure --prefix=/opt/mozdef/python2.7 --enable-shared LDFLAGS="-Wl,--rpath=/opt/mozdef/python2.7/lib" + mkdir /opt/mozdef/python3.6 + wget https://www.python.org/ftp/python/3.6.9/Python-3.6.9.tgz + tar xvzf Python-3.6.9.tgz + cd Python-3.6.9 + ./configure --prefix=/opt/mozdef/python3.6 --enable-shared LDFLAGS="-Wl,--rpath=/opt/mozdef/python3.6/lib" make make install cd /opt/mozdef wget https://bootstrap.pypa.io/get-pip.py - export LD_LIBRARY_PATH=/opt/mozdef/python2.7/lib/ - ./python2.7/bin/python get-pip.py - ./python2.7/bin/pip install virtualenv + export LD_LIBRARY_PATH=/opt/mozdef/python3.6/lib/ + ./python3.6/bin/python get-pip.py + ./python3.6/bin/pip install virtualenv mkdir ~/envs cd ~/envs - ~/python2.7/bin/virtualenv python + ~/python3.6/bin/virtualenv python source python/bin/activate pip install -r ../requirements.txt @@ -109,9 +109,9 @@ Copy the following into a file called .bash_profile for the mozdef user within / export PATH -At this point when you launch python from within your virtual environment, It should tell you that you're using Python 2.7.11. +At this point when you launch python from within your virtual environment, It should tell you that you're using Python 3.6.9. -Whenever you launch a python script from now on, you should have your mozdef virtualenv active and your LD_LIBRARY_PATH env variable should include /opt/mozdef/python2.7/lib/ automatically. +Whenever you launch a python script from now on, you should have your mozdef virtualenv active and your LD_LIBRARY_PATH env variable should include /opt/mozdef/python3.6/lib/ automatically. 
RabbitMQ ******** @@ -330,8 +330,8 @@ We use `uwsgi`_ to interface python and nginx, in your venv execute the followin wget https://projects.unbit.it/downloads/uwsgi-2.0.17.1.tar.gz tar zxvf uwsgi-2.0.17.1.tar.gz cd uwsgi-2.0.17.1 - ~/python2.7/bin/python uwsgiconfig.py --build - ~/python2.7/bin/python uwsgiconfig.py --plugin plugins/python core + ~/python3.6/bin/python uwsgiconfig.py --build + ~/python3.6/bin/python uwsgiconfig.py --plugin plugins/python core cp python_plugin.so ~/envs/python/bin/ cp uwsgi ~/envs/python/bin/ diff --git a/examples/notebooks/ES--Put sample events.ipynb b/examples/notebooks/ES--Put sample events.ipynb index dc2a8cc02..1911efb5a 100644 --- a/examples/notebooks/ES--Put sample events.ipynb +++ b/examples/notebooks/ES--Put sample events.ipynb @@ -136,9 +136,9 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 2", + "display_name": "Python 3", "language": "python", - "name": "python2" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -149,8 +149,8 @@ "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", - "pygments_lexer": "ipython2", - "version": "2.7.5" + "pygments_lexer": "ipython3", + "version": "3.6.9" } }, "nbformat": 4, From ed6c3f6abe248cc9649bfbaef9b2b7d6d53c4107 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Mon, 1 Jul 2019 17:31:27 -0500 Subject: [PATCH 43/63] Remove escaped quotes from 2to3 tool --- tests/mq/plugins/test_broFixup.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/tests/mq/plugins/test_broFixup.py b/tests/mq/plugins/test_broFixup.py index ceeccca16..b28766e8d 100644 --- a/tests/mq/plugins/test_broFixup.py +++ b/tests/mq/plugins/test_broFixup.py @@ -736,12 +736,12 @@ def test_smtp_log(self): "mailfrom":"bugzilla-daemon@mozilla.org", "rcptto":["bugmail@firebot.glob.uno"], "date":"Mon, 18 Sep 2017 02:59:56 +0000", - "from":"\\u0022Bugzilla@Mozilla\\u0022 ", + "from":"\u0022Bugzilla@Mozilla\u0022 ", 
"to":["bugmail@firebot.glob.uno"], "msg_id":"", - "subject":"[Bug 1400759] New: Debugger script search not working when content type = \\u0027image/svg+xml\\u0027", - "first_received":"by jobqueue2.bugs.scl3.mozilla.com (Postfix, from userid 0)\\u0009id 87345380596; Mon, 18 Sep 2017 02:59:56 +0000 (UTC)", - "second_received":"from jobqueue2.bugs.scl3.mozilla.com (jobqueue2.bugs.scl3.mozilla.com [10.22.82.42])\\u0009by mx1.mail.scl3.mozilla.com (Postfix) with ESMTPS id 9EBCBC0A97\\u0009for ; Mon, 18 Sep 2017 02:59:56 +0000 (UTC)", + "subject":"[Bug 1400759] New: Debugger script search not working when content type = \u0027image/svg+xml\u0027", + "first_received":"by jobqueue2.bugs.scl3.mozilla.com (Postfix, from userid 0)\u0009id 87345380596; Mon, 18 Sep 2017 02:59:56 +0000 (UTC)", + "second_received":"from jobqueue2.bugs.scl3.mozilla.com (jobqueue2.bugs.scl3.mozilla.com [10.22.82.42])\u0009by mx1.mail.scl3.mozilla.com (Postfix) with ESMTPS id 9EBCBC0A97\u0009for ; Mon, 18 Sep 2017 02:59:56 +0000 (UTC)", "last_reply":"250 2.0.0 Ok: queued as 3E1EC13F655", "path":["128.199.139.6","63.245.214.155","127.0.0.1","10.22.82.42"], "tls":'false', @@ -775,9 +775,9 @@ def test_smtp_log2(self): "mailfrom":"bugzilla-daemon@mozilla.org", "rcptto":["bugmail@firebot.glob.uno"], "date":"Mon, 18 Sep 2017 02:59:56 +0000", - "subject":"[Bug 1400759] New: Debugger script search not working when content type = \\u0027image/svg+xml\\u0027", - "first_received":"by jobqueue2.bugs.scl3.mozilla.com (Postfix, from userid 0)\\u0009id 87345380596; Mon, 18 Sep 2017 02:59:56 +0000 (UTC)", - "second_received":"from jobqueue2.bugs.scl3.mozilla.com (jobqueue2.bugs.scl3.mozilla.com [10.22.82.42])\\u0009by mx1.mail.scl3.mozilla.com (Postfix) with ESMTPS id 9EBCBC0A97\\u0009for ; Mon, 18 Sep 2017 02:59:56 +0000 (UTC)", + "subject":"[Bug 1400759] New: Debugger script search not working when content type = \u0027image/svg+xml\u0027", + "first_received":"by jobqueue2.bugs.scl3.mozilla.com (Postfix, from 
userid 0)\u0009id 87345380596; Mon, 18 Sep 2017 02:59:56 +0000 (UTC)", + "second_received":"from jobqueue2.bugs.scl3.mozilla.com (jobqueue2.bugs.scl3.mozilla.com [10.22.82.42])\u0009by mx1.mail.scl3.mozilla.com (Postfix) with ESMTPS id 9EBCBC0A97\u0009for ; Mon, 18 Sep 2017 02:59:56 +0000 (UTC)", "last_reply":"250 2.0.0 Ok: queued as 3E1EC13F655", "path":["128.199.139.6","63.245.214.155","127.0.0.1","10.22.82.42"], "tls":'false', @@ -1752,7 +1752,7 @@ def test_dcerpc_log(self): "id.resp_h":"10.22.69.21", "id.resp_p":445, "rtt":0.001135, - "named_pipe":"\\u005cpipe\\u005clsass", + "named_pipe":"\u005cpipe\u005clsass", "endpoint":"samr", "operation":"SamrEnumerateDomainsInSamServer" } @@ -1783,7 +1783,7 @@ def test_dcerpc_log2(self): "id.resp_h":"10.22.69.21", "id.resp_p":445, "rtt":0.001135, - "named_pipe":"\\u005cpipe\\u005clsass" + "named_pipe":"\u005cpipe\u005clsass" } event['MESSAGE'] = json.dumps(MESSAGE) @@ -1977,8 +1977,8 @@ def test_smbfiles_log(self): "id.resp_h":"10.22.69.21", "id.resp_p":445, "action":"SMB::FILE_OPEN", - "name":"releng.ad.mozilla.com\\u005cPolicies\\u005c{8614FE9A-333C-47C1-9EFD-856B4DF64883}\\u005cMachine\\u005cPreferences\\u005cScheduledTasks", - "path":"\\u005c\\u005cDC8.releng.ad.mozilla.com\\u005cSysVol", + "name":"releng.ad.mozilla.com\u005cPolicies\u005c{8614FE9A-333C-47C1-9EFD-856B4DF64883}\u005cMachine\u005cPreferences\u005cScheduledTasks", + "path":"\u005c\u005cDC8.releng.ad.mozilla.com\u005cSysVol", "size":4096, "times.modified":1401486067.13068, "times.accessed":1401486067.13068, @@ -2058,7 +2058,7 @@ def test_smbmapping_log(self): "id.orig_p":49720, "id.resp_h":"10.22.69.18", "id.resp_p":445, - "path":"\\u005c\\u005cDC6\\u005cSYSVOL", + "path":"\u005c\u005cDC6\u005cSYSVOL", "share_type":"DISK" } event['MESSAGE'] = json.dumps(MESSAGE) From dd1f74ef65cab3a64d4ee1f61b86d82e7bdcd92e Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Mon, 1 Jul 2019 17:38:19 -0500 Subject: [PATCH 44/63] Update ip_whois bot command --- 
bot/irc/mozdefbot.py | 2 +- bot/slack/commands/ip_whois.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/irc/mozdefbot.py b/bot/irc/mozdefbot.py index 3fcb20d60..00063da86 100755 --- a/bot/irc/mozdefbot.py +++ b/bot/irc/mozdefbot.py @@ -219,7 +219,7 @@ def priv_handler(client, actor, recipient, message): ip = netaddr.IPNetwork(field)[0] if (not ip.is_loopback() and not ip.is_private() and not ip.is_reserved()): whois = IPWhois(ip).lookup_whois() - description = whois['nets'][0]['description'].encode('string_escape') + description = whois['nets'][0]['description'] self.client.msg( recipient, "{0} description: {1}".format(field, description)) else: diff --git a/bot/slack/commands/ip_whois.py b/bot/slack/commands/ip_whois.py index 1118ac117..76feb115b 100644 --- a/bot/slack/commands/ip_whois.py +++ b/bot/slack/commands/ip_whois.py @@ -16,7 +16,7 @@ def handle_command(self, parameters): ip = netaddr.IPNetwork(ip_token)[0] if (not ip.is_loopback() and not ip.is_private() and not ip.is_reserved()): whois = IPWhois(ip).lookup_whois() - description = str(whois['nets'][0]['description']).encode('string_escape') + description = whois['nets'][0]['description'] response += "{0} description: {1}\n".format(ip_token, description) else: response += "{0}: hrm...loopback? private ip?\n".format(ip_token) From 1c122cd8907e00c6732bb5d00e5d71431e426222 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Mon, 1 Jul 2019 18:33:37 -0500 Subject: [PATCH 45/63] Create dev package of mozdef_util 3.0.0 --- mozdef_util/HISTORY.rst | 7 +++++++ mozdef_util/setup.py | 2 +- requirements.txt | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/mozdef_util/HISTORY.rst b/mozdef_util/HISTORY.rst index 8fe5ce3bb..79c4865c6 100644 --- a/mozdef_util/HISTORY.rst +++ b/mozdef_util/HISTORY.rst @@ -79,3 +79,10 @@ Add is_ip utility function ------------------ * Fixed static file includes in python package + + +3.0.0 (2019-??-??) 
+------------------ + +* Updated to work with python3 +* Removed support for python2 diff --git a/mozdef_util/setup.py b/mozdef_util/setup.py index 2363dc786..79de64352 100644 --- a/mozdef_util/setup.py +++ b/mozdef_util/setup.py @@ -59,6 +59,6 @@ test_suite='tests', tests_require=[], url='https://github.com/mozilla/MozDef/tree/master/lib', - version='2.0.3', + version='3.0.0-dev', zip_safe=False, ) diff --git a/requirements.txt b/requirements.txt index 2a59f2ada..10d929d38 100644 --- a/requirements.txt +++ b/requirements.txt @@ -32,7 +32,7 @@ jmespath==0.9.3 kombu==4.1.0 meld3==1.0.2 mozdef-client==1.0.11 -mozdef-util==2.0.3 +mozdef-util==3.0.0.dev0 netaddr==0.7.19 nose==1.3.7 oauth2client==1.4.12 From fb4576924ca5fd3de7926af5644af3012ecd9642 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Mon, 1 Jul 2019 20:27:14 -0500 Subject: [PATCH 46/63] Update auth0 and google cron scripts --- cron/auth02mozdef.py | 2 +- cron/google2mozdef.py | 8 +++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/cron/auth02mozdef.py b/cron/auth02mozdef.py index 12e54608a..96ad32e2b 100644 --- a/cron/auth02mozdef.py +++ b/cron/auth02mozdef.py @@ -253,7 +253,7 @@ def byteify(input): elif isinstance(input, list): return [byteify(element) for element in input] elif not isinstance(input, str): - return input.encode() + return str(input) else: return input diff --git a/cron/google2mozdef.py b/cron/google2mozdef.py index 15d914cac..74fb230c3 100755 --- a/cron/google2mozdef.py +++ b/cron/google2mozdef.py @@ -106,7 +106,7 @@ def main(): # or you will get access denied even with correct delegations/scope credentials = SignedJwtAssertionCredentials(client_email, - private_key, + private_key.encode(), scope=scope, sub=options.impersonate) http = Http() @@ -134,8 +134,10 @@ def main(): # change key/values like: # actor.email=someone@mozilla.com # to actor_email=value - - key,value =keyValue.split('=') + try: + key,value =keyValue.split('=') + except ValueError as e: + continue 
key=key.replace('.','_').lower() details[key]=value From cd722d15c1bb3a8c431b2241f1e3ef0f46714fab Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Tue, 2 Jul 2019 11:55:20 -0500 Subject: [PATCH 47/63] Update collect attackers cron script syntax --- cron/collectAttackers.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cron/collectAttackers.py b/cron/collectAttackers.py index 3a1ff01d6..1f670c2ca 100755 --- a/cron/collectAttackers.py +++ b/cron/collectAttackers.py @@ -240,8 +240,8 @@ def searchMongoAlerts(mozdefdb): if len(categoryCounts) == 1: # is the alert category mapped to an attacker category? for category in options.categorymapping: - if category.keys()[0] == categoryCounts[0][0]: - attacker['category'] = category[category.keys()[0]] + if list(category.keys())[0] == categoryCounts[0][0]: + attacker['category'] = category[list(category.keys())[0]] attackers.save(attacker) From 84a0c9d0c72651ef0f67f4fe4336fdbb1452538a Mon Sep 17 00:00:00 2001 From: Gene Wood Date: Tue, 2 Jul 2019 15:33:19 -0700 Subject: [PATCH 48/63] Swap out boto3 for boto in a few cron scripts --- cron/createIPBlockList.py | 25 ++++++++++--------------- cron/sqs_queue_status.py | 16 +++++++++------- cron/update_ip_list.py | 11 +++++------ 3 files changed, 24 insertions(+), 28 deletions(-) diff --git a/cron/createIPBlockList.py b/cron/createIPBlockList.py index ecefd2fb8..f5be82ad7 100755 --- a/cron/createIPBlockList.py +++ b/cron/createIPBlockList.py @@ -5,8 +5,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. # Copyright (c) 2014 Mozilla Corporation -import boto -import boto.s3 +import boto3 import netaddr import random import sys @@ -197,19 +196,15 @@ def s3_upload_file(file_path, bucket_name, key_name): """ Upload a file to the given s3 bucket and return a template url. 
""" - conn = boto.connect_s3(aws_access_key_id=options.aws_access_key_id, aws_secret_access_key=options.aws_secret_access_key) - try: - bucket = conn.get_bucket(bucket_name, validate=False) - except boto.exception.S3ResponseError: - conn.create_bucket(bucket_name) - bucket = conn.get_bucket(bucket_name, validate=False) - - key = boto.s3.key.Key(bucket) - key.key = key_name - key.set_contents_from_filename(file_path) - - key.set_acl('public-read') - url = "https://s3.amazonaws.com/{}/{}".format(bucket.name, key.name) + s3 = boto3.resource( + 's3', + aws_access_key_id=options.aws_access_key_id, + aws_secret_access_key=options.aws_secret_access_key + ) + bucket = s3.create_bucket(Bucket=bucket_name) # This call is idempotent + s3.meta.client.upload_file( + file_path, bucket_name, key_name, ExtraArgs={'ACL': 'public-read'}) + url = "https://s3.amazonaws.com/{}/{}".format(bucket_name, key_name) print("URL: {}".format(url)) return url diff --git a/cron/sqs_queue_status.py b/cron/sqs_queue_status.py index 0a6eaf63c..780d5605c 100644 --- a/cron/sqs_queue_status.py +++ b/cron/sqs_queue_status.py @@ -17,7 +17,7 @@ from configlib import getConfig, OptionParser from datetime import datetime from hashlib import md5 -import boto.sqs +import boto3 from mozdef_util.utilities.toUTC import toUTC from mozdef_util.utilities.logger import logger @@ -41,8 +41,9 @@ def getQueueSizes(): qcount = len(options.taskexchange) qcounter = qcount - 1 - mqConn = boto.sqs.connect_to_region( - options.region, + client = boto3.client( + 'sqs', + region_name=options.region, aws_access_key_id=options.accesskey, aws_secret_access_key=options.secretkey ) @@ -50,10 +51,11 @@ def getQueueSizes(): while qcounter >= 0: for exchange in options.taskexchange: logger.debug('Looking for sqs queue stats in queue' + exchange) - eventTaskQueue = mqConn.get_queue(exchange) - # get queue stats - taskQueueStats = eventTaskQueue.get_attributes('All') - sqslist['queue_stats'][qcounter] = taskQueueStats + response = 
client.get_queue_attributes( + QueueUrl=client.get_queue_url(QueueName=exchange)['QueueUrl'], + AttributeNames=['All'] + ) + sqslist['queue_stats'][qcounter] = response['Attributes'] sqslist['queue_stats'][qcounter]['name'] = exchange qcounter -= 1 diff --git a/cron/update_ip_list.py b/cron/update_ip_list.py index 4fd2384b6..74d31b60e 100644 --- a/cron/update_ip_list.py +++ b/cron/update_ip_list.py @@ -8,21 +8,20 @@ import sys import os from configlib import getConfig, OptionParser -import boto +import boto3 from mozdef_util.utilities.logger import logger, initLogger def fetch_ip_list(aws_key_id, aws_secret_key, s3_bucket, ip_list_filename): logger.debug("Fetching ip list from s3") - s3 = boto.connect_s3( + client = boto3.client( + 's3', aws_access_key_id=aws_key_id, aws_secret_access_key=aws_secret_key ) - bucket = s3.get_bucket(s3_bucket) - ip_list_key = bucket.lookup(ip_list_filename) - contents = ip_list_key.get_contents_as_string().rstrip() - return contents.split("\n") + response = client.get_object(Bucket=s3_bucket, Key=ip_list_filename) + return response['Body'].read().rstrip().splitlines() def save_ip_list(save_path, ips): From 2c797919cdd4249a34423438b67c376052c29797 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Tue, 2 Jul 2019 18:13:34 -0500 Subject: [PATCH 49/63] Convert ips to string in update_ip_list --- cron/update_ip_list.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/cron/update_ip_list.py b/cron/update_ip_list.py index 74d31b60e..cdc4f5c37 100644 --- a/cron/update_ip_list.py +++ b/cron/update_ip_list.py @@ -21,8 +21,11 @@ def fetch_ip_list(aws_key_id, aws_secret_key, s3_bucket, ip_list_filename): aws_secret_access_key=aws_secret_key ) response = client.get_object(Bucket=s3_bucket, Key=ip_list_filename) - return response['Body'].read().rstrip().splitlines() - + ip_content_list = response['Body'].read().rstrip().splitlines() + ips = [] + for ip in ip_content_list: + ips.append(ip.decode()) + return ips def 
save_ip_list(save_path, ips): ip_list_contents = '\n'.join(ips) From f585fbaf1ac17e43a5d67b3399163f21af85bc9b Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Tue, 2 Jul 2019 19:00:34 -0500 Subject: [PATCH 50/63] Remove unnecessary create bucket call --- cron/createIPBlockList.py | 1 - 1 file changed, 1 deletion(-) diff --git a/cron/createIPBlockList.py b/cron/createIPBlockList.py index 1f57d2bf8..7849bad81 100755 --- a/cron/createIPBlockList.py +++ b/cron/createIPBlockList.py @@ -202,7 +202,6 @@ def s3_upload_file(file_path, bucket_name, key_name): aws_access_key_id=options.aws_access_key_id, aws_secret_access_key=options.aws_secret_access_key ) - bucket = s3.create_bucket(Bucket=bucket_name) # This call is idempotent s3.meta.client.upload_file( file_path, bucket_name, key_name, ExtraArgs={'ACL': 'public-read'}) url = "https://s3.amazonaws.com/{}/{}".format(bucket_name, key_name) From 70bdc5778d0cdf45d8000d3e66b124ee6e2dd38b Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Tue, 2 Jul 2019 19:06:05 -0500 Subject: [PATCH 51/63] Update boto calls in createFQDNBlocklist --- cron/createFDQNBlockList.py | 25 +++++++++---------------- 1 file changed, 9 insertions(+), 16 deletions(-) diff --git a/cron/createFDQNBlockList.py b/cron/createFDQNBlockList.py index f6d3b71a8..4358f05cd 100644 --- a/cron/createFDQNBlockList.py +++ b/cron/createFDQNBlockList.py @@ -4,9 +4,7 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. # Copyright (c) 2014 Mozilla Corporation - -import boto -import boto.s3 +import boto3 import logging import random import re @@ -149,19 +147,14 @@ def s3_upload_file(file_path, bucket_name, key_name): """ Upload a file to the given s3 bucket and return a template url. 
""" - conn = boto.connect_s3(aws_access_key_id=options.aws_access_key_id, aws_secret_access_key=options.aws_secret_access_key) - try: - bucket = conn.get_bucket(bucket_name, validate=False) - except boto.exception.S3ResponseError: - conn.create_bucket(bucket_name) - bucket = conn.get_bucket(bucket_name, validate=False) - - key = boto.s3.key.Key(bucket) - key.key = key_name - key.set_contents_from_filename(file_path) - - key.set_acl('public-read') - url = "https://s3.amazonaws.com/{}/{}".format(bucket.name, key.name) + s3 = boto3.resource( + 's3', + aws_access_key_id=options.aws_access_key_id, + aws_secret_access_key=options.aws_secret_access_key + ) + s3.meta.client.upload_file( + file_path, bucket_name, key_name, ExtraArgs={'ACL': 'public-read'}) + url = "https://s3.amazonaws.com/{}/{}".format(bucket_name, key_name) print("URL: {}".format(url)) return url From 63c6cbf857291c591ece053795b71b7cb2ad6d64 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Tue, 2 Jul 2019 19:14:33 -0500 Subject: [PATCH 52/63] Update mq workers to remove need to use RawMessage --- mq/esworker_cloudtrail.py | 3 --- mq/esworker_sns_sqs.py | 4 ---- mq/esworker_sqs.py | 5 ----- 3 files changed, 12 deletions(-) diff --git a/mq/esworker_cloudtrail.py b/mq/esworker_cloudtrail.py index c3f0fadab..4a3342be4 100755 --- a/mq/esworker_cloudtrail.py +++ b/mq/esworker_cloudtrail.py @@ -14,7 +14,6 @@ from datetime import datetime import boto.sts import boto.s3 -from boto.sqs.message import RawMessage import gzip from io import BytesIO import re @@ -319,7 +318,6 @@ def process_file(self, s3file): return json_logs['Records'] def run(self): - self.taskQueue.set_message_class(RawMessage) while True: try: records = self.taskQueue.get_messages(options.prefetch) @@ -363,7 +361,6 @@ def run(self): options.region, options.accesskey, options.secretkey)) - self.taskQueue.set_message_class(RawMessage) def on_message(self, body): # print("RECEIVED MESSAGE: %r" % (body, )) diff --git a/mq/esworker_sns_sqs.py 
b/mq/esworker_sns_sqs.py index b6c54ced7..167557c9c 100755 --- a/mq/esworker_sns_sqs.py +++ b/mq/esworker_sns_sqs.py @@ -16,7 +16,6 @@ from datetime import datetime import pytz -from boto.sqs.message import RawMessage import kombu from ssl import SSLEOFError, SSLError @@ -53,8 +52,6 @@ def __init__(self, mqConnection, taskQueue, esConnection, options): self.options = options def run(self): - self.taskQueue.set_message_class(RawMessage) - while True: try: records = self.taskQueue.get_messages(self.options.prefetch) @@ -80,7 +77,6 @@ def run(self): options.secretkey, options.taskexchange ) - self.taskQueue.set_message_class(RawMessage) def on_message(self, message): try: diff --git a/mq/esworker_sqs.py b/mq/esworker_sqs.py index da3423b24..28d5d6ae4 100755 --- a/mq/esworker_sqs.py +++ b/mq/esworker_sqs.py @@ -18,7 +18,6 @@ import time from configlib import getConfig, OptionParser from datetime import datetime -from boto.sqs.message import RawMessage import base64 import kombu from ssl import SSLEOFError, SSLError @@ -168,9 +167,6 @@ def __init__(self, mqConnection, taskQueue, esConnection): self.taskQueue = taskQueue def run(self): - # Boto expects base64 encoded messages - but if the writer is not boto it's not necessarily base64 encoded - # Thus we've to detect that and decode or not decode accordingly - self.taskQueue.set_message_class(RawMessage) while True: try: records = self.taskQueue.get_messages(options.prefetch) @@ -238,7 +234,6 @@ def run(self): options.secretkey, options.taskexchange ) - self.taskQueue.set_message_class(RawMessage) def on_message(self, body, message): # print("RECEIVED MESSAGE: %r" % (body, )) From b310a74ef5ae7cb40bc096a301eb84d1c9f5552c Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Wed, 3 Jul 2019 11:46:31 -0500 Subject: [PATCH 53/63] Add extra line to update_ip_list cron script --- cron/update_ip_list.py | 1 + 1 file changed, 1 insertion(+) diff --git a/cron/update_ip_list.py b/cron/update_ip_list.py index cdc4f5c37..408b42fbe 
100644 --- a/cron/update_ip_list.py +++ b/cron/update_ip_list.py @@ -27,6 +27,7 @@ def fetch_ip_list(aws_key_id, aws_secret_key, s3_bucket, ip_list_filename): ips.append(ip.decode()) return ips + def save_ip_list(save_path, ips): ip_list_contents = '\n'.join(ips) logger.debug("Saving ip list") From 8a8562fce830d42a564f29d61e3fcd785c7c8720 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Wed, 3 Jul 2019 12:38:07 -0500 Subject: [PATCH 54/63] Convert sqs boto use to boto3 --- mq/esworker_cloudtrail.py | 23 ++++++++++++----------- mq/esworker_sns_sqs.py | 19 +++++++++---------- mq/esworker_sqs.py | 23 +++++++++++------------ mq/lib/sqs.py | 23 ++++++----------------- 4 files changed, 38 insertions(+), 50 deletions(-) diff --git a/mq/esworker_cloudtrail.py b/mq/esworker_cloudtrail.py index 4a3342be4..8ce2749ae 100755 --- a/mq/esworker_cloudtrail.py +++ b/mq/esworker_cloudtrail.py @@ -276,10 +276,9 @@ def esConnect(): class taskConsumer(object): - def __init__(self, mqConnection, taskQueue, esConnection): - self.connection = mqConnection + def __init__(self, queue, esConnection): + self.sqs_queue = queue self.esConnection = esConnection - self.taskQueue = taskQueue self.s3_connection = None # This value controls how long we sleep # between reauthenticating and getting a new set of creds @@ -320,9 +319,9 @@ def process_file(self, s3file): def run(self): while True: try: - records = self.taskQueue.get_messages(options.prefetch) + records = self.sqs_queue.receive_messages(MaxNumberOfMessages=options.prefetch) for msg in records: - body_message = msg.get_body() + body_message = msg.body event = json.loads(body_message) if not event['Message']: @@ -351,16 +350,17 @@ def run(self): for event in events: self.on_message(event) - self.taskQueue.delete_message(msg) + msg.delete() except (SSLEOFError, SSLError, socket.error): logger.info('Received network related error...reconnecting') time.sleep(5) - self.connection, self.taskQueue = connect_sqs( + self.sqs_queue = connect_sqs( 
task_exchange=options.taskexchange, **get_aws_credentials( options.region, options.accesskey, - options.secretkey)) + options.secretkey) + ) def on_message(self, body): # print("RECEIVED MESSAGE: %r" % (body, )) @@ -452,14 +452,15 @@ def main(): logger.error('Can only process SQS queues, terminating') sys.exit(1) - sqs_conn, eventTaskQueue = connect_sqs( + sqs_queue = connect_sqs( task_exchange=options.taskexchange, **get_aws_credentials( options.region, options.accesskey, - options.secretkey)) + options.secretkey) + ) # consume our queue - taskConsumer(sqs_conn, eventTaskQueue, es).run() + taskConsumer(sqs_queue, es).run() def initConfig(): diff --git a/mq/esworker_sns_sqs.py b/mq/esworker_sns_sqs.py index 167557c9c..6404b4507 100755 --- a/mq/esworker_sns_sqs.py +++ b/mq/esworker_sns_sqs.py @@ -44,34 +44,33 @@ def esConnect(): class taskConsumer(object): - def __init__(self, mqConnection, taskQueue, esConnection, options): - self.connection = mqConnection + def __init__(self, queue, esConnection, options): + self.sqs_queue = queue self.esConnection = esConnection - self.taskQueue = taskQueue self.pluginList = registerPlugins() self.options = options def run(self): while True: try: - records = self.taskQueue.get_messages(self.options.prefetch) + records = self.sqs_queue.receive_messages(MaxNumberOfMessages=options.prefetch) for msg in records: - msg_body = msg.get_body() + msg_body = msg.body try: # get_body() should be json message_json = json.loads(msg_body) self.on_message(message_json) # delete message from queue - self.taskQueue.delete_message(msg) + msg.delete() except ValueError: logger.error('Invalid message, not JSON : %r' % msg_body) - self.taskQueue.delete_message(msg) + msg.delete() continue time.sleep(.1) except (SSLEOFError, SSLError, socket.error): logger.info('Received network related error...reconnecting') time.sleep(5) - self.connection, self.taskQueue = connect_sqs( + self.sqs_queue = connect_sqs( options.region, options.accesskey, 
options.secretkey, @@ -192,14 +191,14 @@ def main(): logger.error('Can only process SQS queues, terminating') sys.exit(1) - sqs_conn, eventTaskQueue = connect_sqs( + sqs_queue = connect_sqs( task_exchange=options.taskexchange, **get_aws_credentials( options.region, options.accesskey, options.secretkey)) # consume our queue - taskConsumer(sqs_conn, eventTaskQueue, es, options).run() + taskConsumer(sqs_queue, es, options).run() def initConfig(): diff --git a/mq/esworker_sqs.py b/mq/esworker_sqs.py index 28d5d6ae4..bb55e8e24 100755 --- a/mq/esworker_sqs.py +++ b/mq/esworker_sqs.py @@ -161,21 +161,20 @@ def esConnect(): class taskConsumer(object): - def __init__(self, mqConnection, taskQueue, esConnection): - self.connection = mqConnection + def __init__(self, queue, esConnection): + self.sqs_queue = queue self.esConnection = esConnection - self.taskQueue = taskQueue def run(self): while True: try: - records = self.taskQueue.get_messages(options.prefetch) + records = self.sqs_queue.receive_messages(MaxNumberOfMessages=options.prefetch) for msg in records: # msg.id is the id, # get_body() should be json # pre process the message a bit - tmp = msg.get_body() + tmp = msg.body try: msgbody = json.loads(tmp) except ValueError: @@ -185,14 +184,14 @@ def run(self): msgbody = json.loads(tmp) except Exception as e: logger.error('Invalid message, not JSON : %r' % msg.get_body()) - self.taskQueue.delete_message(msg) + msg.delete() continue # If this is still not a dict, # let's just drop the message and move on if type(msgbody) is not dict: logger.debug("Message is not a dictionary, dropping message.") - self.taskQueue.delete_message(msg) + msg.delete() continue event = dict() @@ -219,16 +218,16 @@ def run(self): self.on_message(event, msg) # delete message from queue - self.taskQueue.delete_message(msg) + msg.delete() time.sleep(.1) except ValueError as e: logger.exception('Exception while handling message: %r' % e) - self.taskQueue.delete_message(msg) + msg.delete() except 
(SSLEOFError, SSLError, socket.error): logger.info('Received network related error...reconnecting') time.sleep(5) - self.connection, self.taskQueue = connect_sqs( + self.sqs_queue = connect_sqs( options.region, options.accesskey, options.secretkey, @@ -331,14 +330,14 @@ def main(): logger.error('Can only process SQS queues, terminating') sys.exit(1) - sqs_conn, eventTaskQueue = connect_sqs( + sqs_queue = connect_sqs( task_exchange=options.taskexchange, **get_aws_credentials( options.region, options.accesskey, options.secretkey)) # consume our queue - taskConsumer(sqs_conn, eventTaskQueue, es).run() + taskConsumer(sqs_queue, es).run() def initConfig(): diff --git a/mq/lib/sqs.py b/mq/lib/sqs.py index 0754c20c2..22f000734 100644 --- a/mq/lib/sqs.py +++ b/mq/lib/sqs.py @@ -1,29 +1,18 @@ -from boto import sqs -import boto -import boto.utils +import boto3 def connect_sqs(region_name=None, aws_access_key_id=None, aws_secret_access_key=None, task_exchange=None): - if region_name is None: - try: - # connect_sqs defaults to us-east-1 instead of the local region - region_name = boto.utils.get_instance_identity( - timeout=0.5, num_retries=1)['document']['region'] - except IndexError: - raise Exception( - "Unable to determine AWS region. 
Region isn't configured and " - "MozDef isn't running in AWS") - credentials = {} if aws_access_key_id is not None: credentials['aws_access_key_id'] = aws_access_key_id if aws_secret_access_key is not None: credentials['aws_secret_access_key'] = aws_secret_access_key - conn = sqs.connect_to_region( + + sqs = boto3.resource( + 'sqs', region_name=region_name, **credentials ) - - queue = conn.get_queue(task_exchange) - return conn, queue + queue = sqs.get_queue_by_name(QueueName=task_exchange) + return queue From 60487d65b3e65cfa8f2309850176604294fe6061 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Wed, 3 Jul 2019 15:58:12 -0500 Subject: [PATCH 55/63] Tuneup sqs_queue_status cron script --- cron/sqs_queue_status.py | 72 ++++++++++++++-------------------------- 1 file changed, 25 insertions(+), 47 deletions(-) diff --git a/cron/sqs_queue_status.py b/cron/sqs_queue_status.py index ccaaef322..416196946 100644 --- a/cron/sqs_queue_status.py +++ b/cron/sqs_queue_status.py @@ -37,28 +37,35 @@ def getQueueSizes(): logger.debug('starting') logger.debug(options) es = ElasticsearchClient(options.esservers) - sqslist = {} - sqslist['queue_stats'] = {} - qcount = len(options.taskexchange) - qcounter = qcount - 1 - client = boto3.client( - 'sqs', + sqs_client = boto3.client( + "sqs", region_name=options.region, aws_access_key_id=options.accesskey, aws_secret_access_key=options.secretkey ) - - while qcounter >= 0: - for exchange in options.taskexchange: - logger.debug('Looking for sqs queue stats in queue' + exchange) - response = client.get_queue_attributes( - QueueUrl=client.get_queue_url(QueueName=exchange)['QueueUrl'], - AttributeNames=['All'] - ) - sqslist['queue_stats'][qcounter] = response['Attributes'] - sqslist['queue_stats'][qcounter]['name'] = exchange - qcounter -= 1 + queues_stats = { + 'queues': [], + 'total_feeds': len(options.taskexchange), + 'total_messages_ready': 0, + 'username': 'mozdef' + } + for queue_name in options.taskexchange: + logger.debug('Looking 
for sqs queue stats in queue' + queue_name) + queue_url = sqs_client.get_queue_url(QueueName=queue_name)['QueueUrl'] + queue_attributes = sqs_client.get_queue_attributes(QueueUrl=queue_url, AttributeNames=['All'])['Attributes'] + queue_stats = { + 'queue': queue_name, + } + if 'ApproximateNumberOfMessages' in queue_attributes: + queue_stats['messages_ready'] = int(queue_attributes['ApproximateNumberOfMessages']) + queues_stats['total_messages_ready'] += queue_stats['messages_ready'] + if 'ApproximateNumberOfMessagesNotVisible' in queue_attributes: + queue_stats['messages_inflight'] = int(queue_attributes['ApproximateNumberOfMessagesNotVisible']) + if 'ApproximateNumberOfMessagesDelayed' in queue_attributes: + queue_stats['messages_delayed'] = int(queue_attributes['ApproximateNumberOfMessagesDelayed']) + + queues_stats['queues'].append(queue_stats) # setup a log entry for health/status. sqsid = '{0}-{1}'.format(options.account, options.region) @@ -72,35 +79,8 @@ def getQueueSizes(): category='mozdef', source='aws-sqs', tags=[], - details=[]) - healthlog['details'] = dict(username='mozdef') - healthlog['details']['queues']= list() - healthlog['details']['total_messages_ready'] = 0 - healthlog['details']['total_feeds'] = qcount + details=queues_stats) healthlog['tags'] = ['mozdef', 'status', 'sqs'] - ready = 0 - qcounter = qcount - 1 - for q in sqslist['queue_stats'].keys(): - queuelist = sqslist['queue_stats'][qcounter] - if 'ApproximateNumberOfMessages' in queuelist: - ready1 = int(queuelist['ApproximateNumberOfMessages']) - ready = ready1 + ready - healthlog['details']['total_messages_ready'] = ready - if 'ApproximateNumberOfMessages' in queuelist: - messages = int(queuelist['ApproximateNumberOfMessages']) - if 'ApproximateNumberOfMessagesNotVisible' in queuelist: - inflight = int(queuelist['ApproximateNumberOfMessagesNotVisible']) - if 'ApproximateNumberOfMessagesDelayed' in queuelist: - delayed = int(queuelist['ApproximateNumberOfMessagesDelayed']) - if 'name' in 
queuelist: - name = queuelist['name'] - queueinfo=dict( - queue=name, - messages_delayed=delayed, - messages_ready=messages, - messages_inflight=inflight) - healthlog['details']['queues'].append(queueinfo) - qcounter -= 1 healthlog['type'] = 'mozdefhealth' # post to elasticsearch servers directly without going through # message queues in case there is an availability issue @@ -109,8 +89,6 @@ def getQueueSizes(): # for use when querying for the latest sqs status healthlog['tags'] = ['mozdef', 'status', 'sqs-latest'] es.save_event(index=options.index, doc_id=getDocID(sqsid), body=json.dumps(healthlog)) -# except Exception as e: -# logger.error("Exception %r when gathering health and status " % e) def main(): From a705b97554d5cdbcd8106c0ec70a2fbcb2778983 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Wed, 3 Jul 2019 16:19:56 -0500 Subject: [PATCH 56/63] Update sns sqs tests --- tests/mq/test_esworker_sns_sqs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/mq/test_esworker_sns_sqs.py b/tests/mq/test_esworker_sns_sqs.py index ef009541c..170f8cb85 100644 --- a/tests/mq/test_esworker_sns_sqs.py +++ b/tests/mq/test_esworker_sns_sqs.py @@ -33,7 +33,7 @@ def setup(self): 'plugincheckfrequency': 120, } ) - self.consumer = taskConsumer(mq_conn, task_queue, es_connection, options) + self.consumer = taskConsumer(mq_conn, es_connection, options) def search_and_verify_event(self, expected_event): self.refresh('events') From 17ab5bbb30284ee736b375a509dc769c5bae2018 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Wed, 3 Jul 2019 17:10:29 -0500 Subject: [PATCH 57/63] Fixup cloudtrail worker to no longer use old boto version --- mq/esworker_cloudtrail.py | 137 +++++++++----------------------------- 1 file changed, 33 insertions(+), 104 deletions(-) diff --git a/mq/esworker_cloudtrail.py b/mq/esworker_cloudtrail.py index 8ce2749ae..46713c76a 100755 --- a/mq/esworker_cloudtrail.py +++ b/mq/esworker_cloudtrail.py @@ -12,8 +12,7 @@ import socket from 
configlib import getConfig, OptionParser from datetime import datetime -import boto.sts -import boto.s3 +import boto3 import gzip from io import BytesIO import re @@ -44,89 +43,6 @@ hasUWSGI = False -class RoleManager: - def __init__(self, region_name='us-east-1', aws_access_key_id=None, aws_secret_access_key=None): - self.aws_access_key_id = aws_access_key_id - self.aws_secret_access_key = aws_secret_access_key - self.credentials = {} - self.session_credentials = None - self.session_conn_sts = None - try: - self.local_conn_sts = boto.sts.connect_to_region( - **get_aws_credentials( - region_name, - self.aws_access_key_id, - self.aws_secret_access_key)) - except Exception as e: - logger.error("Unable to connect to STS due to exception {0}".format(e)) - raise - - if self.aws_access_key_id is not None or self.aws_secret_access_key is not None: - # We're using API credentials not an IAM Role - try: - if self.session_credentials is None or self.session_credentials.is_expired(): - self.session_credentials = self.local_conn_sts.get_session_token() - except Exception as e: - logger.error("Unable to get session token due to exception {0}".format(e)) - raise - try: - creds = get_aws_credentials( - region_name, - self.session_credentials.access_key, - self.session_credentials.secret_key, - self.session_credentials.session_token) if self.session_credentials else {} - self.session_conn_sts = boto.sts.connect_to_region(**creds) - except Exception as e: - logger.error("Unable to connect to STS with session token due to exception {0}".format(e)) - raise - self.conn_sts = self.session_conn_sts - else: - self.conn_sts = self.local_conn_sts - - def assume_role(self, - role_arn, - role_session_name='unknown', - policy=None): - '''Return a boto.sts.credential.Credential object given a role_arn. - First check if a Credential oject exists in the local self.credentials - cache that is not expired. 
If there isn't one, assume the role of role_arn - store the Credential in the credentials cache and return it''' - logger.debug("Connecting to sts") - if role_arn in self.credentials: - if not self.credentials[role_arn] or not self.credentials[role_arn].is_expired(): - # Return the cached value if it's False (indicating a permissions issue) or if - # it hasn't expired. - return self.credentials[role_arn] - try: - self.credentials[role_arn] = self.conn_sts.assume_role( - role_arn=role_arn, - role_session_name=role_session_name, - policy=policy).credentials - logger.debug("Assumed new role with credential %s" % self.credentials[role_arn].to_dict()) - except Exception as e: - logger.error("Unable to assume role {0} due to exception {1}".format(role_arn, e)) - self.credentials[role_arn] = False - return self.credentials[role_arn] - - def get_credentials(self, - role_arn, - role_session_name='unknown', - policy=None): - '''Assume the role of role_arn, and return a credential dictionary for that role''' - credential = self.assume_role(role_arn, - role_session_name, - policy) - return self.get_credential_arguments(credential) - - def get_credential_arguments(self, credential): - '''Given a boto.sts.credential.Credential object, return a dictionary of get_credential_arguments - usable as kwargs with a boto connect method''' - return { - 'aws_access_key_id': credential.access_key, - 'aws_secret_access_key': credential.secret_key, - 'security_token': credential.session_token} if credential else {} - - def keyMapping(aDict): '''map common key/fields to a normalized structure, explicitly typed when possible to avoid schema changes for upsteam consumers @@ -279,10 +195,7 @@ class taskConsumer(object): def __init__(self, queue, esConnection): self.sqs_queue = queue self.esConnection = esConnection - self.s3_connection = None - # This value controls how long we sleep - # between reauthenticating and getting a new set of creds - self.flush_wait_time = 1800 + self.s3_client = None 
self.authenticate() # Run thread to flush s3 credentials @@ -291,16 +204,35 @@ def __init__(self, queue, esConnection): reauthenticate_thread.start() def authenticate(self): + # This value controls how long we sleep + # between reauthenticating and getting a new set of creds + # eventually this gets set by aws response + self.flush_wait_time = 1800 if options.cloudtrail_arn not in ['', 'cloudtrail_arn']: - role_manager = RoleManager(**get_aws_credentials( - options.region, - options.accesskey, - options.secretkey)) - role_manager.assume_role(options.cloudtrail_arn) - role_creds = role_manager.get_credentials(options.cloudtrail_arn) + client = boto3.client( + 'sts', + aws_access_key_id=options.accesskey, + aws_secret_access_key=options.secretkey + ) + response = client.assume_role( + RoleArn=options.cloudtrail_arn, + RoleSessionName='MozDef-CloudTrail-Reader', + ) + role_creds = { + 'aws_access_key_id': response['Credentials']['AccessKeyId'], + 'aws_secret_access_key': response['Credentials']['SecretAccessKey'], + 'aws_session_token': response['Credentials']['SessionToken'] + } + current_time = toUTC(datetime.now()) + # Let's remove 3 seconds from the flush wait time just in case + self.flush_wait_time = (response['Credentials']['Expiration'] - current_time).seconds - 3 else: role_creds = {} - self.s3_connection = boto.connect_s3(**role_creds) + self.s3_client = boto3.client( + 's3', + region_name=options.region, + **role_creds + ) def reauth_timer(self): while True: @@ -308,10 +240,9 @@ def reauth_timer(self): logger.debug('Recycling credentials and reassuming role') self.authenticate() - def process_file(self, s3file): - logger.debug("Fetching %s" % s3file.name) - compressedData = s3file.read() - databuf = BytesIO(compressedData) + def parse_s3_file(self, s3_obj): + compressed_data = s3_obj['Body'].read() + databuf = BytesIO(compressed_data) gzip_file = gzip.GzipFile(fileobj=databuf) json_logs = json.loads(gzip_file.read()) return json_logs['Records'] @@ -343,10 
+274,8 @@ def run(self): s3_log_files = message_json['s3ObjectKey'] for log_file in s3_log_files: logger.debug('Downloading and parsing ' + log_file) - bucket = self.s3_connection.get_bucket(message_json['s3Bucket']) - - log_file_lookup = bucket.lookup(log_file) - events = self.process_file(log_file_lookup) + s3_obj = self.s3_client.get_object(Bucket=message_json['s3Bucket'], Key=log_file) + events = self.parse_s3_file(s3_obj) for event in events: self.on_message(event) From dd23f77c5538ba0fcb120042266090af193babb5 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Wed, 3 Jul 2019 17:11:05 -0500 Subject: [PATCH 58/63] Remove boto and botocore requirements --- requirements.txt | 2 -- 1 file changed, 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 10d929d38..61e181d82 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,9 +1,7 @@ amqp==2.2.2 anyjson==0.3.3 appdirs==1.4.0 -boto==2.49.0 boto3==1.7.67 -botocore==1.10.67 bottle==0.12.4 celery==4.1.0 celery[sqs]==4.1.0 From 721675cfbf8519098f611d3c09aff7c46a54ff5d Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Mon, 8 Jul 2019 10:55:19 -0500 Subject: [PATCH 59/63] Update fluentd plugin --- mq/plugins/fluentdSqsFixup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mq/plugins/fluentdSqsFixup.py b/mq/plugins/fluentdSqsFixup.py index c2e5eebe4..2201a0229 100644 --- a/mq/plugins/fluentdSqsFixup.py +++ b/mq/plugins/fluentdSqsFixup.py @@ -113,7 +113,8 @@ def onMessage(self, message, metadata): # Any remaining keys which aren't mandatory fields should be moved # to details # https://mozdef.readthedocs.io/en/latest/usage.html#mandatory-fields - for key in message.keys(): + original_keys = list(message.keys()) + for key in original_keys: if key not in [ 'summary', 'utctimestamp', From ac3b27977c217f16a3b2ea6af24a1661cdf4aa17 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Mon, 8 Jul 2019 12:31:50 -0500 Subject: [PATCH 60/63] Fixup lower_keys plugin --- 
mq/plugins/lower_keys.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mq/plugins/lower_keys.py b/mq/plugins/lower_keys.py index 75f08664c..9b97e472c 100644 --- a/mq/plugins/lower_keys.py +++ b/mq/plugins/lower_keys.py @@ -18,7 +18,8 @@ def __init__(self): def onMessage(self, message, metadata): def renameKeysToLower(message): if isinstance(message, dict): - for key in message.keys(): + message_keys = list(message.keys()) + for key in message_keys: message[key.lower()] = message.pop(key) if isinstance(message[key.lower()], dict) or isinstance(message[key.lower()], list): message[key.lower()] = renameKeysToLower(message[key.lower()]) From 95611c9bc43699241eb69553eb9fbbf9022d3c21 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Mon, 8 Jul 2019 12:47:45 -0500 Subject: [PATCH 61/63] Update mozdef_util version to 3.0.0 --- mozdef_util/HISTORY.rst | 2 +- mozdef_util/setup.py | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/mozdef_util/HISTORY.rst b/mozdef_util/HISTORY.rst index 79c4865c6..c877ad6de 100644 --- a/mozdef_util/HISTORY.rst +++ b/mozdef_util/HISTORY.rst @@ -81,7 +81,7 @@ Add is_ip utility function * Fixed static file includes in python package -3.0.0 (2019-??-??) 
+3.0.0 (2019-07-08) ------------------ * Updated to work with python3 diff --git a/mozdef_util/setup.py b/mozdef_util/setup.py index 79de64352..a6ace8259 100644 --- a/mozdef_util/setup.py +++ b/mozdef_util/setup.py @@ -59,6 +59,6 @@ test_suite='tests', tests_require=[], url='https://github.com/mozilla/MozDef/tree/master/lib', - version='3.0.0-dev', + version='3.0.0', zip_safe=False, ) diff --git a/requirements.txt b/requirements.txt index 61e181d82..c8dea40ce 100644 --- a/requirements.txt +++ b/requirements.txt @@ -30,7 +30,7 @@ jmespath==0.9.3 kombu==4.1.0 meld3==1.0.2 mozdef-client==1.0.11 -mozdef-util==3.0.0.dev0 +mozdef-util==3.0.0 netaddr==0.7.19 nose==1.3.7 oauth2client==1.4.12 From 84619ad1904ff25918b08d5f3cfc8f1e0c05c725 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Mon, 8 Jul 2019 12:54:01 -0500 Subject: [PATCH 62/63] Update changelog for v3.0.0 --- CHANGELOG | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/CHANGELOG b/CHANGELOG index e574d1c67..3faa8d58c 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -5,6 +5,14 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ## [Unreleased] +## [v3.0.0] - 2019-07-08 +### Added +- Support for Python3 + +### Removed +- Support for Python2 + + ## [v2.0.1] - 2019-07-08 ### Fixed - Ensure all print statements use parenthesis @@ -123,7 +131,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Added checks on sending SQS messages to only accept intra-account messages - Improved docker performance and disk space requirements -[Unreleased]: https://github.com/mozilla/MozDef/compare/v2.0.1...HEAD +[Unreleased]: https://github.com/mozilla/MozDef/compare/v3.0.0...HEAD +[v3.0.0]: https://github.com/mozilla/MozDef/compare/v2.0.1...v3.0.0 [v2.0.1]: https://github.com/mozilla/MozDef/compare/v2.0.0...v2.0.1 [v2.0.0]: https://github.com/mozilla/MozDef/compare/v1.40.0...v2.0.0 [v1.40.0]: 
https://github.com/mozilla/MozDef/compare/v1.40.0...v1.39.0 From ccea6f7180f550025ddf38f0136efc422e1a24e9 Mon Sep 17 00:00:00 2001 From: Brandon Myers Date: Mon, 8 Jul 2019 12:56:43 -0500 Subject: [PATCH 63/63] Update changelog with note about boto deprecation --- CHANGELOG | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG b/CHANGELOG index 3faa8d58c..81158bdb5 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -11,6 +11,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ### Removed - Support for Python2 +- Usage of boto (boto3 now preferred) ## [v2.0.1] - 2019-07-08