save test artifacts (singer-io#104)
* save test artifacts

* use logger in tap-tester tests

* add missing import to start date test

Co-authored-by: kspeer <[email protected]>
kspeer825 and kspeer authored Aug 8, 2022
1 parent 85d13cd commit 55bffa3
Showing 6 changed files with 21 additions and 30 deletions.
4 changes: 4 additions & 0 deletions .circleci/config.yml
@@ -35,10 +35,14 @@ jobs:
           command: |
             aws s3 cp s3://com-stitchdata-dev-deployment-assets/environments/tap-tester/tap_tester_sandbox dev_env.sh
             source dev_env.sh
+            mkdir /tmp/${CIRCLE_PROJECT_REPONAME}
+            export STITCH_CONFIG_DIR=/tmp/${CIRCLE_PROJECT_REPONAME}
             source /usr/local/share/virtualenvs/tap-tester/bin/activate
             run-test --tap=tap-bing-ads tests
       - slack/notify-on-failure:
           only_for_branches: master
+      - store_artifacts:
+          path: /tmp/tap-bing-ads
 workflows:
   version: 2
   commit: &commit_jobs
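
For context, a minimal sketch (not part of this commit) of how a test run could drop debugging output into the directory exported above. The file name and helper logic are illustrative assumptions; STITCH_CONFIG_DIR and the /tmp/tap-bing-ads artifact path come from the config change itself.

import os

# Illustrative sketch: write a debug file into the directory exported as
# STITCH_CONFIG_DIR (/tmp/${CIRCLE_PROJECT_REPONAME} in the job above), which is
# the same /tmp/tap-bing-ads path the new store_artifacts step uploads after the build.
artifact_dir = os.environ.get("STITCH_CONFIG_DIR", "/tmp/tap-bing-ads")
os.makedirs(artifact_dir, exist_ok=True)

with open(os.path.join(artifact_dir, "sync_debug.log"), "w") as handle:
    handle.write("example artifact for CircleCI store_artifacts\n")
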
29 changes: 13 additions & 16 deletions tests/base.py
@@ -2,15 +2,15 @@
 Setup expectations for test sub classes
 Run discovery for as a prerequisite for most tests
 """
-import unittest
 import backoff
 import copy
 import os
 from datetime import timedelta
 from datetime import datetime as dt
 from datetime import timezone as tz
 
-from tap_tester import connections, menagerie, runner
+from tap_tester import connections, menagerie, runner, LOGGER
+from tap_tester.base_case import BaseCase
 
 def backoff_wait_times():
     """Create a generator of wait times as [30, 60, 120, 240, 480, ...]"""
@@ -22,7 +22,7 @@ def __init__(self, message):
         super().__init__(message)
 
 
-class BingAdsBaseTest(unittest.TestCase):
+class BingAdsBaseTest(BaseCase):
     """
     Setup expectations for test sub classes
     Run discovery for as a prerequisite for most tests
@@ -222,7 +222,7 @@ def run_check_mode(self, conn_id):
             menagerie.verify_check_exit_status(self, exit_status, check_job_name)
         except AssertionError as e:
             if exit_status['discovery_error_message']:
-                print("*******************RETRYING CHECK FOR DISCOVERY FAILURE*******************")
+                LOGGER.warn("*******************RETRYING CHECK FOR DISCOVERY FAILURE*******************")
                 raise RetryableTapError(e)
 
             raise
@@ -233,7 +233,7 @@ def verify_check_mode(self, conn_id):
 
         found_catalog_names = set(map(lambda c: c['tap_stream_id'], found_catalogs))
         self.assertSetEqual(self.expected_streams(), found_catalog_names, msg="discovered schemas do not match")
-        print("discovered schemas are OK")
+        LOGGER.info("discovered schemas are OK")
         return found_catalogs
 
     def run_and_verify_check_mode(self, conn_id):
@@ -267,7 +267,7 @@ def run_and_verify_sync(self, conn_id, state):
             menagerie.verify_sync_exit_status(self, exit_status, sync_job_name)
         except AssertionError as e:
             if exit_status['discovery_error_message'] or exit_status['tap_error_message']:
-                print("*******************RETRYING SYNC FOR TAP/DISCOVERY FAILURE*******************")
+                LOGGER.warn("*******************RETRYING SYNC FOR TAP/DISCOVERY FAILURE*******************")
                 raise RetryableTapError(e)
 
             raise
@@ -279,7 +279,7 @@ def run_and_verify_sync(self, conn_id, state):
             sum(sync_record_count.values()), 0,
             msg="failed to replicate any data: {}".format(sync_record_count)
         )
-        print("total replicated row count: {}".format(sum(sync_record_count.values())))
+        LOGGER.info("total replicated row count: %s", sum(sync_record_count.values()))
 
         return sync_record_count
 
@@ -372,7 +372,7 @@ def perform_and_verify_table_and_field_selection(self,
 
             # Verify all testable streams are selected
             selected = catalog_entry.get('annotated-schema').get('selected')
-            print("Validating selection on {}: {}".format(cat['stream_name'], selected))
+            LOGGER.info("Validating selection on %s: %s", cat['stream_name'], selected)
             if cat['stream_name'] not in expected_selected:
                 self.assertFalse(selected, msg="Stream selected, but not testable.")
                 continue # Skip remaining assertions if we aren't selecting this stream
@@ -382,8 +382,7 @@
                 # Verify all fields within each selected stream are selected
                 for field, field_props in catalog_entry.get('annotated-schema').get('properties').items():
                     field_selected = field_props.get('selected')
-                    print("\tValidating selection on {}.{}: {}".format(
-                        cat['stream_name'], field, field_selected))
+                    LOGGER.info("\tValidating selection on %s.%s: %s", cat['stream_name'], field, field_selected)
                     self.assertTrue(field_selected, msg="Field not selected.")
             else:
                 # Verify only automatic fields are selected
@@ -397,7 +396,7 @@ def get_selected_fields_from_metadata(metadata):
         for field in metadata:
             is_field_metadata = len(field['breadcrumb']) > 1
             if field['metadata'].get('inclusion') is None and is_field_metadata: # BUG_SRCE-4313 remove when addressed
-                print("Error {} has no inclusion key in metadata".format(field)) # BUG_SRCE-4313 remove when addressed
+                LOGGER.info("Error %s has no inclusion key in metadata", field) # BUG_SRCE-4313 remove when addressed
                 continue # BUG_SRCE-4313 remove when addressed
             inclusion_automatic_or_selected = (
                 field['metadata']['selected'] is True or \
@@ -503,7 +502,7 @@ def perform_and_verify_adjusted_selection(self,
 
             # Verify intended streams are selected
             selected = catalog_entry.get('annotated-schema').get('selected')
-            print("Validating selection on {}: {}".format(cat['tap_stream_id'], selected))
+            LOGGER.info("Validating selection on %s: %s", cat['tap_stream_id'], selected)
             if cat['stream_name'] not in expected_selected:
                 continue # Skip remaining assertions if we aren't selecting this stream
 
@@ -513,15 +512,13 @@ def perform_and_verify_adjusted_selection(self,
                 # Verify all fields within each selected stream are selected
                 for field, field_props in catalog_entry.get('annotated-schema').get('properties').items():
                     field_selected = field_props.get('selected')
-                    print("\tValidating selection on {}.{}: {}".format(
-                        cat['stream_name'], field, field_selected))
+                    LOGGER.info("\tValidating selection on %s.%s: %s", cat['stream_name'], field, field_selected)
                     self.assertTrue(field_selected, msg="Field not selected.")
             else:
                 for field, field_props in catalog_entry.get('annotated-schema').get('properties').items():
                     field_selected = field_props.get('selected')
                     if field_selected:
-                        print("\tValidating selection on {}.{}: {}".format(
-                            cat['stream_name'], field, field_selected))
+                        LOGGER.info("\tValidating selection on %s.%s: %s", cat['stream_name'], field, field_selected)
 
                 # Verify only automatic fields are selected
                 # Uncomment lines below to reporduce BUG_SRCE-4313 from automatic fields tests
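
As a rough sketch of the pattern the base.py changes apply (the class and method below are hypothetical, not code from this repository): subclass tap_tester's BaseCase and log through its shared LOGGER with %-style arguments instead of print() plus str.format().

from tap_tester import LOGGER
from tap_tester.base_case import BaseCase


class ExampleBaseTest(BaseCase):
    """Hypothetical subclass shown only to illustrate the LOGGER usage."""

    def log_replication_total(self, record_count_by_stream):
        total = sum(record_count_by_stream.values())
        # Arguments are passed separately so the string is only built if the
        # record is actually emitted, matching the calls introduced above.
        LOGGER.info("total replicated row count: %s", total)
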
3 changes: 0 additions & 3 deletions tests/test_bookmarks.py
@@ -1,16 +1,13 @@
 import datetime
 import dateutil.parser
 import pytz
-import singer
 
 import tap_tester.connections as connections
 import tap_tester.menagerie as menagerie
 import tap_tester.runner as runner
 
 from base import BingAdsBaseTest
 
-LOGGER = singer.get_logger()
-
 
 class TestBingAdsBookmarks(BingAdsBaseTest):
 
3 changes: 0 additions & 3 deletions tests/test_bookmarks_reports.py
@@ -1,16 +1,13 @@
 import datetime
 import dateutil.parser
 import pytz
-import singer
 
 import tap_tester.connections as connections
 import tap_tester.menagerie as menagerie
 import tap_tester.runner as runner
 
 from base import BingAdsBaseTest
 
-LOGGER = singer.get_logger()
-
 
 class TestBingAdsBookmarksReports(BingAdsBaseTest):
 
8 changes: 4 additions & 4 deletions tests/test_start_date.py
@@ -4,7 +4,7 @@
 import tap_tester.connections as connections
 import tap_tester.runner as runner
 import tap_tester.menagerie as menagerie
-
+from tap_tester import LOGGER
 
 from base import BingAdsBaseTest
 
@@ -109,14 +109,14 @@ def start_date_test(self, streams_to_fields_with_exclusions):
 
         replicated_row_count_1 = sum(record_count_by_stream_1.values())
         self.assertGreater(replicated_row_count_1, 0, msg="failed to replicate any data: {}".format(record_count_by_stream_1))
-        print("total replicated row count: {}".format(replicated_row_count_1))
+        LOGGER.info("total replicated row count: %s", replicated_row_count_1)
         synced_records_1 = runner.get_records_from_target_output()
 
         ##########################################################################
         ### Update START DATE Between Syncs
         ##########################################################################
 
-        print("REPLICATION START DATE CHANGE: {} ===>>> {} ".format(self.start_date, self.start_date_2))
+        LOGGER.info("REPLICATION START DATE CHANGE: %s ===>>> %s ", self.start_date, self.start_date_2)
         self.start_date = self.start_date_2
 
         ##########################################################################
@@ -148,7 +148,7 @@ def start_date_test(self, streams_to_fields_with_exclusions):
 
         replicated_row_count_2 = sum(record_count_by_stream_2.values())
         self.assertGreater(replicated_row_count_2, 0, msg="failed to replicate any data")
-        print("total replicated row count: {}".format(replicated_row_count_2))
+        LOGGER.info("total replicated row count: %s", replicated_row_count_2)
         synced_records_2 = runner.get_records_from_target_output()
 
         for stream in self.expected_sync_streams():
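
A short aside on the %-style calls used throughout these test changes: the arguments are handed to the logger rather than pre-formatted, so interpolation is deferred until a record is actually emitted. A minimal standalone illustration with the standard library logger (tap_tester's LOGGER is assumed to behave the same way, since it accepts the same %-style call signature):

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("tap-bing-ads.tests")

replicated_row_count = 42  # placeholder value for illustration
# Deferred formatting: "%s" is only interpolated if this INFO record is emitted.
logger.info("total replicated row count: %s", replicated_row_count)
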
4 changes: 0 additions & 4 deletions tests/test_sync_rows.py
@@ -8,10 +8,6 @@
 import tap_tester.runner as runner
 from base import BingAdsBaseTest
 
-import singer
-from singer import metadata
-LOGGER = singer.get_logger()
-
 
 class BingAdsSyncRows(BingAdsBaseTest):
 
