bigquery_tornadoes_it_test.py
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""End-to-end test for Bigquery tornadoes example."""
import logging
import time
import unittest
from hamcrest.core.core.allof import all_of
from nose.plugins.attrib import attr
from apache_beam.examples.cookbook import bigquery_tornadoes
from apache_beam.io.gcp.tests.bigquery_matcher import BigqueryMatcher
from apache_beam.testing.pipeline_verifiers import PipelineStateMatcher
from apache_beam.testing.test_pipeline import TestPipeline


class BigqueryTornadoesIT(unittest.TestCase):

  # Enable nose tests running in parallel.
  _multiprocess_can_split_ = True

  # The default checksum is a SHA-1 hash computed over the sorted rows read
  # from the expected BigQuery table.
  DEFAULT_CHECKSUM = '83789a7c1bca7959dcf23d3bc37e9204e594330f'
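
  # For orientation, a minimal sketch of how such a checksum can be derived,
  # assuming the matcher hashes the sorted string form of the result rows
  # (hypothetical helper for illustration only, not used by this test):
  #
  #   import hashlib
  #
  #   def compute_checksum(rows):
  #     digest = hashlib.sha1()
  #     for row in sorted(str(row) for row in rows):
  #       digest.update(row.encode('utf-8'))
  #     return digest.hexdigest()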

  @attr('IT')
  def test_bigquery_tornadoes_it(self):
    test_pipeline = TestPipeline(is_integration_test=True)

    # Set extra options on the pipeline for test purposes: write to a
    # timestamped table so that concurrent runs do not collide.
    output_table = ('BigQueryTornadoesIT'
                    '.monthly_tornadoes_%s' % int(round(time.time() * 1000)))
    query = 'SELECT month, tornado_count FROM [%s]' % output_table
    pipeline_verifiers = [PipelineStateMatcher(),
                          BigqueryMatcher(
                              project=test_pipeline.get_option('project'),
                              query=query,
                              checksum=self.DEFAULT_CHECKSUM)]
    extra_opts = {'output': output_table,
                  'on_success_matcher': all_of(*pipeline_verifiers)}

    # Get pipeline options from the command-line argument
    # --test-pipeline-options and start the pipeline job by calling the
    # example's main function.
    bigquery_tornadoes.run(
        test_pipeline.get_full_options_as_args(**extra_opts))
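
    # Note (as understood from the Beam testing utilities, not stated in this
    # file): TestPipeline forwards 'on_success_matcher' through the pipeline
    # options, and the test runner evaluates the combined matcher after the
    # job finishes, failing the test if the pipeline did not reach the
    # expected state or the table checksum differs from DEFAULT_CHECKSUM.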


if __name__ == '__main__':
  logging.getLogger().setLevel(logging.INFO)
  unittest.main()
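
# Usage sketch (assumed invocation; exact flags depend on the environment and
# Beam version): the @attr('IT') marker lets nose select integration tests,
# and TestPipeline reads its options from --test-pipeline-options, e.g.:
#
#   python setup.py nosetests --attr=IT \
#       --test-pipeline-options="--runner=TestDataflowRunner
#                                --project=<your-gcp-project>
#                                --staging_location=gs://<bucket>/staging
#                                --temp_location=gs://<bucket>/tmp"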