multiple_output_pardo_test.py
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Test for the multiple_output_pardo example."""
import logging
import re
import tempfile
import unittest
from apache_beam.examples.cookbook import multiple_output_pardo
class MultipleOutputParDo(unittest.TestCase):

  SAMPLE_TEXT = 'A whole new world\nA new fantastic point of view'

  EXPECTED_SHORT_WORDS = [('A', 2), ('new', 2), ('of', 1)]
  EXPECTED_WORDS = [
      ('whole', 1), ('world', 1), ('fantastic', 1), ('point', 1), ('view', 1)]

  def create_temp_file(self, contents):
    # Write the sample text to a temp file the pipeline reads as input.
    # Encode to bytes: NamedTemporaryFile opens in binary mode on Python 3.
    with tempfile.NamedTemporaryFile(delete=False) as f:
      f.write(contents.encode('utf-8'))
      return f.name

  def get_wordcount_results(self, temp_path):
    # Parse lines of the form "<word>: <count>" from an output shard.
    results = []
    with open(temp_path) as result_file:
      for line in result_file:
        match = re.search(r'([A-Za-z]+): ([0-9]+)', line)
        if match is not None:
          results.append((match.group(1), int(match.group(2))))
    return results

  def test_multiple_output_pardo(self):
    temp_path = self.create_temp_file(self.SAMPLE_TEXT)
    result_prefix = temp_path + '.result'

    multiple_output_pardo.run([
        '--input=%s*' % temp_path,
        '--output=%s' % result_prefix]).wait_until_finish()

    # The character-count output is a single number: every character in the
    # sample text except the newlines.
    expected_char_count = len(''.join(self.SAMPLE_TEXT.split('\n')))
    with open(result_prefix + '-chars-00000-of-00001') as f:
      contents = f.read()
      self.assertEqual(expected_char_count, int(contents))

    # Short words (3 characters or fewer) and longer words land in separate
    # output files.
    short_words = self.get_wordcount_results(
        result_prefix + '-short-words-00000-of-00001')
    self.assertEqual(sorted(short_words), sorted(self.EXPECTED_SHORT_WORDS))

    words = self.get_wordcount_results(result_prefix + '-words-00000-of-00001')
    self.assertEqual(sorted(words), sorted(self.EXPECTED_WORDS))


if __name__ == '__main__':
  logging.getLogger().setLevel(logging.INFO)
  unittest.main()
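
For context, the pipeline under test emits three outputs (characters, short words, words) from a single ParDo. Below is a minimal, illustrative sketch of the tagged-output pattern (beam.pvalue.TaggedOutput plus ParDo.with_outputs) that such a pipeline relies on; the DoFn, tag names, and word-splitting regex here are assumptions for illustration, not the example's exact implementation.

# Illustrative sketch only -- not the multiple_output_pardo example itself.
import re

import apache_beam as beam
from apache_beam import pvalue


class SplitWordsFn(beam.DoFn):
  """Sends long words to the main output and short words to a tagged output."""

  OUTPUT_TAG_SHORT = 'short_words'  # illustrative tag name

  def process(self, element):
    for word in re.findall(r"[A-Za-z']+", element):
      if len(word) <= 3:
        # Route short words to the additional output identified by the tag.
        yield pvalue.TaggedOutput(self.OUTPUT_TAG_SHORT, word)
      else:
        # Everything else goes to the main (untagged) output.
        yield word


with beam.Pipeline() as p:
  outputs = (
      p
      | beam.Create(['A whole new world', 'A new fantastic point of view'])
      | beam.ParDo(SplitWordsFn()).with_outputs(
          SplitWordsFn.OUTPUT_TAG_SHORT, main='words'))

  # Each tag yields its own PCollection that downstream transforms consume
  # independently, which is what lets the tested pipeline write separate
  # result files per output.
  _ = outputs.words | 'PrintWords' >> beam.Map(print)
  _ = outputs[SplitWordsFn.OUTPUT_TAG_SHORT] | 'PrintShortWords' >> beam.Map(print)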