Skip to content

Commit

Permalink
Merge pull request #545 from remind101/threaded-tail
Browse files Browse the repository at this point in the history
Implement --tail with threads
  • Loading branch information
ejholmes authored Mar 1, 2018
2 parents 551214f + 7e14eee commit 6b7fec6
Show file tree
Hide file tree
Showing 3 changed files with 43 additions and 9 deletions.
11 changes: 6 additions & 5 deletions stacker/plan.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import logging
import time
import uuid
import multiprocessing
import threading

from .util import stack_template_key_name
from .exceptions import (
Expand Down Expand Up @@ -65,20 +65,21 @@ def run(self):
skipped.
"""

stop_watcher = threading.Event()
watcher = None
if self.watch_func:
watcher = multiprocessing.Process(
watcher = threading.Thread(
target=self.watch_func,
args=(self.stack,)
args=(self.stack, stop_watcher)
)
watcher.start()

try:
while not self.done:
self._run_once()
finally:
if watcher and watcher.is_alive():
watcher.terminate()
if watcher:
stop_watcher.set()
watcher.join()
return self.ok

Expand Down
10 changes: 6 additions & 4 deletions stacker/providers/aws/default.py
Original file line number Diff line number Diff line change
Expand Up @@ -514,7 +514,7 @@ def is_stack_rolling_back(self, stack, **kwargs):
def is_stack_failed(self, stack, **kwargs):
return self.get_stack_status(stack) in self.FAILED_STATUSES

def tail_stack(self, stack, retries=0, **kwargs):
def tail_stack(self, stack, cancel, retries=0, **kwargs):
def log_func(e):
event_args = [e['ResourceStatus'], e['ResourceType'],
e.get('ResourceStatusReason', None)]
Expand All @@ -528,14 +528,15 @@ def log_func(e):

try:
self.tail(stack.fqn,
cancel=cancel,
log_func=log_func,
include_initial=False)
except botocore.exceptions.ClientError as e:
if "does not exist" in e.message and retries < MAX_TAIL_RETRIES:
# stack might be in the process of launching, wait for a second
# and try again
time.sleep(1)
self.tail_stack(stack, retries=retries + 1, **kwargs)
self.tail_stack(stack, cancel, retries=retries + 1, **kwargs)
else:
raise

Expand Down Expand Up @@ -565,7 +566,7 @@ def get_events(self, stackname):
time.sleep(1)
return reversed(sum(event_list, []))

def tail(self, stack_name, log_func=_tail_print, sleep_time=5,
def tail(self, stack_name, cancel, log_func=_tail_print, sleep_time=5,
include_initial=True):
"""Show and then tail the event log"""
# First dump the full list of events in chronological order and keep
Expand All @@ -584,7 +585,8 @@ def tail(self, stack_name, log_func=_tail_print, sleep_time=5,
if e['EventId'] not in seen:
log_func(e)
seen.add(e['EventId'])
time.sleep(sleep_time)
if cancel.wait(sleep_time):
return

def destroy_stack(self, stack, **kwargs):
logger.debug("Destroying stack: %s" % (self.get_stack_name(stack)))
Expand Down
31 changes: 31 additions & 0 deletions tests/suite.bats
Original file line number Diff line number Diff line change
Expand Up @@ -779,3 +779,34 @@ EOF
assert_has_line "${STACKER_NAMESPACE}-vpc2: submitted (creating new stack)"
assert_has_line "${STACKER_NAMESPACE}-vpc2: complete (creating new stack)"
}

# End-to-end test: `stacker build --tail` should stream CloudFormation
# events for every stack while it is being created (exercises the
# threaded tail_stack / watch_func path added in this commit).
@test "stacker build - tailing" {
# Requires real AWS credentials/environment; test is skipped otherwise.
needs_aws

# Two dummy stacks, with bastion depending on vpc, so we can verify that
# events are tailed for both stacks across the dependency ordering.
# NOTE(review): heredoc body below is emitted verbatim as the stacker
# config — do not reformat it.
config() {
cat <<EOF
namespace: ${STACKER_NAMESPACE}
stacks:
- name: vpc
class_path: stacker.tests.fixtures.mock_blueprints.Dummy
- name: bastion
class_path: stacker.tests.fixtures.mock_blueprints.Dummy
requires: [vpc]
EOF
}

# Tear down the stacks created by this test regardless of outcome.
teardown() {
stacker destroy --force <(config)
}

# Create the new stacks.
# presumably `stacker` is a suite-level wrapper that captures $status and
# output for the assert helpers below — verify against the bats helpers.
stacker build --tail <(config)
assert "$status" -eq 0
assert_has_line "Using default AWS provider mode"
# Each stack must announce tailing and show its lifecycle events.
assert_has_line "Tailing stack: ${STACKER_NAMESPACE}-vpc"
assert_has_line "${STACKER_NAMESPACE}-vpc: submitted (creating new stack)"
assert_has_line "${STACKER_NAMESPACE}-vpc: complete (creating new stack)"
assert_has_line "Tailing stack: ${STACKER_NAMESPACE}-bastion"
assert_has_line "${STACKER_NAMESPACE}-bastion: submitted (creating new stack)"
assert_has_line "${STACKER_NAMESPACE}-bastion: complete (creating new stack)"
}

0 comments on commit 6b7fec6

Please sign in to comment.