diff --git a/bin/tinyfeedback b/bin/tinyfeedback index b8d5ce5..eccc55f 100755 --- a/bin/tinyfeedback +++ b/bin/tinyfeedback @@ -14,5 +14,5 @@ LOG_LEVEL = 'DEBUG' if __name__ == '__main__': - tinyfeedback.webserver.set_up_server(port=tinyfeedback.helper.PORT, + tinyfeedback.webserver.set_up_server(host=tinyfeedback.helper.HOST, port=tinyfeedback.helper.PORT, log_path=LOG_PATH, log_level=LOG_LEVEL) diff --git a/requirements.txt b/requirements.txt index 7ce8c7b..39f98cc 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,3 +3,5 @@ simplejson twisted txredisapi txroutes +numpy +pandas diff --git a/simple_tail.py b/simple_tail.py new file mode 100644 index 0000000..d7ee391 --- /dev/null +++ b/simple_tail.py @@ -0,0 +1,82 @@ +import os, time +import traceback + +class SimpleTail(object): + def __init__(self, filename): + self.filename = filename + self.file = None + self.inode = None + + def readline(self, sleep_on_empty=False): + line = None + try: + # 1) open the file if it's not open + if not self.file: + self.open_file() + + # 2) try to read a line of the file + line = self.file.readline() + + # 3a) if a line is read, update the timestamp of the last line read + if line: + self.last_read = time.time() + elif time.time() - self.last_read > 10: + # 3b) If no changes to the file in 10 seconds reopen it. + try: + self.file.close() + except: + traceback.print_exc() + + self.open_file() + except ValueError: + # 4) if we got a ValueError than it means that readline() was probably closed + try: + # 5) try to close it again + self.file.close() + except: + traceback.print_exc() + + # 6) reopen + self.open_file() + except: + traceback.print_exc() + time.sleep(5) + + # 7) if line is empty, and we sleep on empty lines.. 
sleep + if line == '' and sleep_on_empty: + time.sleep(1) + + # 8) if we threw an exception line will be None, set it to empty string + if line == None: + line = '' + + # 9) return value + return line + + def open_file(self): + self.file = open(self.filename,'r') + + st_results = os.stat(self.filename) + st_size = st_results[6] + inode = st_results[1] + + inode_has_not_changed = inode == self.inode or self.inode == None + + if inode_has_not_changed: + # seek to end of file if we just started tailing, or are + # + self.file.seek(st_size) + else: + # do not seek to the end of the file, since it's a new file which + # in theory contains new items to tail + pass + + # store last inode + self.inode = st_results[1] + self.last_read = time.time() + +if __name__ == '__main__': + a = SimpleTail("/var/log/test.log") + + while True: + print a.readline(sleep_on_empty=True) \ No newline at end of file diff --git a/tinyfeedback/helper.py b/tinyfeedback/helper.py index 706dc5a..0b42df6 100644 --- a/tinyfeedback/helper.py +++ b/tinyfeedback/helper.py @@ -1,70 +1,76 @@ import platform -import subprocess import time import urllib +import urllib2 + from twisted.web.client import getPage +import simple_tail PORT = 8000 HOST = '127.0.0.1' - def send_once(component, data_dict): url = 'http://%s:%s/data/%s' % (HOST, PORT, component) try: - urllib.urlopen(url, data=urllib.urlencode(data_dict)) + urllib2.urlopen(url, data=urllib.urlencode(data_dict), timeout=5) except IOError: # Failed to send, just keep going pass - -def send_once_using_twisted(component, data_dict): +def send_once_using_twisted(component, data_dict, timeout=35, ignore_errors=True): url = 'http://%s:%s/data/%s' % (HOST, PORT, component) d = getPage( - str(url), - method='POST', - postdata=urllib.urlencode(data_dict), - headers={'Content-Type':'application/x-www-form-urlencoded'}, - timeout=10, - ) + str(url), + method='POST', + postdata=urllib.urlencode(data_dict), + 
headers={'Content-Type':'application/x-www-form-urlencoded'}, + timeout=timeout) - # swallow errors - d.addErrback(lambda x: None) + if ignore_errors: + d.addErrback(lambda x: None) + return d def tail_monitor(component, log_filename, line_callback_func, data_arg={}, - format_data_callback_func=None, interval=60): + format_data_callback_func=None, interval=60, line_callback_args=None, + log_subcomponent='monitor'): url = 'http://%s:%s/data/%s' % (HOST, PORT, component) - initial_data = data_arg - current_data = data_arg.copy() - if is_osx(): - arguments = '-F' + arguments = '-F' + else: - arguments = '--follow=name' + arguments = '--follow=name' + + # initialize a copy of that initial data + initial_data = data_arg + current_data = initial_data.copy() - tail_process = subprocess.Popen(['tail', arguments, log_filename], - stdout=subprocess.PIPE) + tail = simple_tail.SimpleTail(log_filename) last_update = time.time() while True: - line = tail_process.stdout.readline() + line = tail.readline() - if line.strip() == '': + # sleep one second if EOF or '' + if '\0' in line or line.strip() == '': time.sleep(1) else: - line_callback_func(current_data, line) + if line_callback_args: + line_callback_func(current_data, line, **line_callback_args) + else: + line_callback_func(current_data, line) current_time = time.time() if current_time - last_update >= interval: - last_update = current_time + last_update = current_time if format_data_callback_func: current_data = format_data_callback_func(current_data) diff --git a/tinyfeedback/redis_model.py b/tinyfeedback/redis_model.py index bbcbe99..da97832 100644 --- a/tinyfeedback/redis_model.py +++ b/tinyfeedback/redis_model.py @@ -1,6 +1,7 @@ import re import time +import redis import simplejson from twisted.internet import defer, protocol, reactor import txredisapi @@ -18,9 +19,10 @@ def __init__(self, host): @defer.inlineCallbacks def connect(self, poolsize=None): if not poolsize: - poolsize = 10 + poolsize = 50 - self.__redis = 
yield txredisapi.ConnectionPool(self.__host, poolsize=poolsize) + self.__redis = yield txredisapi.ConnectionPool(self.__host, + poolsize=poolsize) @defer.inlineCallbacks def add_username(self, username): @@ -37,12 +39,16 @@ def get_graphs_per_user(self): user_key = 'tinyfeedback:usernames' usernames = yield self.__redis.smembers(user_key) + graphs_per_user = [] + + if usernames is None or len(usernames) == 0: + defer.returnValue(graphs_per_user) + keys = ['tinyfeedback:graph:%s:all_graphs' % each_username for \ each_username in usernames] user_graphs = yield self.__redis.mget(keys) - graphs_per_user = [] for i, each_username in enumerate(usernames): if not user_graphs[i]: num_graphs = 0 @@ -103,7 +109,9 @@ def remove_graph(self, username, title): continue @defer.inlineCallbacks - def update_graph(self, username, title, timescale, fields, graph_type): + def update_graph(self, username, title, timescale, fields, graph_type, + updates_infrequently): + key = 'tinyfeedback:graph:%s:all_graphs' % username fields.sort() @@ -134,6 +142,7 @@ def update_graph(self, username, title, timescale, fields, graph_type): graphs[title]['timescale'] = timescale graphs[title]['fields'] = fields graphs[title]['graph_type'] = graph_type + graphs[title]['updates_infrequently'] = updates_infrequently yield transaction.set(key, simplejson.dumps(graphs)) @@ -171,23 +180,261 @@ def update_ordering(self, username, new_ordering): continue +class BlockingData(object): + def __init__(self, host): + self.__redis = redis.StrictRedis(host=host) + + def __get_max_min(self, subset): + min_value = min(subset) + max_value = max(subset) + + if subset.index(min_value) < subset.index(max_value): + return min_value, max_value + else: + return max_value, min_value + + def __return_up_to_date_data(self, pipeline, component, metric, update_value=None): + keys = ['tinyfeedback:data:component:%s:metric:%s:last_updated' % (component, metric), + 'tinyfeedback:data:component:%s:metric:%s:6h' % (component, metric), 
+ 'tinyfeedback:data:component:%s:metric:%s:36h' % (component, metric), + 'tinyfeedback:data:component:%s:metric:%s:1w' % (component, metric), + 'tinyfeedback:data:component:%s:metric:%s:1m' % (component, metric), + 'tinyfeedback:data:component:%s:metric:%s:6m' % (component, metric), + ] + + data = pipeline.mget(keys) + data_changed = False + + # Load the data in the format we want + if data[0] is None: + last_updated = int(time.time()) / 60 * 60 + else: + last_updated = int(data[0]) + + if data[1] is None: + info_6h = [0] * 360 # 1 min each + else: + info_6h = simplejson.loads(data[1]) + + # HACK: backwards compatability + if isinstance(info_6h, dict): + last_updated = info_6h['last_updated'] + info_6h = info_6h['data'] + data_changed = True + + if data[2] is None: + info_36h = [0] * 432 # 5 min each + else: + info_36h = simplejson.loads(data[2]) + + # HACK: backwards compatability + if isinstance(info_36h, dict): + info_36h = info_36h['data'] + data_changed = True + + if data[3] is None: + info_1w = [0] * 336 # 30 min each + else: + info_1w = simplejson.loads(data[3]) + + # HACK: backwards compatability + if isinstance(info_1w, dict): + info_1w = info_1w['data'] + data_changed = True + + if data[4] is None: + info_1m = [0] * 360 # 2 hours each + else: + info_1m = simplejson.loads(data[4]) + + # HACK: backwards compatability + if isinstance(info_1m, dict): + info_1m = info_1m['data'] + data_changed = True + + if data[5] is None: + info_6m = [0] * 360 # 12 hours each + else: + info_6m = simplejson.loads(data[5]) + + # HACK: backwards compatability + if isinstance(info_6m, dict): + info_6m = info_6m['data'] + data_changed = True + + update_to = int(time.time()) / 60 * 60 + + while last_updated < update_to: + data_changed = True + last_updated += 60 + + # First, save the roll up values + if last_updated % 600 == 0: + first, second = self.__get_max_min(info_6h[-10:]) + info_36h[-2] = first + info_36h[-1] = second + + if last_updated % 3600 == 0: + first, second = 
self.__get_max_min(info_36h[-12:]) + info_1w[-2] = first + info_1w[-1] = second + + if last_updated % 14400 == 0: + first, second = self.__get_max_min(info_1w[-8:]) + info_1m[-2] = first + info_1m[-1] = second + + if last_updated % 86400 == 0: + first, second = self.__get_max_min(info_1m[-12:]) + info_6m[-2] = first + info_6m[-1] = second + + # Then, extend arrays + info_6h.append(None) + + if last_updated % 600 == 0: + info_36h.extend([0, 0]) + + if last_updated % 3600 == 0: + info_1w.extend([0, 0]) + + if last_updated % 14400 == 0: + info_1m.extend([0, 0]) + + if last_updated % 86400 == 0: + info_6m.extend([0, 0]) + + if update_value is not None or data_changed: + if update_value is not None: + info_6h[-1] = update_value + data_changed = True + + # Do partial roll ups + for (block_size, unit_size, small_array, large_array) in ( + (600, 60, info_6h, info_36h), + (3600, 300, info_36h, info_1w), + (14400, 1800, info_1w, info_1m), + (86400, 7200, info_1m, info_6m), + ): + + partial_update_range = (last_updated % block_size / + unit_size) + 1 + + first, second = self.__get_max_min( + small_array[-1*partial_update_range:]) + + large_array[-2] = first + large_array[-1] = second + + return (data_changed, last_updated, info_6h[-360:], info_36h[-432:], + info_1w[-336:], info_1m[-360:], info_6m[-360:]) + + def update_metric(self, component, metric, value): + # Make sure values are sane + if not re.match('^[A-Za-z0-9_\.:-]+$', component): + return + # XXX should return this error to the end-user + raise ValueError('Bad component: %s (must only contain A-Z, a-z, 0-9, _, -, :, and .)' % component) + + if not re.match('^[A-Za-z0-9_\.:-]+$', metric): + return + # XXX should return this error to the end-user + raise ValueError('Bad metric: %s (must only contain A-Z, a-z, 0-9, _, -, :, and .)' % metric) + + component = component[:128] + metric = metric[:128] + value = int(value) + + # Now we can actually update + keys = ['tinyfeedback:data:list_components', + 
'tinyfeedback:data:component:%s:list_metrics' % component, + 'tinyfeedback:data:component:%s:metric:%s:last_updated' % (component, metric), + 'tinyfeedback:data:component:%s:metric:%s:6h' % (component, metric), + 'tinyfeedback:data:component:%s:metric:%s:36h' % (component, metric), + 'tinyfeedback:data:component:%s:metric:%s:1w' % (component, metric), + 'tinyfeedback:data:component:%s:metric:%s:1m' % (component, metric), + 'tinyfeedback:data:component:%s:metric:%s:6m' % (component, metric), + ] + + with self.__redis.pipeline() as pipeline: + while True: + try: + pipeline.watch(keys) + + data = pipeline.mget(keys[:2]) + + # Load the data + _, last_updated, info_6h, info_36h, info_1w, info_1m, \ + info_6m = self.__return_up_to_date_data(pipeline, + component, metric, value) + + pipeline.multi() + + # Make sure the component is listed + if data[0] is None: + components = [component] + pipeline.set(keys[0], simplejson.dumps(components)) + + else: + components = simplejson.loads(data[0]) + if component not in components: + components.append(component) + components.sort() + pipeline.set(keys[0], simplejson.dumps(components)) + + # Make sure the metric is listed + if data[1] is None: + metrics = [metric] + pipeline.set(keys[1], simplejson.dumps(metrics)) + + else: + metrics = simplejson.loads(data[1]) + if metric not in metrics: + metrics.append(metric) + metrics.sort() + pipeline.set(keys[1], simplejson.dumps(metrics)) + + # Store the values + pipeline.set(keys[2], last_updated) + pipeline.set(keys[3], simplejson.dumps(info_6h)) + pipeline.set(keys[4], simplejson.dumps(info_36h)) + pipeline.set(keys[5], simplejson.dumps(info_1w)) + pipeline.set(keys[6], simplejson.dumps(info_1m)) + pipeline.set(keys[7], simplejson.dumps(info_6m)) + + pipeline.execute() + break + + except redis.WatchError: + continue + class Data(object): ''' tinyfeedback:data:list_components - all components tinyfeedback:data:component::list_metrics - all metrics for a component + 
tinyfeedback:data:component::metric::last_updated - last update to metric tinyfeedback:data:component::metric:: - data ''' - def __init__(self, host): self.__host = host self.__update_metric_limit = defer.DeferredSemaphore(25) + def __get_max_min(self, subset): + min_value = min(subset) + max_value = max(subset) + + if subset.index(min_value) < subset.index(max_value): + return min_value, max_value + else: + return max_value, min_value + @defer.inlineCallbacks def connect(self, poolsize=None): if not poolsize: - poolsize = 100 + poolsize = 200 - self.__redis = yield txredisapi.ConnectionPool(self.__host, poolsize=poolsize) + self.__redis = yield txredisapi.ConnectionPool(self.__host, + poolsize=poolsize) @defer.inlineCallbacks def get_components(self): @@ -228,23 +475,20 @@ def delete_metrics_older_than_a_week(self, component): metric_changed = False for each_metric in metrics: - metric_keys = ['tinyfeedback:data:component:%s:metric:%s:6h' % (component, each_metric), + metric_keys = ['tinyfeedback:data:component:%s:metric:%s:last_updated' % (component, each_metric), + 'tinyfeedback:data:component:%s:metric:%s:6h' % (component, each_metric), 'tinyfeedback:data:component:%s:metric:%s:36h' % (component, each_metric), 'tinyfeedback:data:component:%s:metric:%s:1w' % (component, each_metric), 'tinyfeedback:data:component:%s:metric:%s:1m' % (component, each_metric), 'tinyfeedback:data:component:%s:metric:%s:6m' % (component, each_metric), ] - info_6h = yield self.__redis.get(metric_keys[0]) + last_updated = yield self.__redis.get(metric_keys[0]) - if not info_6h: + if not last_updated: continue - info_6h = simplejson.loads(info_6h) - - if current_time_slot - info_6h['last_updated'] > \ - (7 * 24 * 60 * 60): - + if current_time_slot - last_updated > (7 * 24 * 60 * 60): metric_changed = True metrics.remove(each_metric) @@ -275,11 +519,10 @@ def get_metrics(self, component): defer.returnValue(simplejson.loads(metrics)) @defer.inlineCallbacks - def get_data(self, component, 
metric, timescale): - key = 'tinyfeedback:data:component:%s:metric:%s:%s' % (component, metric, timescale) - + def get_data(self, component, metric, timescale, updates_infrequently=False): keys = ['tinyfeedback:data:list_components', 'tinyfeedback:data:component:%s:list_metrics' % component, + 'tinyfeedback:data:component:%s:metric:%s:last_updated' % (component, metric), 'tinyfeedback:data:component:%s:metric:%s:6h' % (component, metric), 'tinyfeedback:data:component:%s:metric:%s:36h' % (component, metric), 'tinyfeedback:data:component:%s:metric:%s:1w' % (component, metric), @@ -293,9 +536,13 @@ def get_data(self, component, metric, timescale): try: transaction = yield self.__redis.multi(keys) - info_6h = yield self.__redis.get(keys[2]) + # If the metric does not exist, just return 0's + metrics = yield self.__redis.get(keys[1]) + + if metrics is not None: + metrics = simplejson.loads(metrics) - if not info_6h: + if metrics is None or metric not in metrics: if timescale in ['6h', '1m', '6m']: yield transaction.discard() defer.returnValue([0] * 360) @@ -305,56 +552,51 @@ def get_data(self, component, metric, timescale): elif timescale == '1w': yield transaction.discard() defer.returnValue([0] * 336) - else: - info_6h = simplejson.loads(info_6h) - - current_time_slot = int(time.time()) / 60 * 60 - time_since_update = current_time_slot - info_6h['last_updated'] - - # If we haven't updated in over 10 minutes, do a long roll up - if time_since_update / 60 > 10: - yield self.__do_long_roll_up(keys, transaction, time_since_update, - info_6h) - - info_6h['last_updated'] = current_time_slot - yield transaction.set(keys[2], simplejson.dumps(info_6h)) - - # Otherwise do the normal roll up - elif time_since_update > 0: - while current_time_slot > info_6h['last_updated']: - info_6h['updates_since_last_roll_up'] += 1 - info_6h['last_updated'] += 60 - info_6h['data'].append(0) - if info_6h['updates_since_last_roll_up'] >= 10: - yield self.__do_roll_up(keys, transaction, info_6h) 
+ # Try to get the data + data_changed, last_updated, info_6h, info_36h, info_1w, \ + info_1m, info_6m = yield self.__return_up_to_date_data( + component, metric) - info_6h['updates_since_last_roll_up'] -= 10 + if data_changed: + yield transaction.mset({keys[2]: last_updated, + keys[3]: simplejson.dumps(info_6h), + keys[4]: simplejson.dumps(info_36h), + keys[5]: simplejson.dumps(info_1w), + keys[6]: simplejson.dumps(info_1m), + keys[7]: simplejson.dumps(info_6m), + }) - # Truncate data to the most recent values - info_6h['data'] = info_6h['data'][-360:] + yield transaction.commit() - info_6h['last_updated'] = current_time_slot - yield transaction.set(keys[2], simplejson.dumps(info_6h)) + else: + yield transaction.discard() - yield transaction.commit() break except txredisapi.WatchError: continue - data = yield self.__redis.get(key) + if timescale == '6h': + data = info_6h + elif timescale == '36h': + data = info_36h + elif timescale == '1w': + data = info_1w + elif timescale == '1m': + data = info_1m + elif timescale == '6m': + data = info_6m - if not data: - if timescale in ['6h', '1m', '6m']: - defer.returnValue([0] * 360) - elif timescale == '36h': - defer.returnValue([0] * 432) - elif timescale == '1w': - defer.returnValue([0] * 336) - else: - data = simplejson.loads(data) - defer.returnValue(data['data']) + last_seen_value = 0 + for i in xrange(len(data)): + if data[i] is None: + data[i] = last_seen_value + + if updates_infrequently and data[i] is not None: + last_seen_value = data[i] + + defer.returnValue(data) @defer.inlineCallbacks def delete_data(self, component, metric=None): @@ -394,6 +636,7 @@ def delete_data(self, component, metric=None): # Delete the data for each_metric in metrics_to_delete: metric_keys = [ + 'tinyfeedback:data:component:%s:metric:%s:last_updated' % (component, each_metric), 'tinyfeedback:data:component:%s:metric:%s:6h' % (component, each_metric), 'tinyfeedback:data:component:%s:metric:%s:36h' % (component, each_metric), 
'tinyfeedback:data:component:%s:metric:%s:1w' % (component, each_metric), @@ -427,23 +670,8 @@ def delete_data(self, component, metric=None): continue @defer.inlineCallbacks - def update_metric(self, component, metric, value): - # Make sure values are sane - if not re.match('^[A-Za-z0-9_\.:-]+$', component): - raise ValueError('Bad component: %s (must only contain A-Z, a-z, 0-9, _, -, :, and .)' % component) - - if not re.match('^[A-Za-z0-9_\.:-]+$', metric): - raise ValueError('Bad metric: %s (must only contain A-Z, a-z, 0-9, _, -, :, and .)' % metric) - - yield self.__update_metric_limit.acquire() - - component = component[:128] - metric = metric[:128] - value = int(value) - - # Now we can actually update - keys = ['tinyfeedback:data:list_components', - 'tinyfeedback:data:component:%s:list_metrics' % component, + def __return_up_to_date_data(self, component, metric, update_value=None): + keys = ['tinyfeedback:data:component:%s:metric:%s:last_updated' % (component, metric), 'tinyfeedback:data:component:%s:metric:%s:6h' % (component, metric), 'tinyfeedback:data:component:%s:metric:%s:36h' % (component, metric), 'tinyfeedback:data:component:%s:metric:%s:1w' % (component, metric), @@ -451,260 +679,129 @@ def update_metric(self, component, metric, value): 'tinyfeedback:data:component:%s:metric:%s:6m' % (component, metric), ] - while True: - try: - transaction = yield self.__redis.multi(keys) - - components, metrics = yield self.__redis.mget(keys[:2]) - info_6h = yield self.__redis.get(keys[2]) - - # Make sure component is listed - if not components: - components = [component] - yield transaction.set(keys[0], simplejson.dumps(components)) - - else: - components = simplejson.loads(components) - if component not in components: - components.append(component) - components.sort() - yield transaction.set(keys[0], - simplejson.dumps(components)) - - # Make sure metric is listed - if not metrics: - metrics = [metric] - yield transaction.set(keys[1], simplejson.dumps(metrics)) 
- - else: - metrics = simplejson.loads(metrics) - if metric not in metrics: - metrics.append(metric) - metrics.sort() - yield transaction.set(keys[1], - simplejson.dumps(metrics)) - - # Now we're actually ready to deal with the data - current_time_slot = int(time.time()) / 60 * 60 - - if not info_6h: - info_6h = {'data': [0] * 360, # Every 1 min - 'updates_since_last_roll_up': 0, - 'last_updated': current_time_slot, - } - - else: - info_6h = simplejson.loads(info_6h) - - time_since_update = current_time_slot - info_6h['last_updated'] - - # If we haven't updated in over 10 minutes, do a long roll up - if time_since_update / 60 > 10: - yield self.__do_long_roll_up(keys, transaction, - time_since_update, info_6h) - - # Otherwise do the normal roll up - else: - while current_time_slot > info_6h['last_updated']: - info_6h['updates_since_last_roll_up'] += 1 - info_6h['last_updated'] += 60 - info_6h['data'].append(0) - - if info_6h['updates_since_last_roll_up'] >= 10: - # Make sure the value is set before roll up - if current_time_slot == info_6h['last_updated']: - info_6h['data'][-1] = value - - yield self.__do_roll_up(keys, transaction, info_6h) - - info_6h['updates_since_last_roll_up'] -= 10 - - # Truncate data to the most recent values - info_6h['data'] = info_6h['data'][-360:] - - # At last, update the value - info_6h['data'][-1] = value - info_6h['last_updated'] = current_time_slot - - yield transaction.set(keys[2], simplejson.dumps(info_6h)) - - yield transaction.commit() - break - - except txredisapi.WatchError: - continue - - yield self.__update_metric_limit.release() + data = yield self.__redis.mget(keys) + data_changed = False - @defer.inlineCallbacks - def __load_long_data(self, keys, transaction): - info_36h, info_1w, info_1m, info_6m = yield self.__redis.mget(keys[3:]) - - # Makes sure the data is loaded - if not info_36h: - info_36h = {'data': [0] * 432, # Every 5 min - 'updates_since_last_roll_up': 0, - } - - yield transaction.set(keys[3], 
simplejson.dumps(info_36h)) + # Load the data in the format we want + if data[0] is None: + last_updated = int(time.time()) / 60 * 60 else: - info_36h = simplejson.loads(info_36h) + last_updated = data[0] - if not info_1w: - info_1w = {'data': [0] * 336, # Every 30 min - 'updates_since_last_roll_up': 0, - } - - yield transaction.set(keys[4], simplejson.dumps(info_1w)) + if data[1] is None: + info_6h = [0] * 360 # 1 min each else: - info_1w = simplejson.loads(info_1w) + info_6h = simplejson.loads(data[1]) - if not info_1m: - info_1m = {'data': [0] * 360, # Every 2 hours - 'updates_since_last_roll_up': 0, - } + # HACK: backwards compatability + if isinstance(info_6h, dict): + last_updated = info_6h['last_updated'] + info_6h = info_6h['data'] + data_changed = True - yield transaction.set(keys[5], simplejson.dumps(info_1m)) + if data[2] is None: + info_36h = [0] * 432 # 5 min each else: - info_1m = simplejson.loads(info_1m) + info_36h = simplejson.loads(data[2]) - if not info_6m: - info_6m = {'data': [0] * 360, # Every 12 hours - } + # HACK: backwards compatability + if isinstance(info_36h, dict): + info_36h = info_36h['data'] + data_changed = True - yield transaction.set(keys[6], simplejson.dumps(info_6m)) + if data[3] is None: + info_1w = [0] * 336 # 30 min each else: - info_6m = simplejson.loads(info_6m) - - defer.returnValue((info_36h, info_1w, info_1m, info_6m)) - - @defer.inlineCallbacks - def __do_roll_up(self, keys, transaction, info_6h): - info_36h, info_1w, info_1m, info_6m = yield self.__load_long_data( - keys, transaction) + info_1w = simplejson.loads(data[3]) - # Roll up for 36h - subset = info_6h['data'][-10:] - min_value = min(subset) - max_value = max(subset) + # HACK: backwards compatability + if isinstance(info_1w, dict): + info_1w = info_1w['data'] + data_changed = True - if subset.index(min_value) < subset.index(max_value): - info_36h['data'].extend([min_value, max_value]) + if data[4] is None: + info_1m = [0] * 360 # 2 hours each else: - 
info_36h['data'].extend([max_value, min_value]) - - info_36h['updates_since_last_roll_up'] += 2 - info_36h['data'] = info_36h['data'][2:] - - # Roll up for 1w - if info_36h['updates_since_last_roll_up'] >= 12: - info_36h['updates_since_last_roll_up'] -= 12 + info_1m = simplejson.loads(data[4]) - subset = info_36h['data'][-12:] - min_value = min(subset) - max_value = max(subset) + # HACK: backwards compatability + if isinstance(info_1m, dict): + info_1m = info_1m['data'] + data_changed = True - if subset.index(min_value) < subset.index(max_value): - info_1w['data'].extend([min_value, max_value]) - else: - info_1w['data'].extend([max_value, min_value]) - - info_1w['updates_since_last_roll_up'] += 2 - info_1w['data'] = info_1w['data'][2:] - - # Roll up for 1m - if info_1w['updates_since_last_roll_up'] >= 8: - info_1w['updates_since_last_roll_up'] -= 8 - - subset = info_1w['data'][-8:] - min_value = min(subset) - max_value = max(subset) - - if subset.index(min_value) < subset.index(max_value): - info_1m['data'].extend([min_value, max_value]) - else: - info_1m['data'].extend([max_value, min_value]) - - info_1m['updates_since_last_roll_up'] += 2 - info_1m['data'] = info_1m['data'][2:] - - # Roll up for 6m - if info_1m['updates_since_last_roll_up'] >= 12: - info_1m['updates_since_last_roll_up'] -= 12 - - subset = info_1m['data'][-12:] - min_value = min(subset) - max_value = max(subset) + if data[5] is None: + info_6m = [0] * 360 # 12 hours each + else: + info_6m = simplejson.loads(data[5]) - if subset.index(min_value) < subset.index(max_value): - info_6m['data'].extend([min_value, max_value]) - else: - info_6m['data'].extend([max_value, min_value]) + # HACK: backwards compatability + if isinstance(info_6m, dict): + info_6m = info_6m['data'] + data_changed = True - info_6m['data'] = info_6m['data'][2:] + update_to = int(time.time()) / 60 * 60 - yield transaction.set(keys[3], simplejson.dumps(info_36h)) - yield transaction.set(keys[4], simplejson.dumps(info_1w)) - yield 
transaction.set(keys[5], simplejson.dumps(info_1m)) - yield transaction.set(keys[6], simplejson.dumps(info_6m)) + while last_updated < update_to: + data_changed = True + last_updated += 60 - @defer.inlineCallbacks - def __do_long_roll_up(self, keys, transaction, time_since_update, info_6h): - info_36h, info_1w, info_1m, info_6m = yield self.__load_long_data( - keys, transaction) + # First, save the roll up values + if last_updated % 600 == 0: + first, second = self.__get_max_min(info_6h[-10:]) + info_36h[-2] = first + info_36h[-1] = second - # Roll up for 6h - needed_updates = time_since_update / 60 - needed_updates_floor = min(needed_updates, 360) - info_6h['data'].extend([0] * needed_updates_floor) - info_6h['data'] = info_6h['data'][-360:] - info_6h['updates_since_last_roll_up'] += needed_updates + if last_updated % 3600 == 0: + first, second = self.__get_max_min(info_36h[-12:]) + info_1w[-2] = first + info_1w[-1] = second - needed_updates = info_6h['updates_since_last_roll_up'] / 10 - info_6h['updates_since_last_roll_up'] %= 10 + if last_updated % 14400 == 0: + first, second = self.__get_max_min(info_1w[-8:]) + info_1m[-2] = first + info_1m[-1] = second - yield transaction.set(keys[2], simplejson.dumps(info_6h)) + if last_updated % 86400 == 0: + first, second = self.__get_max_min(info_1m[-12:]) + info_6m[-2] = first + info_6m[-1] = second - # Roll up for 36h - if needed_updates > 0: - needed_updates_floor = min(needed_updates, 432 / 2) - info_36h['data'].extend([0] * 2 * needed_updates_floor) - info_36h['data'] = info_36h['data'][-432:] - info_36h['updates_since_last_roll_up'] += needed_updates + # Then, extend arrays + info_6h.append(None) - needed_updates = info_36h['updates_since_last_roll_up'] / 12 - info_36h['updates_since_last_roll_up'] %= 12 + if last_updated % 600 == 0: + info_36h.extend([0, 0]) - yield transaction.set(keys[3], simplejson.dumps(info_36h)) + if last_updated % 3600 == 0: + info_1w.extend([0, 0]) - # Roll up for 1w - if needed_updates > 0: 
- needed_updates_floor = min(needed_updates, 336 / 2) - info_1w['data'].extend([0] * 2 * needed_updates_floor) - info_1w['data'] = info_1w['data'][-336:] - info_1w['updates_since_last_roll_up'] += needed_updates + if last_updated % 14400 == 0: + info_1m.extend([0, 0]) - needed_updates = info_1w['updates_since_last_roll_up'] / 8 - info_1w['updates_since_last_roll_up'] %= 8 + if last_updated % 86400 == 0: + info_6m.extend([0, 0]) - yield transaction.set(keys[4], simplejson.dumps(info_1w)) + if update_value is not None or data_changed: + if update_value is not None: + info_6h[-1] = update_value + data_changed = True - # Roll up for 1m - if needed_updates > 0: - needed_updates_floor = min(needed_updates, 360 / 2) - info_1m['data'].extend([0] * 2 * needed_updates_floor) - info_1m['data'] = info_1m['data'][-360:] - info_1m['updates_since_last_roll_up'] += needed_updates + # Do partial roll ups + for (block_size, unit_size, small_array, large_array) in ( + (600, 60, info_6h, info_36h), + (3600, 300, info_36h, info_1w), + (14400, 1800, info_1w, info_1m), + (86400, 7200, info_1m, info_6m), + ): - needed_updates = info_1m['updates_since_last_roll_up'] / 12 - info_1m['updates_since_last_roll_up'] %= 12 + partial_update_range = (last_updated % block_size / + unit_size) + 1 - yield transaction.set(keys[5], simplejson.dumps(info_1m)) + first, second = self.__get_max_min( + small_array[-1*partial_update_range:]) - # Roll up for 6m - if needed_updates > 0: - needed_updates_floor = min(needed_updates, 360 / 2) - info_6m['data'].extend([0] * 2 * needed_updates_floor) - info_6m['data'] = info_6m['data'][-360:] + large_array[-2] = first + large_array[-1] = second - yield transaction.set(keys[6], simplejson.dumps(info_6m)) + defer.returnValue((data_changed, last_updated, info_6h[-360:], + info_36h[-432:], info_1w[-336:], info_1m[-360:], info_6m[-360:])) diff --git a/tinyfeedback/static/css/style.css b/tinyfeedback/static/css/style.css index fb1e19b..d98d1d0 100644 --- 
a/tinyfeedback/static/css/style.css +++ b/tinyfeedback/static/css/style.css @@ -3,18 +3,10 @@ body { margin: 10px; } -#cursorlabel { - position: absolute; -} - h2.error { color: red; } -h3 { - font-family: sans-serif; -} - a { font-size: small; } @@ -35,8 +27,15 @@ span.login { float: right; } -span.remove { +span.graph_controls { float: right; + display: none; +} + +span.graph_title { + font-family: sans-serif; + font-size: medium; + font-weight: bold; } table { @@ -102,3 +101,71 @@ a:link.bare { stroke: rgba(0, 0, 0, 0.10); stroke-width: 1px; } + +.ui-menu { + background-color: white; + max-width: 450px; + border: 2px solid gray; + max-height: 200px; + overflow-y: scroll; +} + +.wildcard { + width: 500px; +} + +.dashboard-table { + padding-left: 20px; + border-collapse: collapse; +} +.dashboard-table th { + text-align: left; +} +.dashboard-table td { + padding-bottom: 5px; +} + +#infoshow { + font-style: italic; +} + +#info { + border: 1px solid black; + padding: 10px; + display: none; + vertical-align: top; +} + +.stats-table { + border-collapse: collapse; + border: 2px solid black; + text-align: center; +} +.stats-table td { + border: 1px solid black; + padding: 5px; +} +.stats-table th { + font-weight: bold; + border: 1px solid black; + padding: 5px; +} + +.color-table { + border-collapse: collapse; + border: 1px solid black; +} + +.tabledata { + min-width: 20px; + padding-left: 5px; + padding-right: 5px; +} + +#colorbutton, .statsbutton, .statsgraph { + display: none; +} + +.colorOverride { + background-color: white !important; +} diff --git a/tinyfeedback/templates/component.mako b/tinyfeedback/templates/component.mako index 9edc0b5..0a59b06 100644 --- a/tinyfeedback/templates/component.mako +++ b/tinyfeedback/templates/component.mako @@ -30,7 +30,7 @@ <%include file="login.mako" args="username='${username}'"/> -

tf :: components :: ${component}

+

tf :: components :: ${component}

% for each_timescale in timescales: % if timescale == each_timescale: diff --git a/tinyfeedback/templates/components.mako b/tinyfeedback/templates/components.mako new file mode 100644 index 0000000..2303ebc --- /dev/null +++ b/tinyfeedback/templates/components.mako @@ -0,0 +1,16 @@ + + + + + + + + <%include file="login.mako" args="username='${username}'"/> +

tf :: components

+
    + % for component in components: +
  • ${component}
  • + % endfor +
+ + diff --git a/tinyfeedback/templates/d3.mako b/tinyfeedback/templates/d3.mako index 4ac6b1a..3f7a5f3 100644 --- a/tinyfeedback/templates/d3.mako +++ b/tinyfeedback/templates/d3.mako @@ -1,10 +1,32 @@ diff --git a/tinyfeedback/templates/dashboards.mako b/tinyfeedback/templates/dashboards.mako index 45a045a..b2fcf48 100644 --- a/tinyfeedback/templates/dashboards.mako +++ b/tinyfeedback/templates/dashboards.mako @@ -11,19 +11,24 @@ % if len(graphs_per_user) == 0: None yet! Log in above! % else: -
    - % for username, graph_count in graphs_per_user: -
  • - ${username}'s dashboard - - ${graph_count} - % if graph_count == 1: - graph - % else: - graphs - % endif -
  • - % endfor -
+ + + + + + % for username, graph_count in graphs_per_user: + + + + + % endfor +
DashboardNumber of graphs
${username}${graph_count} + % if graph_count == 1: + graph + % else: + graphs + % endif +
% endif diff --git a/tinyfeedback/templates/edit.mako b/tinyfeedback/templates/edit.mako index 5f9839d..3ef83e4 100644 --- a/tinyfeedback/templates/edit.mako +++ b/tinyfeedback/templates/edit.mako @@ -4,21 +4,10 @@ + <%include file="login.mako" args="username='${username}'"/> @@ -67,12 +100,14 @@ % elif kwargs['error'] == 'no_fields': You must specify at least one field % elif kwargs['error'] == 'bad_wildcard_filter': - Wildcards must contain a "|" + Fields must contain a "|" % endif % endif +

tf :: edit graph

+

Note that if you modified the graph type or timescale on the previous page those changes are propagated below

Title:

Timescale:

- -
    - -
  • Wildcard Items -

    Type the name of the component and metric you want i.e. component|metric* or *|metric

    -
      - % for item in fields: - % if '*' in item: -
    • - % endif - % endfor - -
    • -
    - -
  • -
    +

    + % if updates_infrequently: + + % else: + + % endif + Graph Values Don't Update Once Per Minute (so don't drop values to 0) +

    -
  • All Components -

    Filter Metrics:

    -
      - % for component, metrics in data_sources.iteritems(): - % if component in active_components: -
    • - ${component}
    • - % else: -
    • + ${component}
    • - % endif -
        - % for metric in metrics: - % if component in active_components: -
      • - % else: -
      • - % endif - % if '%s|%s' % (component, metric) in fields: - ${cgi.escape(metric)} - % else: - ${cgi.escape(metric)} - % endif -
      • - % endfor -
      - % endfor -
    -
  • + Fields +

    Search for the specific component and metric you want to add or write it in yourself. Supports wildcard queries e.g. component|metric* or *|metric.

    + + + + + + + + + + +
    Component:Metric:
    +
    +
      + % for item in fields: +
    • + % endfor +
    -
    + +

    +
    +
    diff --git a/tinyfeedback/templates/graph.mako b/tinyfeedback/templates/graph.mako index 4740e7c..af56a7d 100644 --- a/tinyfeedback/templates/graph.mako +++ b/tinyfeedback/templates/graph.mako @@ -1,58 +1,233 @@ - - - - - - - -
    - <%include file="d3.mako"/> - <%include file="login.mako" args="username='${username}'"/> -

    tf :: view graph

    - - % if username is not None and username != graph_username: - % for (graph_name, graph_name_urlencoded, graph_type, graph_type_urlencoded, timescale, time_per_data_point, line_names, data, current_time, length, max_value) in graph: -
    - - - - - -
    - % endfor - % elif username is None: - Want this graph on your dasboard? Log in above! - % endif - - - - -
    - % for (graph_name, graph_name_urlencoded, graph_type, graph_type_urlencoded, timescale, time_per_data_point, line_names, data, current_time, length, max_value) in graph: -

    ${graph_name}

    - - % endfor -
    - - + + + + + + + + +
    + <%include file="d3.mako"/> + <%include file="login.mako" args="username='${username}'"/> +

    tf :: view graph

    + + % if username is not None and username != graph_username: +
    + + + + + +
    +
    + % elif username is None: + Want this graph on your dashboard? Log in above! +
    + % endif + +
    + % for timescale_value in ['6h', '36h', '1w', '1m', '6m']: + ${timescale_value} + % endfor +
    + % for graphtype in ['line', 'stacked']: + ${graphtype} + % endfor +
    + % for timezone_value in ['local', 'UTC']: + ${timezone_value} + % endfor +
    + autorefresh on + autorefresh off +
    + +
    +
    + + + + + + +
    +

    + + ${title} + +

    loading...

    + % if username == graph_username: + + edit + +
    + % endif + + stats + +
    + info> +

    +
    +
    + **Autorefresh will refresh the page every minute
    + **Changing the timezone in quickswitch will not affect your global timezone setting. To change your global timezone setting use the option in the top right above login.
    + **Data is collected over a time frame dependent on the timescale. +
    + 6h: every minute +
    + 36h: 5 minutes +
    + 1w: 30 minutes +
    + 1m: 2 hours +
    + 6m: 12 hours +
    + + diff --git a/tinyfeedback/templates/index.mako b/tinyfeedback/templates/index.mako index e80d12c..43ea379 100644 --- a/tinyfeedback/templates/index.mako +++ b/tinyfeedback/templates/index.mako @@ -1,95 +1,172 @@ - - - - - - - -
    - % if edit is not None: - - % endif + + + + + + + <%include file="d3.mako"/> + - - - - % endfor -
-
-
- % endif + % if timezone == 'local': + var current_time = local_time.getTime(); + % else: + var current_time = (local_time.getTime() + local_time.getTimezoneOffset()*60*1000); + % endif + + time.forEach(function(each, i) { + time[i] = new Date(current_time + ((i - length) * time_per_data_point)); + }); + + var legendHeight = 10*line_names.length; + var graphHeight = 220 + legendHeight; + + custom_graph("graph_" + index, line_names, data_rows, max_value, time, time_per_data_point, graph_type); + + $("#edit_" + index).attr('href', "/edit?title=" + title_urlencoded + "&timescale=" + timescale + "&graph_type=" + graph_type_urlencoded); + $("#remove_" + index).attr('href', "/edit?title=" + title_urlencoded + "&delete=true"); + $("#stats_" + index).attr('href', "/stats/${dashboard_username}/" + title_urlencoded + "?timescale=" + timescale + "&graph_type=" + graph_type_urlencoded); + $("#graph_" + index).attr('href', "/graph/${dashboard_username}/" + title_urlencoded + "?timescale=" + timescale + "&graph_type=" + graph_type_urlencoded); + } + + function callForData(title, index) { + $.ajax({ + type: 'GET', + url: '/graphdata/${dashboard_username}/' + title, + data: {}, + contentType: "application/json; charset=utf-8", + dataType: "json", + async: true, + success: function (data) { + createGraph(data, index); + }, + error: function(xhr, status) { + console.log("error loading data"); + console.log("hatada: " + xhr.responseXML); + }, + complete: function() { + $("#graph_" + index + " .loading").hide(); + } + }); + } - % if len(components) > 0: -

Components

-
    - % for each in components: -
  • ${each}
  • - % endfor -
+ % for title in graph_titles: + callForData('${title}', '${graph_titles.index(title)}'); + % endfor + + +
+ % if edit is not None: + + % endif + % if username == dashboard_username: + + % endif + + <%include file="d3.mako"/> + <%include file="login.mako" args="username='${username}'"/> +

+ tf :: dashboards + % if dashboard_username: + :: ${dashboard_username} + % endif +

+ + % if username is not None or dashboard_username is not None: + new graph +
+ % if edit is not None: + freeze graphs Drag graphs to re-order them! + % elif username == dashboard_username: + move graphs + % endif + + + % if len(graph_titles) == 0: + % if username == dashboard_username: + Want custom graphs? Click new graph to create one! + % else: + No graphs yet! % endif - + % endif + + +
+
+ % endif + diff --git a/tinyfeedback/templates/login.mako b/tinyfeedback/templates/login.mako index a75f19e..c361fc4 100644 --- a/tinyfeedback/templates/login.mako +++ b/tinyfeedback/templates/login.mako @@ -1,4 +1,17 @@
diff --git a/tinyfeedback/templates/statistics.mako b/tinyfeedback/templates/statistics.mako new file mode 100644 index 0000000..9cba81c --- /dev/null +++ b/tinyfeedback/templates/statistics.mako @@ -0,0 +1,83 @@ + + + + + + + + + + + <%include file="statsJS.mako"/> + + +
+ <%include file="login.mako" args="username='${username}'"/> +

tf :: view graph :: statistics

+

${title}

+
+
+

loading...

+
+
+ + +

+ +
+

Raw Data: red-highest values, white-lowest values across all columns

+
+
+

Percent Change: the percent change over five data points [columns are independent]

+
+ + diff --git a/tinyfeedback/templates/statsJS.mako b/tinyfeedback/templates/statsJS.mako new file mode 100644 index 0000000..cd65284 --- /dev/null +++ b/tinyfeedback/templates/statsJS.mako @@ -0,0 +1,211 @@ + diff --git a/tinyfeedback/webserver.py b/tinyfeedback/webserver.py index 2f506ae..cd69b45 100644 --- a/tinyfeedback/webserver.py +++ b/tinyfeedback/webserver.py @@ -6,10 +6,12 @@ import cgi import logging import logging.handlers +import multiprocessing import os +import Queue +import re import time import urllib -import re import mako.template import mako.lookup @@ -22,6 +24,8 @@ import redis_model +import numpy as np +import pandas as pd def straighten_out_request(f): # The twisted request dictionary return values as lists, this un-does that @@ -57,13 +61,15 @@ def wrapped_f(*args, **kwargs): class Controller(object): - def __init__(self, redis_model_data, redis_model_graph, log): + def __init__(self, redis_model_data, redis_model_graph, queue, log): self.__redis_model_data = redis_model_data self.__redis_model_graph = redis_model_graph + self.__queue = queue self._log = log self.timescales = ['6h', '36h', '1w', '1m', '6m'] self.graph_types = ['line', 'stacked'] + self.timezones = ['UTC', 'local'] # Set up template lookup directory self.__template_lookup = mako.lookup.TemplateLookup( @@ -74,87 +80,110 @@ def __init__(self, redis_model_data, redis_model_graph, log): @straighten_out_request def get_index(self, request): username = request.getCookie('username') + timezone = request.getCookie('timezone') + + if timezone is None: + current_utc_time = datetime.datetime.utcnow() + current_utc_time += datetime.timedelta(days=365) + expires_str = current_utc_time.strftime('%a, %d-%b-%Y %H:%M:%S GMT') + + request.addCookie('timezone', 'local', expires=expires_str) + timezone = 'local' if 'edit' in request.args: edit = request.args['edit'] else: edit = None - self.__finish_get_index(request, username, edit) + self.__finish_get_index(request, username, timezone, edit) 
return NOT_DONE_YET @defer.inlineCallbacks - def __finish_get_index(self, request, username, edit): - components = yield self.__redis_model_data.get_components() + def __finish_get_index(self, request, username, timezone, edit): - # Look up custom graphs for this user if username is not None: graphs = yield self.__redis_model_graph.get_graphs(username) else: graphs = {} - graph_data = [None] * len(graphs) + graph_titles = [None] * len(graphs) + graph_titles_urlencoded = [None] * len(graphs) for title, each_graph in graphs.iteritems(): - graph_data[each_graph['ordering']] = yield self.__get_graph_details( - title, each_graph) + graph_titles[each_graph['ordering']] = title + graph_titles_urlencoded[each_graph['ordering']] = urllib.quote_plus(title).replace('%2F', '$2F') template = self.__template_lookup.get_template('index.mako') - ret = template.render(components=components, username=username, - dashboard_username=username, edit=edit, graphs=graph_data, - cgi=cgi).encode('utf8') + ret = template.render(username=username, + dashboard_username=username, edit=edit, graph_titles=graph_titles, graph_titles_urlencoded=graph_titles_urlencoded, + timezone=timezone, cgi=cgi).encode('utf8') request.write(ret) request.finish() - def get_dashboards(self, request): + def get_user_dashboard(self, request, dashboard_username): username = request.getCookie('username') - self.__finish_get_dashboards(request, username) - - return NOT_DONE_YET - - @defer.inlineCallbacks - def __finish_get_dashboards(self, request, username): - graphs_per_user = yield self.__redis_model_graph.get_graphs_per_user() + timezone = request.getCookie('timezone') - template = self.__template_lookup.get_template('dashboards.mako') + if timezone is None: + current_utc_time = datetime.datetime.utcnow() + current_utc_time += datetime.timedelta(days=365) + expires_str = current_utc_time.strftime('%a, %d-%b-%Y %H:%M:%S GMT') - page = template.render(username=username, - 
graphs_per_user=graphs_per_user).encode('utf8') + request.addCookie('timezone', 'local', expires=expires_str) + timezone = 'local' - request.write(page) - request.finish() - - def get_user_dashboards(self, request, dashboard_username): - username = request.getCookie('username') - self.__finish_get_user_dashboards(request, dashboard_username, - username) + self.__finish_get_user_dashboard(request, dashboard_username, + username, timezone) return NOT_DONE_YET @defer.inlineCallbacks - def __finish_get_user_dashboards(self, request, dashboard_username, - username): + def __finish_get_user_dashboard(self, request, dashboard_username, + username, timezone): graphs = yield self.__redis_model_graph.get_graphs(dashboard_username) - graph_data = [None] * len(graphs) + graph_titles = [None] * len(graphs) + graph_titles_urlencoded = [None] * len(graphs) for title, each_graph in graphs.iteritems(): - graph_data[each_graph['ordering']] = yield self.__get_graph_details( - title, each_graph) + graph_titles[each_graph['ordering']] = title + graph_titles_urlencoded[each_graph['ordering']] = urllib.quote_plus(title).replace('%2F', '$2F') template = self.__template_lookup.get_template('index.mako') - ret = template.render(components=[], username=username, + ret = template.render(username=username, dashboard_username=dashboard_username, edit=None, - graphs=graph_data, cgi=cgi).encode('utf8') + graph_titles=graph_titles, graph_titles_urlencoded=graph_titles_urlencoded, timezone=timezone, cgi=cgi).encode('utf8') request.write(ret) request.finish() + def get_dashboards(self, request): + username = request.getCookie('username') + timezone = request.getCookie('timezone') + if timezone is None: + timezone = 'local' + + self.__finish_get_dashboards(request, username, timezone) + + return NOT_DONE_YET + + @defer.inlineCallbacks + def __finish_get_dashboards(self, request, username, timezone): + graphs_per_user = yield self.__redis_model_graph.get_graphs_per_user() + + template = 
self.__template_lookup.get_template('dashboards.mako') + + page = template.render(username=username, + graphs_per_user=graphs_per_user, timezone=timezone).encode('utf8') + + request.write(page) + request.finish() + def delete_user(self, request, dashboard_username): self.__finish_delete_user(request, dashboard_username) return NOT_DONE_YET @@ -222,6 +251,9 @@ def __finish_get_component(self, request, component, username, timescale): @straighten_out_request def get_edit(self, request): username = request.getCookie('username') + timezone = request.getCookie('timezone') + if timezone is None: + timezone = 'local' title = request.args.get('title', '') title = urllib.unquote_plus(title.replace('$2F', '%2F')) @@ -234,12 +266,12 @@ def get_edit(self, request): request.redirect('/') return '' - self.__finish_get_edit(request, username, title) + self.__finish_get_edit(request, username, title, timezone) return NOT_DONE_YET @defer.inlineCallbacks - def __finish_get_edit(self, request, username, title): + def __finish_get_edit(self, request, username, title, timezone): data_sources = {} components = yield self.__redis_model_data.get_components() @@ -254,10 +286,12 @@ def __finish_get_edit(self, request, username, title): if title and title in graphs: fields = graphs[title]['fields'] active_components = [each.split('|')[0] for each in fields] + updates_infrequently = graphs[title].get('updates_infrequently', False) else: fields = [] active_components = [] + updates_infrequently = False graph_type = request.args.get('graph_type', '') @@ -265,8 +299,9 @@ def __finish_get_edit(self, request, username, title): ret = template.render(kwargs=request.args, fields=fields, data_sources=data_sources, active_components=active_components, - username=username, timescales=self.timescales, - graph_types=self.graph_types, cgi=cgi).encode('utf8') + updates_infrequently=updates_infrequently, username=username, + timescales=self.timescales, graph_types=self.graph_types, timezone=timezone, + 
cgi=cgi).encode('utf8') request.write(ret) request.finish() @@ -294,16 +329,16 @@ def post_edit(self, request): title = request.args['title'] timescale = request.args['timescale'] graph_type = request.args['graph_type'] + updates_infrequently = request.args.get('updates_infrequently', False) keys = request.args.keys() - index = keys.index('title') - del keys[index] - index = keys.index('graph_type') - del keys[index] + for each in ['title', 'graph_type', 'timescale', + 'updates_infrequently']: - index = keys.index('timescale') - del keys[index] + if each in keys: + index = keys.index(each) + del keys[index] # Make sure any wildcards are correctly formatted for each_key in keys: @@ -316,16 +351,16 @@ def post_edit(self, request): return '' self.__finish_post_edit(request, username, title, timescale, keys, - graph_type) + graph_type, updates_infrequently) return NOT_DONE_YET @defer.inlineCallbacks def __finish_post_edit(self, request, username, title, timescale, keys, - graph_type): + graph_type, updates_infrequently): yield self.__redis_model_graph.update_graph(username, title, timescale, - keys, graph_type) + keys, graph_type, updates_infrequently) request.setResponseCode(303) request.redirect('/') @@ -337,42 +372,89 @@ def get_graph(self, request, graph_username, title): username = request.getCookie('username') + timezone = request.getCookie('timezone') + if timezone is None: + timezone = 'local' + + graph_timezone = request.args.get('timezone', None) + if graph_timezone is None: + graph_timezone = timezone + # HACK: routes can't handle URLs with %2F in them ('/') # so replace '$2F' with '%2F' as we unquote the title title = urllib.unquote_plus(title.replace('$2F', '%2F')) graph_type = request.args.get('graph_type', '') timescale = request.args.get('timescale', '') + autorefresh = request.args.get('refresh', False) force_max_value = float(request.args.get('max', 0)) - for each in [graph_type, timescale]: - if each == '': - request.setResponseCode(400) - return '' 
- self.__finish_get_graph(request, username, graph_username, title, - graph_type, timescale, force_max_value) + graph_timezone, timezone, graph_type, timescale, autorefresh, force_max_value) return NOT_DONE_YET @defer.inlineCallbacks def __finish_get_graph(self, request, username, graph_username, title, - graph_type, timescale, force_max_value): - - graphs = yield self.__redis_model_graph.get_graphs(graph_username) - - graph_details = yield self.__get_graph_details(title, graphs[title], - graph_type, timescale) + graph_timezone, timezone, graph_type, timescale, autorefresh, force_max_value): template = self.__template_lookup.get_template('graph.mako') ret = template.render(username=username, graph_username=graph_username, - title=title, graph_type=graph_type, graph=[graph_details], - force_max_value=force_max_value).encode('utf8') + title=title, graph_type=graph_type, timescale=timescale, force_max_value=force_max_value, graph_timezone=graph_timezone, timezone=timezone, autorefresh=autorefresh).encode('utf8') request.write(ret) request.finish() + def get_components(self, request): + username = request.getCookie('username') + self.__finish_get_components(request, username) + + return NOT_DONE_YET + + @defer.inlineCallbacks + def __finish_get_components(self, request, username): + + components = yield self.__redis_model_data.get_components() + + template = self.__template_lookup.get_template('components.mako') + + page = template.render(username=username, + components=components).encode('utf8') + + request.write(page) + request.finish() + + @straighten_out_request + def get_stats(self, request, graph_username, title): + + self._log.debug('get stats %s %s', graph_username, title) + + graph_type = request.args.get('graph_type', '') + timescale = request.args.get('timescale', '') + + username = request.getCookie('username') + timezone = request.getCookie('timezone') + if timezone is None: + timezone = 'local' + + title = urllib.unquote_plus(title.replace('$2F', '%2F')) 
+ + self.__finish_get_stats(request, username, graph_username, title, + timezone, timescale, graph_type) + + return NOT_DONE_YET + + @defer.inlineCallbacks + def __finish_get_stats(self, request, username, graph_username, title, timezone, timescale, graph_type): + + template = self.__template_lookup.get_template('statistics.mako') + + page = template.render(username=username, graph_username=graph_username, title=title, timezone=timezone, timescale=timescale, graph_type=graph_type).encode('utf8') + + request.write(page) + request.finish() + # AJAX calls to manipulate user state @straighten_out_request def post_graph_ordering(self, request): @@ -427,7 +509,8 @@ def __finish_post_add_graph_from_other_user(self, request, username, graph_type = graphs[title]['graph_type'] yield self.__redis_model_graph.update_graph(username, title, - timescale, graphs[title]['fields'], graph_type) + timescale, graphs[title]['fields'], graph_type, + graphs[title].get('updates_infrequently', False)) request.setResponseCode(303) request.redirect('/') @@ -438,32 +521,13 @@ def __finish_post_add_graph_from_other_user(self, request, username, def post_data(self, request, component): self._log.debug('posting data for %s %s', component, request.args) - deferreds = [] - - for metric, value in request.args.iteritems(): - deferred = self.__redis_model_data.update_metric(component, metric, - int(value)) - - deferreds.append(deferred) - - defer_list = defer.DeferredList(deferreds, consumeErrors=True) - defer_list.addCallback(self.__finish_post_data, request) - - return NOT_DONE_YET - - def __finish_post_data(self, responses, request): - errors = [] - for (success, exception) in responses: - if not success: - errors.append(exception.value.message) + try: + self.__queue.put([component, request.args], block=False) + return 'OK\n' - if errors: + except Queue.Full: request.setResponseCode(400) - request.write(simplejson.dumps(errors)) - else: - request.write('OK') - - request.finish() + return 
simplejson.dumps({'error': 'Too many pending requests'}) @straighten_out_request def get_data(self, request, component, metric): @@ -484,6 +548,73 @@ def __finish_get_data(self, request, component, metric, timescale): request.write(simplejson.dumps(data)) request.finish() + @straighten_out_request + def get_graph_data(self, request, graph_username, title): + timescale = request.args.get('timescale', None) + graph_type = request.args.get('graph_type', None) + + if timescale not in self.timescales: + timescale = None + if graph_type not in self.graph_types: + graph_type = None + + self.__finish_get_graph_data(request, graph_username, title, timescale, graph_type) + + return NOT_DONE_YET + + @defer.inlineCallbacks + def __finish_get_graph_data(self, request, graph_username, title, timescale, graph_type): + graphs = yield self.__redis_model_graph.get_graphs(graph_username) + + # HACK: routes can't handle URLs with %2F in them ('/') + # so replace '$2F' with '%2F' as we unquote the title + title = urllib.unquote_plus(title.replace('$2F', '%2F')) + + graph_details = yield self.__get_graph_details(title, graphs[title], graph_type, timescale) + + request.write(simplejson.dumps(graph_details)) + request.finish() + + @straighten_out_request + def get_stats_data(self, request, graph_username, title): + timescale = request.args.get('timescale', None) + graph_type = request.args.get('graph_type', None) + + if timescale not in self.timescales: + timescale = None + if graph_type not in self.graph_types: + graph_type = None + + self.__finish_get_stats_data(request, graph_username, title, timescale, graph_type) + + return NOT_DONE_YET + + @defer.inlineCallbacks + def __finish_get_stats_data(self, request, graph_username, title, timescale, graph_type): + graphs = yield self.__redis_model_graph.get_graphs(graph_username) + + # HACK: routes can't handle URLs with %2F in them ('/') + # so replace '$2F' with '%2F' as we unquote the title + title = 
urllib.unquote_plus(title.replace('$2F', '%2F')) + + graph_details = yield self.__get_graph_details(title, graphs[title], + graph_type, timescale) + + line_names = graph_details[6] + data_rows = graph_details[7] + + summarystats = [None] * len(data_rows) + for i in xrange(0, len(data_rows)): + summarystats[i] = self.__get_summarystats(line_names[i], data_rows[i]) + + percentiles, rolling, percents = self.__get_stats_tables_data(data_rows) + + toWrite = {"graph_details": graph_details, "summary": summarystats, + "percentiles": percentiles, "rolling": rolling, "percents": percents} + + request.write(simplejson.dumps(toWrite)) + request.finish() + @straighten_out_request def delete_data(self, request, component, metric=None): self.__finish_delete_data(request, component, metric) @@ -515,6 +646,10 @@ def post_login(self, request): request.addCookie('username', username, expires=expires_str) + timezone = request.getCookie('timezone') + if timezone is None: + request.addCookie('timezone', 'local', expires=expires_str) + referer = request.getHeader('Referer') if referer is None: referer = '/' @@ -537,7 +672,58 @@ def get_logout(self, request): request.redirect(referer) return '' + @straighten_out_request + def post_timezone(self, request): + + new_timezone = request.args.get('timezone', None) + + if new_timezone is not None: + current_utc_time = datetime.datetime.utcnow() + current_utc_time += datetime.timedelta(days=365) + expires_str = current_utc_time.strftime('%a, %d-%b-%Y %H:%M:%S GMT') + + request.addCookie('timezone', new_timezone, expires=expires_str) + + referer = request.getHeader('Referer') + if referer is None: + referer = '/' + + request.setResponseCode(303) + request.redirect(referer) + return '' + # Helpers + def __get_summarystats(self, title, data): + data_min = np.amin(data) + data_max = np.amax(data) + data_range = data_max-data_min + data_mean = np.mean(data) + data_median = np.median(data) + data_std = np.std(data) + + return (title, data_min, 
data_max, data_range, data_mean, data_median, data_std) + + def __get_stats_tables_data(self, data): + all_data = sum(data, []) + percentiles = [None] * 100 + for i in xrange(101): + percentiles[i-1] = [i, np.percentile(all_data, i)] + + dataFrame = pd.DataFrame(np.array(data).T) + + rollingPrep = pd.rolling_mean(dataFrame, window=10) + rollingPrep = rollingPrep.replace([np.inf, -np.inf], ["inf", "-inf"]) + rollingPrep = rollingPrep.fillna('na') + rolling = np.array(rollingPrep).tolist() + + percentsPrep = dataFrame.pct_change(5) + percentsPrep = 100 * np.round(percentsPrep, decimals=2) + percentsPrep = percentsPrep.replace([np.inf, -np.inf], ["inf", "-inf"]) + percentsPrep = percentsPrep.fillna('na') + percents = np.array(percentsPrep).tolist() + + return (percentiles, rolling, percents) + @defer.inlineCallbacks def __get_graph_details(self, title, graph, graph_type=None, timescale=None): @@ -548,6 +734,8 @@ def __get_graph_details(self, title, graph, graph_type=None, if not timescale or timescale not in self.timescales: timescale = graph['timescale'] + updates_infrequently = graph.get('updates_infrequently', False) + fields = graph['fields'] fields.sort() # TODO: migrate graphs so fields are already sorted @@ -583,7 +771,6 @@ def __get_graph_details(self, title, graph, graph_type=None, else: matching_components = [component] - for each_component in matching_components: metrics = yield self.__redis_model_data.get_metrics( each_component) @@ -620,13 +807,11 @@ def __get_graph_details(self, title, graph, graph_type=None, line_names.append(line_name.encode('utf8')) data = yield self.__redis_model_data.get_data( - each_component, each_metric, timescale) + each_component, each_metric, timescale, + updates_infrequently) data_rows.append(data) - # d3 wants time in ms - current_time_slot = (int(time.time()) / 60 * 60) * 1000 - if len(data_rows) > 0: length = max([len(row) for row in data_rows]) @@ -644,12 +829,30 @@ def __get_graph_details(self, title, graph, 
graph_type=None, defer.returnValue((title, title_urlencoded, graph_type, urllib.quote_plus(graph_type), timescale, - time_per_data_point, line_names, data_rows, current_time_slot, + time_per_data_point, line_names, data_rows, length, max_value)) +def update_metric_process(queue, redis_host): + log = logging.getLogger('tinyfeedback') + + blocking_data = redis_model.BlockingData(redis_host) + + while True: + try: + (component, args) = queue.get(timeout=1) -def set_up_server(port, log_path, log_level): - # Set up logging + for metric, value in args.iteritems(): + blocking_data.update_metric(component, metric, + int(float(value))) + + except Queue.Empty: + continue + + except Exception, e: + log.exception('Encountered exception') + continue + +def set_up_server(host, port, log_path, log_level, redis_host='127.0.0.1', redis_pool_size=None): log = logging.getLogger('tinyfeedback') level = getattr(logging, log_level, logging.INFO) log.setLevel(level) @@ -668,15 +871,25 @@ def set_up_server(port, log_path, log_level): handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')) log.addHandler(handler) - # Connect to redis - redis_model_data = redis_model.Data('127.0.0.1') - redis_model_data.connect() + redis_model_data = redis_model.Data(redis_host) + redis_model_data.connect(redis_pool_size) + + redis_model_graph = redis_model.Graph(redis_host) + redis_model_graph.connect(redis_pool_size) + + queue = multiprocessing.Queue(50) - redis_model_graph = redis_model.Graph('127.0.0.1') - redis_model_graph.connect() + for i in xrange(2): + p = multiprocessing.Process(target=update_metric_process, + kwargs={ + 'queue': queue, + 'redis_host': redis_host, + }) - # Set up the webserver - controller = Controller(redis_model_data, redis_model_graph, log) + p.daemon = True + p.start() + + controller = Controller(redis_model_data, redis_model_graph, queue, log) dispatcher = txroutes.Dispatcher() @@ -687,8 +900,8 @@ def set_up_server(port, log_path, log_level): 
dispatcher.connect('get_dashboards', '/dashboards', controller=controller, action='get_dashboards', conditions=dict(method=['GET'])) - dispatcher.connect('get_user_dashboards', '/dashboards/{dashboard_username}', - controller=controller, action='get_user_dashboards', + dispatcher.connect('get_user_dashboard', '/dashboards/{dashboard_username}', + controller=controller, action='get_user_dashboard', conditions=dict(method=['GET'])) dispatcher.connect('delete_user', '/dashboards/{dashboard_username}', @@ -709,6 +922,14 @@ def set_up_server(port, log_path, log_level): controller=controller, action='get_graph', conditions=dict(method=['GET'])) + dispatcher.connect('get_components', '/view', + controller=controller, action='get_components', + conditions=dict(method=['GET'])) + + dispatcher.connect('get_stats', '/stats/{graph_username}/{title}', + controller=controller, action='get_stats', + conditions=dict(method=['GET'])) + # AJAX calls to manipulate user state dispatcher.connect('post_graph_ordering', '/graph_ordering', controller=controller, action='post_graph_ordering', @@ -726,6 +947,14 @@ def set_up_server(port, log_path, log_level): controller=controller, action='get_data', conditions=dict(method=['GET'])) + dispatcher.connect('get_graph_data', '/graphdata/{graph_username}/{title}', + controller=controller, action='get_graph_data', + conditions=dict(method=['GET'])) + + dispatcher.connect('get_stats_data', '/statsdata/{graph_username}/{title}', + controller=controller, action='get_stats_data', + conditions=dict(method=['GET'])) + dispatcher.connect('delete_data', '/data/:component', controller=controller, action='delete_data', conditions=dict(method=['DELETE'])) @@ -741,12 +970,15 @@ def set_up_server(port, log_path, log_level): dispatcher.connect('get_logout', '/logout', controller=controller, action='get_logout', conditions=dict(method=['GET'])) + dispatcher.connect('post_timezone', '/timezone', controller=controller, + action='post_timezone', 
conditions=dict(method=['POST'])) + static_path = os.path.join(os.path.dirname(__file__), 'static') dispatcher.putChild('static', File(static_path)) factory = Site(dispatcher) - reactor.listenTCP(port, factory) + reactor.listenTCP(port, factory, interface=host) log.info('tiny feedback running on port %d', port)