From 78795c00c8a596600a37e151028f568221cc6790 Mon Sep 17 00:00:00 2001
From: Charlie Briggs
Date: Wed, 16 Dec 2020 10:54:44 +0000
Subject: [PATCH] Add Makefile, some simple readme info. Remove python2
 mentions

There are no changes that actually make python2 unsupported, but we don't
want to guarantee python2 support going forwards, so remove it from the
README.
---
 .gitignore     |   1 +
 Makefile       |   8 +
 README.md      | 106 ++++++++-----
 discoverecs.py | 409 ++++++++++++++++++++++++++++++++-----------------
 poetry.lock    | 291 +++++++++++++++++++++++++++++++++++
 pyproject.toml |  17 ++
 6 files changed, 651 insertions(+), 181 deletions(-)
 create mode 100644 .gitignore
 create mode 100644 Makefile
 create mode 100644 poetry.lock
 create mode 100644 pyproject.toml

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..2951e5c
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+targets
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..b94b0b4
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,8 @@
+.PHONY: format
+format:
+	poetry run black .
+
+.PHONY: dev-start
+dev-start:
+	@mkdir -p ./targets
+	poetry run python discoverecs.py --directory $$PWD/targets
diff --git a/README.md b/README.md
index 902fe7f..f98c2bd 100644
--- a/README.md
+++ b/README.md
@@ -1,33 +1,63 @@
# prometheus-ecs-sd
+
ECS Service Discovery for Prometheus

## Info
+
This tool provides Prometheus service discovery for Docker containers running on AWS ECS.
You can easily instrument your app using a Prometheus client and enable discovery by
adding an ENV variable to the Service Task Definition. Your container will then be added
-to the list of Prometheus targets to be scraped. Requires python2 or python3 and boto3. Works with Prometheus 2.x. It supports bridge, host
-and awsvpc (EC2 and Fargate) network modes.
+to the list of Prometheus targets to be scraped.
+
+Bridge, host and awsvpc (EC2 and Fargate) network modes are supported.
+
+Requires:
+
+- `python3`
+- the `boto3` library
+- Prometheus `2.x`
+
+## Developing
+
+Local development requires the [poetry](https://python-poetry.org/) tool; see its documentation for installation instructions.
+
+To start the service, run:
+
+```shell
+AWS_PROFILE= make dev-start
+```
+
+Any AWS configuration mechanism supported by boto3 can be used, e.g. individual access/secret keys.
+
+To format the code, run:
+
+```shell
+make format
+```

## Setup
+
`discoverecs.py` should run alongside the Prometheus server. It generates targets using
JSON file service discovery. It can be started by running:

-```
+```shell
python discoverecs.py --directory /opt/prometheus-ecs
```

-Where `/opt/prometheus-ecs` is defined in your Prometheus config as a `file_sd_config` job:
+Note that the directory must already exist.
+
+The output directory (here `/opt/prometheus-ecs`) is then referenced in your Prometheus config as a `file_sd_config` job:

```yaml
- job_name: 'ecs-1m'
  scrape_interval: 1m
  file_sd_configs:
    - files:
        - /opt/prometheus-ecs/1m-tasks.json
  relabel_configs:
    - source_labels: [metrics_path]
      action: replace
      target_label: __metrics_path__
      regex: (.+)
```

You can also specify a discovery interval with `--interval` (in seconds). The default is 60s.
We also provide caching to minimize hitting query rate limits with the AWS ECS API.
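For illustration, a generated target file such as `1m-tasks.json` might look like the following; the structure mirrors what `discoverecs.py` writes, but the IPs, ports, IDs and names here are made-up examples:

```json
[
    {
        "targets": ["10.0.1.17:32768"],
        "labels": {
            "instance": "10.0.1.17:32768",
            "job": "my-app",
            "metrics_path": "/metrics",
            "ecs_task_id": "1234abcd5678efgh",
            "ecs_task_version": "42",
            "ecs_container_id": "my-app-container",
            "ecs_cluster": "my-cluster",
            "instance_id": "i-0123456789abcdef0"
        }
    }
]
```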
`discoverecs.py` runs in a loop until interrupted.

To make your application discoverable by Prometheus, you need to set the following environment variable in your task definition:

-```
-{"name": "PROMETHEUS", "value": "true"}
+```json
+{ "name": "PROMETHEUS", "value": "true" }
```

Metric path and scrape interval are supported via `PROMETHEUS_ENDPOINT`:

-```
+```text
"interval:/metric_path,..."
```

Examples:

-```
+```text
"5m:/mymetrics,30s:/mymetrics2"
"/mymetrics"
"30s:/mymetrics1,/mymetrics2"
```

Under the ECS task definition (`task.json`):

-```
-{"name": "PROMETHEUS_ENDPOINT", "value": "5m:/mymetrics,30s:/mymetrics2"}
+```json
+{ "name": "PROMETHEUS_ENDPOINT", "value": "5m:/mymetrics,30s:/mymetrics2" }
```

Available scrape intervals: `15s`, `30s`, `1m`, `5m`.

The default metric path is `/metrics`. The default scrape interval is `1m`.

The following Prometheus configuration should be used to support all available intervals:

```yaml
- job_name: 'ecs-15s'
  scrape_interval: 15s
  file_sd_configs:
    - files:
        - /opt/prometheus-ecs/15s-tasks.json
  relabel_configs:
    - source_labels: [metrics_path]
      action: replace
      target_label: __metrics_path__
      regex: (.+)

- job_name: 'ecs-30s'
  scrape_interval: 30s
  file_sd_configs:
    - files:
        - /opt/prometheus-ecs/30s-tasks.json
  relabel_configs:
    - source_labels: [metrics_path]
      action: replace
      target_label: __metrics_path__
      regex: (.+)

- job_name: 'ecs-1m'
  scrape_interval: 1m
  file_sd_configs:
    - files:
        - /opt/prometheus-ecs/1m-tasks.json
  relabel_configs:
    - source_labels: [metrics_path]
      action: replace
      target_label: __metrics_path__
      regex: (.+)

- job_name: 'ecs-5m'
  scrape_interval: 5m
  file_sd_configs:
    - files:
        - /opt/prometheus-ecs/5m-tasks.json
  relabel_configs:
    - source_labels: [metrics_path]
      action: replace
      target_label: __metrics_path__
      regex: (.+)
```

## EC2 IAM Policy

The following IAM Policy should be added when running `discoverecs.py` in EC2:

```
resource "aws_iam_role_policy_attachment" "prometheus-server-role-ec2-read-only"
```

## Special cases

For skipping labels, set `PROMETHEUS_NOLABELS` to `true`.
This is useful when you use "blackbox" exporters or Pushgateway in a task
and metrics are exposed at a service level. This way, no EC2/ECS labels
are attached to the target.
diff --git a/discoverecs.py b/discoverecs.py
index ce6fcba..939e843 100644
--- a/discoverecs.py
+++ b/discoverecs.py
@@ -8,7 +8,7 @@ import re
 """
-Copyright 2018, 2019 Signal Media Ltd
+Copyright 2018, 2019, 2020 Signal Media Ltd

ECS service discovery for tasks. Please enable it by setting
env variable PROMETHEUS to "true".

@@ -40,11 +40,14 @@ for tasks using a classic ELB setup with multiple port mappings.
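For example, a task definition container exposing metrics on port 8080
under /stats, scraped every 30s, might carry environment entries like
these (illustrative values, not taken from a real task definition):

    {"name": "PROMETHEUS", "value": "true"},
    {"name": "PROMETHEUS_ENDPOINT", "value": "30s:/stats"},
    {"name": "PROMETHEUS_CONTAINER_PORT", "value": "8080"}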
""" + def log(message): print(message) + def chunk_list(l, n): - return [l[i:i + n] for i in range(0, len(l), n)] + return [l[i : i + n] for i in range(0, len(l), n)] + def dict_get(d, k, default): if k in d: @@ -52,8 +55,8 @@ def dict_get(d, k, default): else: return default -class FlipCache(): +class FlipCache: def __init__(self): self.current_cache = {} self.next_cache = {} @@ -96,7 +99,6 @@ def get(self, key, fetcher): class TaskInfo: - def __init__(self, task): self.task = task self.task_definition = None @@ -104,16 +106,18 @@ def __init__(self, task): self.ec2_instance = None def valid(self): - if 'FARGATE' in self.task_definition.get('requiresCompatibilities', ''): + if "FARGATE" in self.task_definition.get("requiresCompatibilities", ""): return self.task_definition else: - return self.task_definition and self.container_instance and self.ec2_instance + return ( + self.task_definition and self.container_instance and self.ec2_instance + ) -class TaskInfoDiscoverer: +class TaskInfoDiscoverer: def __init__(self): - self.ec2_client = boto3.client('ec2') - self.ecs_client = boto3.client('ecs') + self.ec2_client = boto3.client("ec2") + self.ecs_client = boto3.client("ecs") self.task_cache = FlipCache() self.task_definition_cache = FlipCache() self.container_instance_cache = FlipCache() @@ -127,34 +131,64 @@ def flip_caches(self): def describe_tasks(self, cluster_arn, task_arns): def fetcher_task_definition(arn): - return self.ecs_client.describe_task_definition(taskDefinition=arn)['taskDefinition'] + return self.ecs_client.describe_task_definition(taskDefinition=arn)[ + "taskDefinition" + ] def fetcher(fetch_task_arns): tasks = {} - result = self.ecs_client.describe_tasks(cluster=cluster_arn, tasks=fetch_task_arns) - if 'tasks' in result: - for task in result['tasks']: + result = self.ecs_client.describe_tasks( + cluster=cluster_arn, tasks=fetch_task_arns + ) + if "tasks" in result: + for task in result["tasks"]: no_network_binding = [] - for container in task['containers']: - if ('networkBindings' not in container or len(container['networkBindings']) == 0) and len(container['networkInterfaces']) == 0: - no_network_binding.append(container['name']) + for container in task["containers"]: + if ( + "networkBindings" not in container + or len(container["networkBindings"]) == 0 + ) and len(container["networkInterfaces"]) == 0: + no_network_binding.append(container["name"]) if no_network_binding: - arn = task['taskDefinitionArn'] - no_cache = None - task_definition = self.task_definition_cache.get(arn, fetcher_task_definition) - is_host_network_mode = task_definition.get('networkMode') == 'host' - for container_definition in task_definition['containerDefinitions']: - prometheus = get_environment_var(container_definition['environment'], 'PROMETHEUS') - prometheus_port = get_environment_var(container_definition['environment'], 'PROMETHEUS_PORT') - port_mappings = container_definition.get('portMappings') - if container_definition['name'] in no_network_binding and prometheus and not (is_host_network_mode and (prometheus_port or port_mappings)): - log(task['group'] + ':' + container_definition['name'] + ' does not have a networkBinding. 
Skipping for next run.') - no_cache = True - if not no_cache: - tasks[task['taskArn']] = task + arn = task["taskDefinitionArn"] + no_cache = None + task_definition = self.task_definition_cache.get( + arn, fetcher_task_definition + ) + is_host_network_mode = ( + task_definition.get("networkMode") == "host" + ) + for container_definition in task_definition[ + "containerDefinitions" + ]: + prometheus = get_environment_var( + container_definition["environment"], "PROMETHEUS" + ) + prometheus_port = get_environment_var( + container_definition["environment"], "PROMETHEUS_PORT" + ) + port_mappings = container_definition.get("portMappings") + if ( + container_definition["name"] in no_network_binding + and prometheus + and not ( + is_host_network_mode + and (prometheus_port or port_mappings) + ) + ): + log( + task["group"] + + ":" + + container_definition["name"] + + " does not have a networkBinding. Skipping for next run." + ) + no_cache = True + if not no_cache: + tasks[task["taskArn"]] = task else: - tasks[task['taskArn']] = task + tasks[task["taskArn"]] = task return tasks + return self.task_cache.get_dict(task_arns, fetcher).values() def create_task_infos(self, cluster_arn, task_arns): @@ -162,10 +196,12 @@ def create_task_infos(self, cluster_arn, task_arns): def add_task_definitions(self, task_infos): def fetcher(arn): - return self.ecs_client.describe_task_definition(taskDefinition=arn)['taskDefinition'] + return self.ecs_client.describe_task_definition(taskDefinition=arn)[ + "taskDefinition" + ] for task_info in task_infos: - arn = task_info.task['taskDefinitionArn'] + arn = task_info.task["taskDefinitionArn"] task_info.task_definition = self.task_definition_cache.get(arn, fetcher) def add_container_instances(self, task_infos, cluster_arn): @@ -173,15 +209,23 @@ def fetcher(arns): arnsChunked = chunk_list(arns, 100) instances = {} for arns in arnsChunked: - result = self.ecs_client.describe_container_instances(cluster=cluster_arn, containerInstances=arns) - for i in dict_get(result, 'containerInstances', []): - instances[i['containerInstanceArn']] = i + result = self.ecs_client.describe_container_instances( + cluster=cluster_arn, containerInstances=arns + ) + for i in dict_get(result, "containerInstances", []): + instances[i["containerInstanceArn"]] = i return instances - containerInstanceArns = list(set(map(lambda t: t.task['containerInstanceArn'], task_infos))) - containerInstances = self.container_instance_cache.get_dict(containerInstanceArns, fetcher) + containerInstanceArns = list( + set(map(lambda t: t.task["containerInstanceArn"], task_infos)) + ) + containerInstances = self.container_instance_cache.get_dict( + containerInstanceArns, fetcher + ) for t in task_infos: - t.container_instance = dict_get(containerInstances, t.task['containerInstanceArn'], None) + t.container_instance = dict_get( + containerInstances, t.task["containerInstanceArn"], None + ) def add_ec2_instances(self, task_infos): def fetcher(ids): @@ -189,55 +233,78 @@ def fetcher(ids): instances = {} for ids in idsChunked: result = self.ec2_client.describe_instances(InstanceIds=ids) - for r in dict_get(result, 'Reservations', []): - for i in dict_get(r, 'Instances', []): - instances[i['InstanceId']] = i + for r in dict_get(result, "Reservations", []): + for i in dict_get(r, "Instances", []): + instances[i["InstanceId"]] = i return instances - instance_ids = list(set(map(lambda t: t.container_instance['ec2InstanceId'], task_infos))) + instance_ids = list( + set(map(lambda t: t.container_instance["ec2InstanceId"], 
task_infos)) + ) instances = self.ec2_instance_cache.get_dict(instance_ids, fetcher) for t in task_infos: - t.ec2_instance = dict_get(instances, t.container_instance['ec2InstanceId'], None) + t.ec2_instance = dict_get( + instances, t.container_instance["ec2InstanceId"], None + ) def get_infos_for_cluster(self, cluster_arn, launch_type): - tasks_pages = self.ecs_client.get_paginator('list_tasks').paginate(cluster=cluster_arn, launchType=launch_type) + tasks_pages = self.ecs_client.get_paginator("list_tasks").paginate( + cluster=cluster_arn, launchType=launch_type + ) task_infos = [] for task_arns in tasks_pages: - if task_arns['taskArns']: - task_infos += self.create_task_infos(cluster_arn, task_arns['taskArns']) + if task_arns["taskArns"]: + task_infos += self.create_task_infos(cluster_arn, task_arns["taskArns"]) self.add_task_definitions(task_infos) - if 'EC2' in launch_type: + if "EC2" in launch_type: self.add_container_instances(task_infos, cluster_arn) return task_infos def print_cache_stats(self): - log('task_cache {} {} task_definition_cache {} {} {} container_instance_cache {} {} ec2_instance_cache {} {} {}'.format( - self.task_cache.hits, self.task_cache.misses, - self.task_definition_cache.hits, self.task_definition_cache.misses, - len(self.task_definition_cache.current_cache), - self.container_instance_cache.hits, self.container_instance_cache.misses, - self.ec2_instance_cache.hits, self.ec2_instance_cache.misses, - len(self.ec2_instance_cache.current_cache))) + log( + "task_cache {} {} task_definition_cache {} {} {} container_instance_cache {} {} ec2_instance_cache {} {} {}".format( + self.task_cache.hits, + self.task_cache.misses, + self.task_definition_cache.hits, + self.task_definition_cache.misses, + len(self.task_definition_cache.current_cache), + self.container_instance_cache.hits, + self.container_instance_cache.misses, + self.ec2_instance_cache.hits, + self.ec2_instance_cache.misses, + len(self.ec2_instance_cache.current_cache), + ) + ) def get_infos(self): self.flip_caches() task_infos = [] fargate_task_infos = [] - clusters_pages = self.ecs_client.get_paginator('list_clusters').paginate() + clusters_pages = self.ecs_client.get_paginator("list_clusters").paginate() for clusters in clusters_pages: - for cluster_arn in clusters['clusterArns']: - task_infos += self.get_infos_for_cluster(cluster_arn, 'EC2') - fargate_task_infos += self.get_infos_for_cluster(cluster_arn, 'FARGATE') + for cluster_arn in clusters["clusterArns"]: + task_infos += self.get_infos_for_cluster(cluster_arn, "EC2") + fargate_task_infos += self.get_infos_for_cluster(cluster_arn, "FARGATE") self.add_ec2_instances(task_infos) task_infos += fargate_task_infos self.print_cache_stats() return task_infos -class Target: - def __init__(self, ip, port, metrics_path, - p_instance, ecs_task_id, ecs_task_name, ecs_task_version, - ecs_container_id, ecs_cluster_name, ec2_instance_id): +class Target: + def __init__( + self, + ip, + port, + metrics_path, + p_instance, + ecs_task_id, + ecs_task_name, + ecs_task_version, + ecs_container_id, + ecs_cluster_name, + ec2_instance_id, + ): self.ip = ip self.port = port self.metrics_path = metrics_path @@ -249,86 +316,131 @@ def __init__(self, ip, port, metrics_path, self.ecs_cluster_name = ecs_cluster_name self.ec2_instance_id = ec2_instance_id + def get_environment_var(environment, name): for entry in environment: - if entry['name'] == name: - return entry['value'] + if entry["name"] == name: + return entry["value"] return None -def extract_name(arn): - return 
arn.split(":")[5].split('/')[-1] + +def extract_name_from_arn(arn): + return arn.split(":")[5].split("/")[-1] + def extract_task_version(taskDefinitionArn): return taskDefinitionArn.split(":")[6] + def extract_path_interval(env_variable): path_interval = {} if env_variable: for lst in env_variable.split(","): - if ':' in lst: + if ":" in lst: pi = lst.split(":") - if re.search('(15s|30s|1m|5m)', pi[0]): + if re.search("(15s|30s|1m|5m)", pi[0]): path_interval[pi[1]] = pi[0] else: - path_interval[pi[1]] = '1m' + path_interval[pi[1]] = "1m" else: - path_interval[lst] = '1m' + path_interval[lst] = "1m" else: - path_interval['/metrics'] = '1m' + path_interval["/metrics"] = "1m" return path_interval + def task_info_to_targets(task_info): + targets = [] + + task = task_info.task + task_definition = task_info.task_definition + if not task_info.valid(): - return [] - for container_definition in task_info.task_definition['containerDefinitions']: - prometheus = get_environment_var(container_definition['environment'], 'PROMETHEUS') - metrics_path = get_environment_var(container_definition['environment'], 'PROMETHEUS_ENDPOINT') - nolabels = get_environment_var(container_definition['environment'], 'PROMETHEUS_NOLABELS') - prom_port = get_environment_var(container_definition['environment'], 'PROMETHEUS_PORT') - prom_container_port = get_environment_var(container_definition['environment'], 'PROMETHEUS_CONTAINER_PORT') - if nolabels != 'true': nolabels = None - containers = filter(lambda c:c['name'] == container_definition['name'], task_info.task['containers']) - if prometheus: - for container in containers: - ecs_task_name=extract_name(task_info.task['taskDefinitionArn']) - has_host_port_mapping = 'portMappings' in container_definition and len(container_definition['portMappings']) > 0 - if prom_port: - first_port = prom_port - elif task_info.task_definition.get('networkMode') in ('host', 'awsvpc'): - if has_host_port_mapping: - first_port = str(container_definition['portMappings'][0]['hostPort']) - else: - first_port = '80' - elif prom_container_port: - binding_by_container_port = [c for c in container['networkBindings'] if str(c['containerPort']) == prom_container_port] - if binding_by_container_port: - first_port = str(binding_by_container_port[0]['hostPort']) - else: - log(task_info.task['group'] + ':' + container_definition['name'] + ' does not expose port matching PROMETHEUS_CONTAINER_PORT, omitting') - return [] - else: - first_port = str(container['networkBindings'][0]['hostPort']) + return targets - if task_info.task_definition.get('networkMode') == 'awsvpc': - interface_ip = container['networkInterfaces'][0]['privateIpv4Address'] + for container_definition in task_definition["containerDefinitions"]: + prometheus_enabled = get_environment_var( + container_definition["environment"], "PROMETHEUS" + ) + metrics_path = get_environment_var( + container_definition["environment"], "PROMETHEUS_ENDPOINT" + ) + nolabels = get_environment_var( + container_definition["environment"], "PROMETHEUS_NOLABELS" + ) + if nolabels != "true": + nolabels = None + prometheus_port = get_environment_var( + container_definition["environment"], "PROMETHEUS_PORT" + ) + prometheus_container_port = get_environment_var( + container_definition["environment"], "PROMETHEUS_CONTAINER_PORT" + ) + running_containers = filter( + lambda container: container["name"] == container_definition["name"], + task["containers"], + ) + if not prometheus_enabled: + continue + + for container in running_containers: + ecs_task_name = 
extract_name_from_arn(task["taskDefinitionArn"]) + has_host_port_mapping = ( + "portMappings" in container_definition + and len(container_definition["portMappings"]) > 0 + ) + + if prometheus_port: + first_port = prometheus_port + elif task_definition.get("networkMode") in ("host", "awsvpc"): + if has_host_port_mapping: + first_port = str( + container_definition["portMappings"][0]["hostPort"] + ) + else: + first_port = "80" + elif prometheus_container_port: + binding_by_container_port = [ + c + for c in container["networkBindings"] + if str(c["containerPort"]) == prometheus_container_port + ] + if binding_by_container_port: + first_port = str(binding_by_container_port[0]["hostPort"]) else: - interface_ip = task_info.ec2_instance['PrivateIpAddress'] + log( + task["group"] + + ":" + + container_definition["name"] + + " does not expose port matching PROMETHEUS_CONTAINER_PORT, omitting" + ) + return [] + else: + first_port = str(container["networkBindings"][0]["hostPort"]) + + if task_definition.get("networkMode") == "awsvpc": + interface_ip = container["networkInterfaces"][0]["privateIpv4Address"] + else: + interface_ip = task_info.ec2_instance["PrivateIpAddress"] - if nolabels: - p_instance = ecs_task_name - ecs_task_id = ecs_task_version = ecs_container_id = ecs_cluster_name = ec2_instance_id = None + if nolabels: + p_instance = ecs_task_name + ecs_task_id = ( + ecs_task_version + ) = ecs_container_id = ecs_cluster_name = ec2_instance_id = None + else: + p_instance = interface_ip + ":" + first_port + ecs_task_id = extract_name_from_arn(task["taskArn"]) + ecs_task_version = extract_task_version(task["taskDefinitionArn"]) + ecs_cluster_name = extract_name_from_arn(task["clusterArn"]) + if "FARGATE" in task_definition.get("requiresCompatibilities", ""): + ec2_instance_id = ecs_container_id = None else: - p_instance = interface_ip + ':' + first_port - ecs_task_id=extract_name(task_info.task['taskArn']) - ecs_task_version=extract_task_version(task_info.task['taskDefinitionArn']) - ecs_cluster_name=extract_name(task_info.task['clusterArn']) - if 'FARGATE' in task_info.task_definition.get('requiresCompatibilities', ''): - ec2_instance_id = ecs_container_id = None - else: - ec2_instance_id=task_info.container_instance['ec2InstanceId'] - ecs_container_id=extract_name(container['containerArn']) + ec2_instance_id = task_info.container_instance["ec2InstanceId"] + ecs_container_id = extract_name_from_arn(container["containerArn"]) - return [Target( + targets += [ + Target( ip=interface_ip, port=first_port, metrics_path=metrics_path, @@ -338,11 +450,13 @@ def task_info_to_targets(task_info): ecs_task_version=ecs_task_version, ecs_container_id=ecs_container_id, ecs_cluster_name=ecs_cluster_name, - ec2_instance_id=ec2_instance_id)] - return [] + ec2_instance_id=ec2_instance_id, + ) + ] + return targets -class Main: +class Main: def __init__(self, directory, interval): self.directory = directory self.interval = interval @@ -350,9 +464,9 @@ def __init__(self, directory, interval): def write_jobs(self, jobs): for i, j in jobs.items(): - file_name = self.directory + '/' + i + '-tasks.json' - tmp_file_name = file_name + '.tmp' - with open(tmp_file_name, 'w') as f: + file_name = self.directory + "/" + i + "-tasks.json" + tmp_file_name = file_name + ".tmp" + with open(tmp_file_name, "w") as f: f.write(json.dumps(j, indent=4)) os.rename(tmp_file_name, file_name) @@ -366,37 +480,37 @@ def get_targets(self): def discover_tasks(self): targets = self.get_targets() jobs = defaultdict(list) - for i in 
['15s','30s','1m','5m']: + for i in ["15s", "30s", "1m", "5m"]: jobs[i] = [] - log('Targets: ' + str(len(targets))) + log("Targets: " + str(len(targets))) for target in targets: path_interval = extract_path_interval(target.metrics_path) for path, interval in path_interval.items(): labels = False if target.ec2_instance_id is None and target.ecs_task_id: labels = { - 'ecs_task_id' : target.ecs_task_id, - 'ecs_task_version' : target.ecs_task_version, - 'ecs_cluster' : target.ecs_cluster_name + "ecs_task_id": target.ecs_task_id, + "ecs_task_version": target.ecs_task_version, + "ecs_cluster": target.ecs_cluster_name, } elif target.ec2_instance_id: labels = { - 'ecs_task_id' : target.ecs_task_id, - 'ecs_task_version' : target.ecs_task_version, - 'ecs_container_id' : target.ecs_container_id, - 'ecs_cluster' : target.ecs_cluster_name, - 'instance_id' : target.ec2_instance_id + "ecs_task_id": target.ecs_task_id, + "ecs_task_version": target.ecs_task_version, + "ecs_container_id": target.ecs_container_id, + "ecs_cluster": target.ecs_cluster_name, + "instance_id": target.ec2_instance_id, } job = { - 'targets' : [target.ip + ':' + target.port], - 'labels' : { - 'instance': target.p_instance, - 'job' : target.ecs_task_name, - 'metrics_path' : path - } + "targets": [target.ip + ":" + target.port], + "labels": { + "instance": target.p_instance, + "job": target.ecs_task_name, + "metrics_path": path, + }, } if labels: - job['labels'].update(labels) + job["labels"].update(labels) jobs[interval].append(job) log(job) self.write_jobs(jobs) @@ -406,14 +520,21 @@ def loop(self): self.discover_tasks() time.sleep(self.interval) + def main(): arg_parser = argparse.ArgumentParser() - arg_parser.add_argument('--directory', required=True) - arg_parser.add_argument('--interval', default=60) + arg_parser.add_argument("--directory", required=True) + arg_parser.add_argument("--interval", default=60) args = arg_parser.parse_args() - log('Starting. Directory: ' + args.directory + '. Interval: ' + str(args.interval) + 's.') + log( + "Starting. Directory: " + + args.directory + + ". Interval: " + + str(args.interval) + + "s." + ) Main(args.directory, float(args.interval)).loop() -if __name__== "__main__": - main() +if __name__ == "__main__": + main() diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..37bded4 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,291 @@ +[[package]] +name = "appdirs" +version = "1.4.4" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "black" +version = "20.8b1" +description = "The uncompromising code formatter." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +appdirs = "*" +click = ">=7.1.2" +mypy-extensions = ">=0.4.3" +pathspec = ">=0.6,<1" +regex = ">=2020.1.8" +toml = ">=0.10.1" +typed-ast = ">=1.4.0" +typing-extensions = ">=3.7.4" + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] + +[[package]] +name = "boto3" +version = "1.16.37" +description = "The AWS SDK for Python" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +botocore = ">=1.19.37,<1.20.0" +jmespath = ">=0.7.1,<1.0.0" +s3transfer = ">=0.3.0,<0.4.0" + +[[package]] +name = "botocore" +version = "1.19.37" +description = "Low-level, data-driven core of boto 3." 
+category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +jmespath = ">=0.7.1,<1.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = {version = ">=1.25.4,<1.27", markers = "python_version != \"3.4\""} + +[[package]] +name = "click" +version = "7.1.2" +description = "Composable command line interface toolkit" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "jmespath" +version = "0.10.0" +description = "JSON Matching Expressions" +category = "main" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "pathspec" +version = "0.8.1" +description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "python-dateutil" +version = "2.8.1" +description = "Extensions to the standard Python datetime module" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "regex" +version = "2020.11.13" +description = "Alternative regular expression module, to replace re." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "s3transfer" +version = "0.3.3" +description = "An Amazon S3 Transfer Manager" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +botocore = ">=1.12.36,<2.0a.0" + +[[package]] +name = "six" +version = "1.15.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "dev" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "typed-ast" +version = "1.4.1" +description = "a fork of Python 2 and 3 ast modules with type comment support" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "typing-extensions" +version = "3.7.4.3" +description = "Backported and Experimental Type Hints for Python 3.5+" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "urllib3" +version = "1.26.2" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" + +[package.extras] +brotli = ["brotlipy (>=0.6.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"] + +[metadata] +lock-version = "1.1" +python-versions = "^3.8" +content-hash = "1dcc9e087c81f7e6ac442bff977575c9e3bc3d5e7ac29c2b19cdf210296f43cd" + +[metadata.files] +appdirs = [ + {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, + {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, +] +black = [ + {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, +] +boto3 = [ + {file = "boto3-1.16.37-py2.py3-none-any.whl", hash = "sha256:ad0e8dbd934d97b5228252785c0236c3ee4d464c14138f568e371bf43c6ea584"}, + {file = "boto3-1.16.37.tar.gz", hash = "sha256:ee86c26b3d457aa4d0256d0535d13107c32aa33bb5eb2a0b2dac9d81c3aca405"}, +] +botocore = [ + {file = "botocore-1.19.37-py2.py3-none-any.whl", hash = "sha256:5605c250f6f7c72ca50e45eab6186dfda03cb84296ca5b05f7416defcd3fcbc5"}, + {file = "botocore-1.19.37.tar.gz", hash = "sha256:67bf1285455d79336ce7061da1768206b78f7a0efc13c8b4033fd348a74e7491"}, +] +click = [ + {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, + {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, +] +jmespath = [ + {file = "jmespath-0.10.0-py2.py3-none-any.whl", hash = "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f"}, + {file = "jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9"}, +] +mypy-extensions = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, +] +pathspec = [ + {file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"}, + {file = "pathspec-0.8.1.tar.gz", hash = "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd"}, +] +python-dateutil = [ + {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, + {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, +] +regex = [ + {file = "regex-2020.11.13-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8b882a78c320478b12ff024e81dc7d43c1462aa4a3341c754ee65d857a521f85"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a63f1a07932c9686d2d416fb295ec2c01ab246e89b4d58e5fa468089cab44b70"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:6e4b08c6f8daca7d8f07c8d24e4331ae7953333dbd09c648ed6ebd24db5a10ee"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bba349276b126947b014e50ab3316c027cac1495992f10e5682dc677b3dfa0c5"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:56e01daca75eae420bce184edd8bb341c8eebb19dd3bce7266332258f9fb9dd7"}, + {file = 
"regex-2020.11.13-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:6a8ce43923c518c24a2579fda49f093f1397dad5d18346211e46f134fc624e31"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:1ab79fcb02b930de09c76d024d279686ec5d532eb814fd0ed1e0051eb8bd2daa"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:9801c4c1d9ae6a70aeb2128e5b4b68c45d4f0af0d1535500884d644fa9b768c6"}, + {file = "regex-2020.11.13-cp36-cp36m-win32.whl", hash = "sha256:49cae022fa13f09be91b2c880e58e14b6da5d10639ed45ca69b85faf039f7a4e"}, + {file = "regex-2020.11.13-cp36-cp36m-win_amd64.whl", hash = "sha256:749078d1eb89484db5f34b4012092ad14b327944ee7f1c4f74d6279a6e4d1884"}, + {file = "regex-2020.11.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b2f4007bff007c96a173e24dcda236e5e83bde4358a557f9ccf5e014439eae4b"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:38c8fd190db64f513fe4e1baa59fed086ae71fa45083b6936b52d34df8f86a88"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5862975b45d451b6db51c2e654990c1820523a5b07100fc6903e9c86575202a0"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:262c6825b309e6485ec2493ffc7e62a13cf13fb2a8b6d212f72bd53ad34118f1"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:bafb01b4688833e099d79e7efd23f99172f501a15c44f21ea2118681473fdba0"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:e32f5f3d1b1c663af7f9c4c1e72e6ffe9a78c03a31e149259f531e0fed826512"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:3bddc701bdd1efa0d5264d2649588cbfda549b2899dc8d50417e47a82e1387ba"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:02951b7dacb123d8ea6da44fe45ddd084aa6777d4b2454fa0da61d569c6fa538"}, + {file = "regex-2020.11.13-cp37-cp37m-win32.whl", hash = "sha256:0d08e71e70c0237883d0bef12cad5145b84c3705e9c6a588b2a9c7080e5af2a4"}, + {file = "regex-2020.11.13-cp37-cp37m-win_amd64.whl", hash = "sha256:1fa7ee9c2a0e30405e21031d07d7ba8617bc590d391adfc2b7f1e8b99f46f444"}, + {file = "regex-2020.11.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:baf378ba6151f6e272824b86a774326f692bc2ef4cc5ce8d5bc76e38c813a55f"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e3faaf10a0d1e8e23a9b51d1900b72e1635c2d5b0e1bea1c18022486a8e2e52d"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2a11a3e90bd9901d70a5b31d7dd85114755a581a5da3fc996abfefa48aee78af"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1ebb090a426db66dd80df8ca85adc4abfcbad8a7c2e9a5ec7513ede522e0a8f"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:b2b1a5ddae3677d89b686e5c625fc5547c6e492bd755b520de5332773a8af06b"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:2c99e97d388cd0a8d30f7c514d67887d8021541b875baf09791a3baad48bb4f8"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:c084582d4215593f2f1d28b65d2a2f3aceff8342aa85afd7be23a9cad74a0de5"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:a3d748383762e56337c39ab35c6ed4deb88df5326f97a38946ddd19028ecce6b"}, + {file = "regex-2020.11.13-cp38-cp38-win32.whl", hash = "sha256:7913bd25f4ab274ba37bc97ad0e21c31004224ccb02765ad984eef43e04acc6c"}, + {file = "regex-2020.11.13-cp38-cp38-win_amd64.whl", hash = 
"sha256:6c54ce4b5d61a7129bad5c5dc279e222afd00e721bf92f9ef09e4fae28755683"}, + {file = "regex-2020.11.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1862a9d9194fae76a7aaf0150d5f2a8ec1da89e8b55890b1786b8f88a0f619dc"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux1_i686.whl", hash = "sha256:4902e6aa086cbb224241adbc2f06235927d5cdacffb2425c73e6570e8d862364"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7a25fcbeae08f96a754b45bdc050e1fb94b95cab046bf56b016c25e9ab127b3e"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:d2d8ce12b7c12c87e41123997ebaf1a5767a5be3ec545f64675388970f415e2e"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f7d29a6fc4760300f86ae329e3b6ca28ea9c20823df123a2ea8693e967b29917"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:717881211f46de3ab130b58ec0908267961fadc06e44f974466d1887f865bd5b"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3128e30d83f2e70b0bed9b2a34e92707d0877e460b402faca908c6667092ada9"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:8f6a2229e8ad946e36815f2a03386bb8353d4bde368fdf8ca5f0cb97264d3b5c"}, + {file = "regex-2020.11.13-cp39-cp39-win32.whl", hash = "sha256:f8f295db00ef5f8bae530fc39af0b40486ca6068733fb860b42115052206466f"}, + {file = "regex-2020.11.13-cp39-cp39-win_amd64.whl", hash = "sha256:a15f64ae3a027b64496a71ab1f722355e570c3fac5ba2801cafce846bf5af01d"}, + {file = "regex-2020.11.13.tar.gz", hash = "sha256:83d6b356e116ca119db8e7c6fc2983289d87b27b3fac238cfe5dca529d884562"}, +] +s3transfer = [ + {file = "s3transfer-0.3.3-py2.py3-none-any.whl", hash = "sha256:2482b4259524933a022d59da830f51bd746db62f047d6eb213f2f8855dcb8a13"}, + {file = "s3transfer-0.3.3.tar.gz", hash = "sha256:921a37e2aefc64145e7b73d50c71bb4f26f46e4c9f414dc648c6245ff92cf7db"}, +] +six = [ + {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"}, + {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"}, +] +toml = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] +typed-ast = [ + {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3"}, + {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb"}, + {file = "typed_ast-1.4.1-cp35-cp35m-win32.whl", hash = "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919"}, + {file = "typed_ast-1.4.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01"}, + {file = "typed_ast-1.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75"}, + {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652"}, + {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7"}, + {file = "typed_ast-1.4.1-cp36-cp36m-win32.whl", hash = 
"sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1"}, + {file = "typed_ast-1.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa"}, + {file = "typed_ast-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614"}, + {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41"}, + {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b"}, + {file = "typed_ast-1.4.1-cp37-cp37m-win32.whl", hash = "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe"}, + {file = "typed_ast-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355"}, + {file = "typed_ast-1.4.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6"}, + {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907"}, + {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d"}, + {file = "typed_ast-1.4.1-cp38-cp38-win32.whl", hash = "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c"}, + {file = "typed_ast-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4"}, + {file = "typed_ast-1.4.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34"}, + {file = "typed_ast-1.4.1.tar.gz", hash = "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b"}, +] +typing-extensions = [ + {file = "typing_extensions-3.7.4.3-py2-none-any.whl", hash = "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"}, + {file = "typing_extensions-3.7.4.3-py3-none-any.whl", hash = "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918"}, + {file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"}, +] +urllib3 = [ + {file = "urllib3-1.26.2-py2.py3-none-any.whl", hash = "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473"}, + {file = "urllib3-1.26.2.tar.gz", hash = "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08"}, +] diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..5abc2f7 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,17 @@ +[tool.poetry] +name = "prometheus-ecs-sd" +version = "1.3.0" +description = "Service discovery mechanism for ECS, writes prometheus target config to disk." +authors = ["Signal AI "] +license = "Apache License 2.0" + +[tool.poetry.dependencies] +python = "^3.6" +boto3 = "^1.16.37" + +[tool.poetry.dev-dependencies] +black = "^20.8b1" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api"