Allow customisation of default interval
Limess committed Apr 8, 2021
1 parent 78795c0 commit 6986ea1
Showing 2 changed files with 33 additions and 12 deletions.
README.md (21 changes: 18 additions & 3 deletions)
@@ -60,8 +60,7 @@ The output directory is then `/opt/prometheus-ecs` defined in your Prometheus co
regex: (.+)
```
-You can also specify a discovery interval with `--interval` (in seconds). Default is 60s. We also provide caching to minimize hitting query
-rate limits with the AWS ECS API. `discoverecs.py` runs in a loop until interrupted and will output target information to stdout.
+You can also specify a discovery interval with `--interval` (in seconds). The default is `60s`. We also provide caching to minimize hitting query rate limits with the AWS ECS API. `discoverecs.py` runs in a loop until interrupted and will output target information to stdout.
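
As a quick illustration (not part of the commit itself), an invocation using these options might look like the following; the output directory and interval value are only examples:

```shell
# Illustrative invocation; adjust the directory and interval to your setup.
python discoverecs.py --directory /opt/prometheus-ecs --interval 30
```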

To make your application discoverable by Prometheus, you need to set the following environment variable in your task definition:

@@ -91,7 +90,23 @@ Under ECS task definition (`task.json`):

Available scrape intervals: `15s`, `30s`, `1m`, `5m`.

-The default metric path is `/metrics`. The default scrape interval is `1m`.
+The default metric path is `/metrics`.

### Default scrape interval

The default scrape interval is `1m` when no interval is specified in the `PROMETHEUS_ENDPOINT` variable.

This can be customised with the `--default-scrape-interval` option. It accepts any string; targets without an explicit interval are then written to `/opt/prometheus-ecs/<default_scrape_interval>-tasks.json`.

e.g. if `default` is used:

```shell
--default-scrape-interval default
```

then `/opt/prometheus-ecs/default-tasks.json` will be written. This is useful if you want to control the default scrape interval from your Prometheus configuration, rather than having to update this discovery service's configuration and redeploy it.
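
For illustration (not part of the commit), running the service with a custom default might look like this; the directory path is just an example:

```shell
# Targets whose PROMETHEUS_ENDPOINT specifies no interval are then written
# to /opt/prometheus-ecs/default-tasks.json
python discoverecs.py --directory /opt/prometheus-ecs --default-scrape-interval default
```

Prometheus can then pick up `default-tasks.json` alongside the per-interval files, and the scrape interval applied to that file can be changed entirely on the Prometheus side.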

### Configuration yaml

The following Prometheus configuration should be used to support all available intervals:

discoverecs.py (24 changes: 15 additions & 9 deletions)
@@ -341,11 +341,11 @@ def extract_path_interval(env_variable):
                if re.search("(15s|30s|1m|5m)", pi[0]):
                    path_interval[pi[1]] = pi[0]
                else:
-                    path_interval[pi[1]] = "1m"
+                    path_interval[pi[1]] = None
            else:
-                path_interval[lst] = "1m"
+                path_interval[lst] = None
    else:
-        path_interval["/metrics"] = "1m"
+        path_interval["/metrics"] = None
    return path_interval


@@ -457,14 +457,15 @@ def task_info_to_targets(task_info):


class Main:
-    def __init__(self, directory, interval):
+    def __init__(self, directory, interval, default_scrape_interval):
        self.directory = directory
        self.interval = interval
+        self.default_scrape_interval = default_scrape_interval
        self.discoverer = TaskInfoDiscoverer()

    def write_jobs(self, jobs):
-        for i, j in jobs.items():
-            file_name = self.directory + "/" + i + "-tasks.json"
+        for interval, j in jobs.items():
+            file_name = self.directory + "/" + interval + "-tasks.json"
            tmp_file_name = file_name + ".tmp"
            with open(tmp_file_name, "w") as f:
                f.write(json.dumps(j, indent=4))
@@ -511,7 +512,7 @@ def discover_tasks(self):
                }
                if labels:
                    job["labels"].update(labels)
-                jobs[interval].append(job)
+                jobs[interval or self.default_scrape_interval].append(job)
                log(job)
        self.write_jobs(jobs)

@@ -525,15 +525,20 @@ def main():
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("--directory", required=True)
    arg_parser.add_argument("--interval", default=60)
+    arg_parser.add_argument("--default-scrape-interval", default="1m")
    args = arg_parser.parse_args()
    log(
        "Starting. Directory: "
        + args.directory
-        + ". Interval: "
+        + ". Refresh interval: "
        + str(args.interval)
        + "s."
    )
-    Main(args.directory, float(args.interval)).loop()
+    Main(
+        directory=args.directory,
+        interval=float(args.interval),
+        default_scrape_interval=args.default_scrape_interval,
+    ).loop()


if __name__ == "__main__":
