Merge pull request #12 from signal-ai/allow-customising-default-sd
Limess authored Apr 8, 2021
2 parents 78795c0 + f9d0df4 commit 6e81c08
Showing 3 changed files with 41 additions and 18 deletions.
2 changes: 1 addition & 1 deletion Makefile
@@ -5,4 +5,4 @@ format:
.PHONY: dev-start
dev-start:
	@mkdir -p ./targets
-	poetry run python discoverecs.py --directory $$PWD/targets
+	poetry run python discoverecs.py --directory $$PWD/targets --default-scrape-interval-prefix default
25 changes: 21 additions & 4 deletions README.md
@@ -60,8 +60,7 @@ The output directory is then `/opt/prometheus-ecs` defined in your Prometheus config
regex: (.+)
```
-You can also specify a discovery interval with `--interval` (in seconds). Default is 60s. We also provide caching to minimize hitting query
-rate limits with the AWS ECS API. `discoverecs.py` runs in a loop until interrupted and will output target information to stdout.
+You can also specify a discovery interval with `--interval` (in seconds). The default is `60s`. We also provide caching to minimize hitting query rate limits with the AWS ECS API. `discoverecs.py` runs in a loop until interrupted and will output target information to stdout.

To make your application discoverable by Prometheus, you need to set the following environment variable in your task definition:

@@ -72,9 +71,11 @@
Metric path and scrape interval are supported via `PROMETHEUS_ENDPOINT`:

```text
-"interval:/metric_path,..."
+"[interval:]<metric_path>,..."
```

where `interval` is optional.

Examples:

```text
@@ -91,7 +92,23 @@ Under ECS task definition (`task.json`):

Available scrape intervals: `15s`, `30s`, `1m`, `5m`.

-The default metric path is `/metrics`. The default scrape interval is `1m`.
+The default metric path is `/metrics`.

### Default scrape interval

The default scrape interval is `1m` when no interval is specified in the `PROMETHEUS_ENDPOINT` variable.

This can be customised using the `--default-scrape-interval-prefix` option. It can be any string, and results in the targets being written to `/opt/prometheus-ecs/<default_scrape_interval_prefix>-tasks.json`.

e.g. if `default` is used:

```shell
--default-scrape-interval-prefix default
```

then `/opt/prometheus-ecs/default-tasks.json` will be written. This can be useful because it allows the default scrape interval to be configured in your Prometheus config, rather than requiring this discovery service to be reconfigured and redeployed.
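
For illustration only, a minimal Prometheus `file_sd_configs` entry that scrapes the default-prefixed file might look like the sketch below; the job name and the `30s` interval are assumed values, not part of this change.

```yaml
scrape_configs:
  - job_name: ecs-default          # assumed job name
    scrape_interval: 30s           # the interval you want "default" targets scraped at
    file_sd_configs:
      - files:
          - /opt/prometheus-ecs/default-tasks.json
```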

### Configuration yaml

The following Prometheus configuration should be used to support all available intervals:

…
32 changes: 19 additions & 13 deletions discoverecs.py
@@ -341,11 +341,11 @@ def extract_path_interval(env_variable):
                if re.search("(15s|30s|1m|5m)", pi[0]):
                    path_interval[pi[1]] = pi[0]
                else:
-                    path_interval[pi[1]] = "1m"
+                    path_interval[pi[1]] = None
            else:
-                path_interval[lst] = "1m"
+                path_interval[lst] = None
    else:
-        path_interval["/metrics"] = "1m"
+        path_interval["/metrics"] = None
    return path_interval
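
Going by the visible diff, paths with no recognised interval are now stored as `None` rather than `"1m"`, so the choice of output file is deferred to the default prefix. A rough, self-contained sketch of that parsing behaviour (a hypothetical helper, not the module's actual function):

```python
import re


def parse_prometheus_endpoint(value):
    """Sketch: map a PROMETHEUS_ENDPOINT value to {metric_path: interval or None}."""
    path_interval = {}
    if not value:
        # Nothing configured: assume /metrics with no explicit interval.
        path_interval["/metrics"] = None
        return path_interval
    for entry in value.split(","):
        parts = entry.split(":")
        if len(parts) == 2 and re.search("(15s|30s|1m|5m)", parts[0]):
            path_interval[parts[1]] = parts[0]  # explicit, supported interval
        elif len(parts) == 2:
            path_interval[parts[1]] = None      # interval given but not supported
        else:
            path_interval[entry] = None         # bare path, no interval
    return path_interval


# {'/metrics': '30s', '/admin/metrics': None}
print(parse_prometheus_endpoint("30s:/metrics,/admin/metrics"))
```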


@@ -457,14 +457,15 @@ def task_info_to_targets(task_info):


class Main:
-    def __init__(self, directory, interval):
+    def __init__(self, directory, interval, default_scrape_interval_prefix):
        self.directory = directory
        self.interval = interval
+        self.default_scrape_interval_prefix = default_scrape_interval_prefix
        self.discoverer = TaskInfoDiscoverer()

    def write_jobs(self, jobs):
-        for i, j in jobs.items():
-            file_name = self.directory + "/" + i + "-tasks.json"
+        for prefix, j in jobs.items():
+            file_name = self.directory + "/" + prefix + "-tasks.json"
            tmp_file_name = file_name + ".tmp"
            with open(tmp_file_name, "w") as f:
                f.write(json.dumps(j, indent=4))
@@ -511,7 +512,7 @@ def discover_tasks(self):
            }
            if labels:
                job["labels"].update(labels)
-            jobs[interval].append(job)
+            jobs[interval or self.default_scrape_interval_prefix].append(job)
            log(job)
        self.write_jobs(jobs)
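
To illustrate the effect of keying jobs by `interval or self.default_scrape_interval_prefix`, here is a small standalone sketch; the target addresses, labels and default prefix are made up, and the file naming mirrors the `write_jobs` diff above:

```python
import json
from collections import defaultdict

default_scrape_interval_prefix = "default"

# Hypothetical discovered targets: (address, interval parsed from PROMETHEUS_ENDPOINT).
discovered = [
    ("10.0.0.5:9100", "30s"),   # explicit interval
    ("10.0.0.6:8080", None),    # no interval specified
]

jobs = defaultdict(list)
for address, interval in discovered:
    job = {"targets": [address], "labels": {"task": "example"}}
    # None falls through to the default prefix, so the target lands in default-tasks.json.
    jobs[interval or default_scrape_interval_prefix].append(job)

for prefix, entries in jobs.items():
    # write_jobs writes each group to <directory>/<prefix>-tasks.json in file_sd format.
    print("/opt/prometheus-ecs/{}-tasks.json".format(prefix))
    print(json.dumps(entries, indent=4))
```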

@@ -525,15 +526,20 @@ def main():
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("--directory", required=True)
    arg_parser.add_argument("--interval", default=60)
+    arg_parser.add_argument("--default-scrape-interval-prefix", default="1m")
    args = arg_parser.parse_args()
    log(
-        "Starting. Directory: "
-        + args.directory
-        + ". Interval: "
-        + str(args.interval)
-        + "s."
+        'Starting...\nDirectory: "{}"\nRefresh interval: "{}s"\nDefault scrape interval prefix: "{}"\n'.format(
+            args.directory,
+            str(args.interval),
+            args.default_scrape_interval_prefix,
+        )
    )
-    Main(args.directory, float(args.interval)).loop()
+    Main(
+        directory=args.directory,
+        interval=float(args.interval),
+        default_scrape_interval_prefix=args.default_scrape_interval_prefix,
+    ).loop()


if __name__ == "__main__":
…
