Python3 support #21

Open · wants to merge 4 commits into master
1 change: 1 addition & 0 deletions .gitignore
@@ -2,3 +2,4 @@
 *.iml
 .idea
 *.pyc
+.env
8 changes: 4 additions & 4 deletions Dockerfile
@@ -1,13 +1,13 @@
-FROM 234348545939.dkr.ecr.eu-west-1.amazonaws.com/wehkamp/alpine:3.5
+FROM alpine

-ENTRYPOINT ["python", "-m", "exporter"]
+ENTRYPOINT ["python3", "-m", "exporter"]
 EXPOSE 9199
 ENV FLASK_APP=/exporter/exporter/app.py \
 SERVICE_PORT=9199

 RUN LAYER=build \
-&& apk add -U python py-pip \
-&& pip install prometheus_client delorean requests apscheduler Flask \
+&& apk add -U python3 py3-pip\
+&& pip3 install prometheus_client delorean requests apscheduler Flask \
 && rm -rf /var/cache/apk/* \
 && rm -rf ~/.cache/pip

3 changes: 3 additions & 0 deletions Makefile
@@ -0,0 +1,3 @@
+run:
+	docker build . -t docker-prometheus-cloudflare-exporter
+	docker run --publish 9199:9199 -e AUTH_KEY -e AUTH_EMAIL -e ZONE docker-prometheus-cloudflare-exporter
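Once the container from `make run` is up, a quick way to confirm the Python 3 image actually serves metrics is to hit the published port. A minimal sketch, assuming the exporter is reachable on localhost:9199 (the port published above); `requests` is already among the exporter's dependencies:

import requests

# Scrape the exporter once and show the response type plus the first few
# exposition lines.
resp = requests.get("http://localhost:9199/metrics", timeout=5)
resp.raise_for_status()
print(resp.headers.get("Content-Type"))
print("\n".join(resp.text.splitlines()[:5]))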
6 changes: 3 additions & 3 deletions exporter/app.py
@@ -13,7 +13,7 @@
 import time

 from apscheduler.schedulers.background import BackgroundScheduler
-from flask import Flask
+from flask import Flask, Response
 from prometheus_client.core import GaugeMetricFamily
 from prometheus_client.exposition import generate_latest

@@ -202,7 +202,7 @@ def update_latest():

     latest_metrics = (get_colo_metrics() + get_dns_metrics() +
                       get_waf_metrics())
-    latest_metrics += generate_latest(RegistryMock(internal_metrics.values()))
+    latest_metrics += generate_latest(RegistryMock(internal_metrics.values())).decode()


 app = Flask(__name__)
@@ -223,7 +223,7 @@ def status():

 @app.route("/metrics")
 def metrics():
-    return latest_metrics
+    return Response(latest_metrics, mimetype='text/plain')


 def run():
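The two app.py changes address the same Python 3 issue from different ends: `generate_latest()` returns `bytes` under Python 3, so it has to be decoded before being concatenated onto the other exporters' string output, and the assembled text is then wrapped in a `Response` so Flask sends it as plain text rather than its default HTML mimetype. A minimal sketch of the bytes-vs-str behaviour, using a plain `CollectorRegistry` instead of the project's `RegistryMock`:

from prometheus_client import CollectorRegistry, Gauge
from prometheus_client.exposition import generate_latest

registry = CollectorRegistry()
Gauge("demo_up", "illustrative gauge", registry=registry).set(1)

payload = generate_latest(registry)      # bytes under Python 3
text = "" + payload.decode()             # "" + payload would raise TypeError
print(type(payload).__name__, type(text).__name__)   # -> bytes str

As a design note, prometheus_client also exposes `CONTENT_TYPE_LATEST` in `prometheus_client.exposition`, which could be used instead of the hard-coded 'text/plain'; Prometheus scrapers accept plain text either way.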
10 changes: 5 additions & 5 deletions exporter/coloexporter.py
@@ -36,18 +36,18 @@ def generate_metrics(pop_data, families):
             [zone, 'uncached', pop_data['colo_id']],
             serie['bandwidth']['uncached'])

-        for http_status, count in serie['requests']['http_status'].iteritems():
+        for http_status, count in iter(serie['requests']['http_status'].items()):
             families['http_responses_sent'].add_metric(
                 [zone, pop_data['colo_id'], http_status], count)

         families['threats_seen'].add_metric(
             [zone, pop_data['colo_id']], serie['threats']['all'])

-        for threat, count in serie['threats']['type'].iteritems():
+        for threat, count in iter(serie['threats']['type'].items()):
             families['threat_types'].add_metric(
                 [zone, pop_data['colo_id'], threat], count)

-        for country, count in serie['threats']['country'].iteritems():
+        for country, count in iter(serie['threats']['country'].items()):
             families['threat_countries'].add_metric(
                 [zone, pop_data['colo_id'], country], count)

@@ -80,7 +80,7 @@ def generate_metrics(pop_data, families):

     for pop_data in raw_data:
         generate_metrics(pop_data, families)
-    return generate_latest(RegistryMock(families.values()))
+    return generate_latest(RegistryMock(families.values())).decode()


 if __name__ == "__main__":
@@ -90,4 +90,4 @@ def generate_metrics(pop_data, families):
     path = os.path.join(source_dir, "sample")

     with open(path) as f:
-        print process(json.load(f)['result'])
+        print(process(json.load(f)['result']))
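The `.iteritems()` calls are the Python-2-only part of these loops: under Python 3, `dict.items()` already returns a lazy view, so the `iter(...)` wrapper kept in the new lines (typical of automated 2to3-style conversions) is redundant but harmless. A small illustration with invented sample data:

# Invented sample data, standing in for serie['requests']['http_status'].
serie_http_status = {"200": 1200, "404": 7, "503": 1}

# Python 3: .items() returns a view; wrapping it in iter() changes nothing
# observable in a for loop.
for http_status, count in serie_http_status.items():
    print(http_status, count)

assert list(iter(serie_http_status.items())) == list(serie_http_status.items())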
4 changes: 2 additions & 2 deletions exporter/dnsexporter.py
@@ -40,7 +40,7 @@ def generate_metrics(pop_data, families):

     for pop_data in raw_data:
         generate_metrics(pop_data, families)
-    return generate_latest(RegistryMock(families.values()))
+    return generate_latest(RegistryMock(families.values())).decode()


 if __name__ == "__main__":
@@ -50,4 +50,4 @@ def generate_metrics(pop_data, families):
     path = os.path.join(source_dir, "sample-dns")

     with open(path) as f:
-        print process(json.load(f)['result'])
+        print(process(json.load(f)['result']))
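The other change repeated across the __main__ blocks is the print statement becoming a function call; under Python 3 the statement form is a SyntaxError, so the module could not even be imported for its smoke test without the parentheses. One-line illustration:

# print is a function in Python 3:
#   print process(...)        # SyntaxError under python3
print("works under Python 3")  # and, for a single argument, under Python 2 as well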
12 changes: 6 additions & 6 deletions exporter/wafexporter.py
@@ -56,7 +56,7 @@ def process_metrics(data):
     return [rule_hits, uri_hits]

 def generate_uri_metrics(data, families):
-    for keystring, count in data.iteritems():
+    for keystring, count in iter(data.items()):
         keys = ast.literal_eval(keystring)
         families['waf_uri_hits'].add_metric(
             [
@@ -72,7 +72,7 @@ def generate_uri_metrics(data, families):
             count)

 def generate_rule_metrics(data, families):
-    for rule_id, d in data.iteritems():
+    for rule_id, d in iter(data.items()):
         families['waf_rule_hits'].add_metric(
             [rule_id, d['message']],
             d['count'])
@@ -106,19 +106,19 @@ def generate_rule_metrics(data, families):
     # Process all data here to filter and group/sum some numbers.
     waf_rule_hits, waf_uri_hits = process_metrics(raw_data)

-    for data, count in waf_uri_hits.iteritems():
+    for data, count in iter(waf_uri_hits.items()):
         generate_uri_metrics({data: count}, families)

-    for rule, data in waf_rule_hits.iteritems():
+    for rule, data in iter(waf_rule_hits.items()):
         generate_rule_metrics({rule: data}, families)

     # Return the metrics.
-    return generate_latest(RegistryMock(families.values()))
+    return generate_latest(RegistryMock(families.values())).decode()


 if __name__ == "__main__":
     source_dir = os.path.dirname(os.path.abspath(__file__))
     path = os.path.join(source_dir, "sample-waf")

     with open(path) as f:
-        print process(json.load(f)['result'])
+        print(process(json.load(f)['result']))
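For reference, the `keystring` handling visible in generate_uri_metrics(): the WAF hit counts appear to be keyed by stringified tuples, and `ast.literal_eval` turns each key back into real Python values before the labels are emitted. A sketch with an invented key (the actual key layout comes from process_metrics(), which is outside this diff):

import ast

# Invented example key/count, standing in for one entry of waf_uri_hits.
waf_uri_hits = {"('981176', '/wp-login.php', 'drop')": 3}

for keystring, count in waf_uri_hits.items():
    keys = ast.literal_eval(keystring)    # -> ('981176', '/wp-login.php', 'drop')
    print(list(keys), count)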