fix: metric labels are not cleaned up when new data is fetched #8

Merged
merged 2 commits on Mar 14, 2024
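
Context for the fix: a prometheus_client Gauge keeps every label combination that has ever been set on it, so when a cost group disappears from a later Cost Explorer response its stale sample stays in the exposition. Clearing the gauge at the start of each polling cycle drops those leftover series before fresh values are written. A minimal sketch of the pattern, using a hypothetical gauge and label set rather than the exporter's exact configuration:

```python
from prometheus_client import Gauge, generate_latest

# Hypothetical gauge for illustration; the exporter builds its label set from config.
daily_cost = Gauge("daily_cost_usd_example", "Example daily cost gauge", ["ChargeType", "Service"])


def refresh(costs_by_service):
    # Without clear(), a Service label that stops appearing in the input
    # would keep exporting its last observed value forever.
    daily_cost.clear()
    for service, cost in costs_by_service.items():
        daily_cost.labels(ChargeType="Usage", Service=service).set(cost)


refresh({"AmazonEC2": 12.5, "AmazonS3": 3.2})
refresh({"AmazonEC2": 11.0})  # AmazonS3 no longer present in the new data
print(generate_latest(daily_cost).decode())  # only the AmazonEC2 series remains
```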
86 changes: 40 additions & 46 deletions app/exporter.py
@@ -2,17 +2,26 @@
 # -*- coding:utf-8 -*-
 # Filename: exporter.py
 
+import logging
 import time
-import boto3
-import botocore
 from datetime import datetime
+
+import boto3
 from dateutil.relativedelta import relativedelta
 from prometheus_client import Gauge
-import logging
 
 
 class MetricExporter:
-    def __init__(self, polling_interval_seconds, metric_name, aws_access_key, aws_access_secret, aws_assumed_role_name, group_by, targets):
+    def __init__(
+        self,
+        polling_interval_seconds,
+        metric_name,
+        aws_access_key,
+        aws_access_secret,
+        aws_assumed_role_name,
+        group_by,
+        targets,
+    ):
         self.polling_interval_seconds = polling_interval_seconds
         self.metric_name = metric_name
         self.targets = targets
@@ -27,14 +36,15 @@ def __init__(self, polling_interval_seconds, metric_name, aws_access_key, aws_ac
         if group_by["enabled"]:
             for group in group_by["groups"]:
                 self.labels.add(group["label_name"])
-        self.aws_daily_cost_usd = Gauge(
-            self.metric_name, "Daily cost of an AWS account in USD", self.labels)
+        self.aws_daily_cost_usd = Gauge(self.metric_name, "Daily cost of an AWS account in USD", self.labels)
 
     def run_metrics_loop(self):
         while True:
+            # every time we clear up all the existing labels before setting new ones
+            self.aws_daily_cost_usd.clear()
+
             for aws_account in self.targets:
-                logging.info("querying cost data for aws account %s" %
-                             aws_account["Publisher"])
+                logging.info("querying cost data for aws account %s" % aws_account["Publisher"])
                 try:
                     self.fetch(aws_account)
                 except Exception as e:
@@ -50,8 +60,7 @@ def get_aws_account_session(self, account_id):
         )
 
         assumed_role_object = sts_client.assume_role(
-            RoleArn=f"arn:aws:iam::{account_id}:role/{self.aws_assumed_role_name}",
-            RoleSessionName="AssumeRoleSession1"
+            RoleArn=f"arn:aws:iam::{account_id}:role/{self.aws_assumed_role_name}", RoleSessionName="AssumeRoleSession1"
         )
 
         return assumed_role_object["Credentials"]
@@ -62,75 +71,60 @@ def query_aws_cost_explorer(self, aws_client, group_by):
         groups = list()
         if group_by["enabled"]:
             for group in group_by["groups"]:
-                groups.append({
-                    "Type": group["type"],
-                    "Key": group["key"]
-                })
+                groups.append({"Type": group["type"], "Key": group["key"]})
 
         response = aws_client.get_cost_and_usage(
-            TimePeriod={
-                "Start": start_date.strftime("%Y-%m-%d"),
-                "End": end_date.strftime("%Y-%m-%d")
-            },
-            Filter={
-                "Dimensions": {
-                    "Key": "RECORD_TYPE",
-                    "Values": ["Usage"]
-                }
-            },
+            TimePeriod={"Start": start_date.strftime("%Y-%m-%d"), "End": end_date.strftime("%Y-%m-%d")},
+            Filter={"Dimensions": {"Key": "RECORD_TYPE", "Values": ["Usage"]}},
             Granularity="DAILY",
-            Metrics=[
-                "UnblendedCost"
-            ],
-            GroupBy=groups
+            Metrics=["UnblendedCost"],
+            GroupBy=groups,
         )
         return response["ResultsByTime"]
 
     def fetch(self, aws_account):
-        aws_credentials = self.get_aws_account_session(
-            aws_account["Publisher"])
+        aws_credentials = self.get_aws_account_session(aws_account["Publisher"])
 
         aws_client = boto3.client(
             "ce",
             aws_access_key_id=aws_credentials["AccessKeyId"],
             aws_secret_access_key=aws_credentials["SecretAccessKey"],
             aws_session_token=aws_credentials["SessionToken"],
-            region_name="us-east-1"
+            region_name="us-east-1",
         )
-        cost_response = self.query_aws_cost_explorer(
-            aws_client, self.group_by)
+        cost_response = self.query_aws_cost_explorer(aws_client, self.group_by)
 
         for result in cost_response:
             if not self.group_by["enabled"]:
                 cost = float(result["Total"]["UnblendedCost"]["Amount"])
-                self.aws_daily_cost_usd.labels(
-                    **aws_account, ChargeType="Usage").set(cost)
+                self.aws_daily_cost_usd.labels(**aws_account, ChargeType="Usage").set(cost)
             else:
                 merged_minor_cost = 0
                 for item in result["Groups"]:
-                    cost = float(item["Metrics"]
-                                 ["UnblendedCost"]["Amount"])
+                    cost = float(item["Metrics"]["UnblendedCost"]["Amount"])
 
                     group_key_values = dict()
                     for i in range(len(self.group_by["groups"])):
                         if self.group_by["groups"][i]["type"] == "TAG":
                             value = item["Keys"][i].split("$")[1]
                         else:
                             value = item["Keys"][i]
-                        group_key_values.update(
-                            {self.group_by["groups"][i]["label_name"]: value})
+                        group_key_values.update({self.group_by["groups"][i]["label_name"]: value})
 
-                    if self.group_by["merge_minor_cost"]["enabled"] and \
-                            cost < self.group_by["merge_minor_cost"]["threshold"]:
+                    if (
+                        self.group_by["merge_minor_cost"]["enabled"]
+                        and cost < self.group_by["merge_minor_cost"]["threshold"]
+                    ):
                         merged_minor_cost += cost
                     else:
-                        self.aws_daily_cost_usd.labels(
-                            **aws_account, **group_key_values, ChargeType="Usage").set(cost)
+                        self.aws_daily_cost_usd.labels(**aws_account, **group_key_values, ChargeType="Usage").set(cost)
 
                 if merged_minor_cost > 0:
                     group_key_values = dict()
                     for i in range(len(self.group_by["groups"])):
                         group_key_values.update(
-                            {self.group_by["groups"][i]["label_name"]: self.group_by["merge_minor_cost"]["tag_value"]})
-                    self.aws_daily_cost_usd.labels(
-                        **aws_account, **group_key_values, ChargeType="Usage").set(merged_minor_cost)
+                            {self.group_by["groups"][i]["label_name"]: self.group_by["merge_minor_cost"]["tag_value"]}
+                        )
+                    self.aws_daily_cost_usd.labels(**aws_account, **group_key_values, ChargeType="Usage").set(
+                        merged_minor_cost
+                    )
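
For readers of fetch() above: the group_by structure the code iterates over comes from the exporter's configuration. A sketch of its assumed shape, with illustrative values (only the key names are taken from the code):

```python
# Illustrative only; real values come from the exporter's configuration.
group_by = {
    "enabled": True,
    "groups": [
        {"type": "DIMENSION", "key": "SERVICE", "label_name": "service"},
        # TAG group keys come back as "<tag_key>$<tag_value>", hence the split("$") in fetch()
        {"type": "TAG", "key": "team", "label_name": "team"},
    ],
    "merge_minor_cost": {
        "enabled": True,
        "threshold": 5,        # groups below this daily cost are merged into one series
        "tag_value": "other",  # label value used for the merged bucket
    },
}
```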
6 changes: 3 additions & 3 deletions package.json
@@ -1,4 +1,4 @@
 {
-  "name": "aws-cost-exporter",
-  "version": "v1.0.3"
-}
+  "name": "aws-cost-exporter",
+  "version": "v1.0.4"
+}