Skip to content

Commit

Permalink
Store IP and port
Browse files Browse the repository at this point in the history
  • Loading branch information
cryptobench committed Apr 26, 2024
1 parent 2392da9 commit 41d2cde
Show file tree
Hide file tree
Showing 3 changed files with 57 additions and 34 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# Generated by Django 4.1.7 on 2024-04-26 10:44

from django.db import migrations, models


class Migration(migrations.Migration):
    """Add optional ``ip_address`` and ``port`` columns to RelayNodes.

    Auto-generated by Django 4.1.7 (see the header comment); both new
    fields are nullable/blankable, so existing rows need no backfill
    and the migration is safe to apply to a populated table.
    """

    dependencies = [
        ('api2', '0029_node_api2_node_online_822fe4_idx_and_more'),
    ]

    operations = [
        # NOTE(review): max_length=15 only fits a dotted-quad IPv4 string;
        # an IPv6 peer address would not fit — confirm the relay reports
        # IPv4 only before relying on this width.
        migrations.AddField(
            model_name='relaynodes',
            name='ip_address',
            field=models.CharField(blank=True, max_length=15, null=True),
        ),
        migrations.AddField(
            model_name='relaynodes',
            name='port',
            field=models.IntegerField(blank=True, null=True),
        ),
    ]
2 changes: 2 additions & 0 deletions stats-backend/api2/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -126,6 +126,8 @@ class PricingSnapshot(models.Model):

class RelayNodes(models.Model):
    """A node discovered via the relay service (see fetch_and_store_relay_nodes)."""

    # Unique node identifier; stored stripped/lowercased by the fetch task.
    # Presumably a 0x-prefixed 40-hex-digit address (hence 42 chars) — TODO confirm.
    node_id = models.CharField(max_length=42, unique=True)
    # Last peer IP reported by the relay; nullable because it is only
    # populated when the relay returned a session for the node.
    # NOTE(review): max_length=15 only fits dotted-quad IPv4 text; an IPv6
    # address would not fit — confirm the relay only reports IPv4.
    ip_address = models.CharField(max_length=15, null=True, blank=True)
    # Last peer port reported by the relay; nullable for the same reason.
    port = models.IntegerField(null=True, blank=True)


class GolemTransactions(models.Model):
Expand Down
66 changes: 32 additions & 34 deletions stats-backend/api2/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -1109,8 +1109,6 @@ def sum_highest_runtime_resources():
)




from django.db.models import Count, F, Window
from django.db.models.functions import Lag, TruncHour
from django.utils import timezone
Expand Down Expand Up @@ -1258,35 +1256,24 @@ def online_nodes_computing():
@app.task
def fetch_and_store_relay_nodes():
    """Fetch node listings from the relay discovery service and upsert them.

    The relay shards nodes by id prefix, so every prefix 0x00..0xff is
    queried in turn. For each node the first session's ``peer`` address
    ("ip:port") is parsed and stored on RelayNodes via update_or_create,
    keyed on the normalized (stripped, lowercased) node id.

    Side effects only: HTTP GETs to the relay and database writes.
    Failures are logged per-prefix / per-entry and never abort the task.
    """
    base_url = "http://yacn2.dev.golem.network:9000/nodes/"

    for prefix in range(256):
        # Keep the try-body minimal: only the network/JSON steps can
        # raise RequestException.
        try:
            response = requests.get(f"{base_url}{prefix:02x}")
            response.raise_for_status()
            data = response.json()
        except requests.RequestException as e:
            print(f"Error fetching data for prefix {prefix:02x}: {e}")
            continue

        for node_id, sessions in data.items():
            node_id = node_id.strip().lower()
            try:
                # rsplit on the LAST colon so addresses that themselves
                # contain colons (e.g. IPv6) still split off the port;
                # plain split(":") would mis-parse them.
                ip, port_str = sessions[0]["peer"].rsplit(":", 1)
                port = int(port_str)
            except (IndexError, KeyError, TypeError, ValueError) as e:
                # Previously a single malformed entry (empty sessions,
                # missing "peer", non-numeric port) crashed the task —
                # the outer except only caught RequestException.
                print(f"Skipping malformed relay entry for {node_id}: {e}")
                continue

            RelayNodes.objects.update_or_create(
                node_id=node_id, defaults={"ip_address": ip, "port": port}
            )


from .models import TransactionScraperIndex, GolemTransactions
Expand Down Expand Up @@ -1762,7 +1749,15 @@ def aggregate_volume(start_date, end_date):
@app.task
def computing_total_over_time():
now = timezone.now()
formatted_data = {"7d": [], "14d": [], "1m": [], "3m": [], "6m": [], "1y": [], "All": []}
formatted_data = {
"7d": [],
"14d": [],
"1m": [],
"3m": [],
"6m": [],
"1y": [],
"All": [],
}
intervals = {
"7d": (now - timedelta(days=7), now),
"14d": (now - timedelta(days=14), now),
Expand All @@ -1774,12 +1769,15 @@ def computing_total_over_time():
}

for period, (start_date, end_date) in intervals.items():
data = ProvidersComputingMax.objects\
.filter(date__range=(start_date, end_date))\
.annotate(truncated_date=TruncDay("date"))\
.values("truncated_date")\
.annotate(total=Sum("total"))\
.order_by("truncated_date")
data = (
ProvidersComputingMax.objects.filter(date__range=(start_date, end_date))
.annotate(truncated_date=TruncDay("date"))
.values("truncated_date")
.annotate(total=Sum("total"))
.order_by("truncated_date")
)
formatted_data[period] = list(data)

r.set("computing_total_over_time", json.dumps(formatted_data, cls=DjangoJSONEncoder))

r.set(
"computing_total_over_time", json.dumps(formatted_data, cls=DjangoJSONEncoder)
)

0 comments on commit 41d2cde

Please sign in to comment.