Scan relay nodes
cryptobench committed Mar 20, 2024
1 parent f716e38 commit 6a8921c
Showing 5 changed files with 96 additions and 13 deletions.
20 changes: 20 additions & 0 deletions stats-backend/api2/migrations/0023_relaynodes.py
@@ -0,0 +1,20 @@
# Generated by Django 4.1.7 on 2024-03-20 13:17

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('api2', '0022_alter_pricingsnapshot_date'),
]

operations = [
migrations.CreateModel(
name='RelayNodes',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('node_id', models.CharField(max_length=42, unique=True)),
],
),
]
5 changes: 5 additions & 0 deletions stats-backend/api2/models.py
@@ -104,3 +104,8 @@ class PricingSnapshot(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
date = models.DateTimeField(null=True, blank=True)
network = models.CharField(max_length=42, default="mainnet")



class RelayNodes(models.Model):
node_id = models.CharField(max_length=42, unique=True)
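The node_id column is 42 characters wide, which fits 0x-prefixed, 40-hex-character identifiers; the ingestion task below also lowercases IDs before storing them. A minimal usage sketch (illustration only, with a made-up address):

# Illustration only, not part of the commit; the address is made up.
from api2.models import RelayNodes

node, created = RelayNodes.objects.get_or_create(node_id="0x" + "ab" * 20)  # 42 characters
assert RelayNodes.objects.filter(node_id=node.node_id).exists()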
37 changes: 37 additions & 0 deletions stats-backend/api2/tasks.py
@@ -1292,3 +1292,40 @@ def online_nodes_computing():
NodeV1.objects.exclude(node_id__in=computing_node_ids).update(
computing_now=False
)


from .models import RelayNodes


@app.task
def fetch_and_store_relay_nodes():
base_url = "http://yacn2.dev.golem.network:9000/nodes/"
all_nodes = []

for prefix in range(256):
try:
r = requests.get(f"{base_url}{prefix:02x}")
r.raise_for_status() # Raises an HTTPError if the status is 4xx, 5xx
data = r.json()

# Process keys (node IDs) and prepare them for bulk insertion
node_ids = [key.strip().lower() for key in data.keys()]
all_nodes.extend(node_ids)

        except requests.RequestException:
            pass  # Skip this prefix on a failed request; error logging could be added here

# Retrieve all existing node_ids to avoid IntegrityError on insert
existing_node_ids = set(
RelayNodes.objects.filter(node_id__in=set(all_nodes)).values_list(
"node_id", flat=True
)
)
new_nodes = [
RelayNodes(node_id=nid)
for nid in set(all_nodes)
if nid not in existing_node_ids
]

# Bulk insert new nodes
RelayNodes.objects.bulk_create(new_nodes, ignore_conflicts=True)
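The loop above walks all 256 two-hex-digit prefixes (00–ff) and assumes each relay endpoint returns a JSON object whose keys are node IDs. A standalone sketch of a single-prefix fetch, mirroring the parsing done in the task (the prefix a1 and the timeout are arbitrary):

# Sketch only, not part of the commit: fetch one prefix and list its node IDs.
import requests

r = requests.get("http://yacn2.dev.golem.network:9000/nodes/a1", timeout=10)
r.raise_for_status()
node_ids = [key.strip().lower() for key in r.json().keys()]
print(f"prefix a1: {len(node_ids)} node IDs")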
40 changes: 27 additions & 13 deletions stats-backend/api2/views.py
@@ -375,33 +375,47 @@ async def cpu_architecture_stats(request):


 from collector.models import Requestors
+from .models import RelayNodes


 def get_transfer_sum(request, node_id, epoch):
     try:
         url = f"http://polygongas.org:14059/erc20/api/stats/transfers?chain=137&account={node_id}&from={epoch}"
         response = requests.get(url)
         if response.status_code != 200:
-            return JsonResponse({'error': 'Failed to get data from API'}, status=500)
+            return JsonResponse({"error": "Failed to get data from API"}, status=500)
         data = response.json()
-        from_addrs = [t["fromAddr"] for t in data.get("transfers", [])]
-        from_addrs_in_db = Requestors.objects.filter(node_id__in=from_addrs).values_list("node_id", flat=True)
+
+        transfers = data.get("transfers", [])
+        from_addrs = {t["fromAddr"] for t in transfers}
+        matched_addrs = set(
+            Requestors.objects.filter(node_id__in=from_addrs).values_list(
+                "node_id", flat=True
+            )
+        )
+        matched_addrs.update(
+            RelayNodes.objects.filter(node_id__in=from_addrs).values_list(
+                "node_id", flat=True
+            )
+        )
+
         total_amount_wei_matched = sum(
-            int(t["tokenAmount"])
-            for t in data.get("transfers", [])
-            if t["fromAddr"] in from_addrs_in_db
+            int(t["tokenAmount"]) for t in transfers if t["fromAddr"] in matched_addrs
         )
         total_amount_wei_not_matched = sum(
             int(t["tokenAmount"])
-            for t in data.get("transfers", [])
-            if t["fromAddr"] not in from_addrs_in_db
+            for t in transfers
+            if t["fromAddr"] not in matched_addrs
         )
-        return JsonResponse({
-            'total_amount_matched': total_amount_wei_matched / 1e18,
-            'total_amount_not_matched': total_amount_wei_not_matched / 1e18
-        })
+
+        return JsonResponse(
+            {
+                "total_amount_matched": total_amount_wei_matched / 1e18,
+                "total_amount_not_matched": total_amount_wei_not_matched / 1e18,
+            }
+        )
     except Exception as e:
-        return JsonResponse({'error': str(e)}, status=500)
+        return JsonResponse({"error": str(e)}, status=500)


async def network_online(request):
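This diff does not show the URL route for get_transfer_sum, so a direct call via Django's RequestFactory is one way to exercise the updated matching logic; the node ID and epoch below are placeholders:

# Illustration only, not part of the commit; placeholder node ID and epoch.
import json
from django.test import RequestFactory
from api2.views import get_transfer_sum

request = RequestFactory().get("/")  # the view only reads its positional arguments
response = get_transfer_sum(request, node_id="0x" + "ab" * 20, epoch=1710000000)
print(response.status_code, json.loads(response.content))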
7 changes: 7 additions & 0 deletions stats-backend/core/celery.py
@@ -63,6 +63,7 @@ def setup_periodic_tasks(sender, **kwargs):
count_cpu_vendors,
count_cpu_architecture,
online_nodes_computing,
fetch_and_store_relay_nodes,
)

sender.add_periodic_task(
@@ -71,6 +72,12 @@ def setup_periodic_tasks(sender, **kwargs):
queue="default",
options={"queue": "default", "routing_key": "default"},
)
sender.add_periodic_task(
30,
fetch_and_store_relay_nodes.s(),
queue="default",
options={"queue": "default", "routing_key": "default"},
)
sender.add_periodic_task(
crontab(minute="*/60"),
fetch_yagna_release.s(),
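Passing a plain number to add_periodic_task schedules the task every that many seconds, so the new entry runs the relay scan every 30 seconds on the default queue. A rough equivalent expressed as a static beat_schedule entry (illustration only; the import path and dotted task name are assumed from the file layout above):

# Illustration only, not part of the commit.
from core.celery import app  # assumed import path for this project's Celery app

app.conf.beat_schedule = {
    "fetch-and-store-relay-nodes": {
        "task": "api2.tasks.fetch_and_store_relay_nodes",  # assumed task name
        "schedule": 30.0,  # seconds
        "options": {"queue": "default", "routing_key": "default"},
    },
}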
