Skip to content

Commit

Permalink
Historical computing
Browse files Browse the repository at this point in the history
  • Loading branch information
cryptobench committed Mar 29, 2024
1 parent b0fe731 commit f5cd938
Show file tree
Hide file tree
Showing 4 changed files with 54 additions and 12 deletions.
45 changes: 34 additions & 11 deletions stats-backend/api2/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -1573,7 +1573,9 @@ def aggregate_counts(start_date, end_date):
.annotate(date=TruncDay("timestamp"))
.values("date")
.annotate(
singleTransfer=Count("scanner_id", filter=Q(transaction_type="singleTransfer")),
singleTransfer=Count(
"scanner_id", filter=Q(transaction_type="singleTransfer")
),
batched=Count("scanner_id", filter=Q(transaction_type="batched")),
)
.order_by("date")
Expand Down Expand Up @@ -1700,16 +1702,8 @@ def aggregate_transactions(start_date, end_date):
.annotate(date=TruncDay("timestamp"))
.values("date")
.annotate(
on_golem=Count(
"amount",
filter=Q(tx_from_golem=True),
distinct=True
),
not_golem=Count(
"amount",
filter=Q(tx_from_golem=False),
distinct=True
),
on_golem=Count("amount", filter=Q(tx_from_golem=True), distinct=True),
not_golem=Count("amount", filter=Q(tx_from_golem=False), distinct=True),
)
.order_by("date")
)
Expand Down Expand Up @@ -1804,3 +1798,32 @@ def aggregate_volume(start_date, end_date):
r.set(
"daily_volume_golem_vs_chain", json.dumps(formatted_data, cls=DjangoJSONEncoder)
)


# NOTE(review): the body below uses ProvidersComputingMax, but only
# ProvidersComputing was imported — that is a NameError unless the name is
# already imported at the top of this (long) module.  Import both to be safe.
from collector.models import ProvidersComputing, ProvidersComputingMax


@app.task
def computing_total_over_time():
    """Cache daily computing totals for a set of trailing time windows.

    For each window (7d, 14d, 1m, 3m, 6m, 1y, All) the ProvidersComputingMax
    rows are grouped by day and their ``total`` column summed.  The resulting
    per-window series are written to Redis under ``computing_total_over_time``
    as JSON, for the API view of the same name to serve.
    """
    now = timezone.now()

    # Earliest recorded date bounds the "All" window.  ``first()`` returns
    # None on an empty table, so fall back to ``now`` (yielding an empty
    # series) instead of letting ``earliest()`` raise DoesNotExist.
    earliest_date = (
        ProvidersComputingMax.objects.order_by("date")
        .values_list("date", flat=True)
        .first()
    )

    # Every window ends at ``now``; only the start differs, so store starts.
    window_starts = {
        "7d": now - timedelta(days=7),
        "14d": now - timedelta(days=14),
        "1m": now - timedelta(days=30),
        "3m": now - timedelta(days=90),
        "6m": now - timedelta(days=180),
        "1y": now - timedelta(days=365),
        "All": earliest_date if earliest_date is not None else now,
    }

    formatted_data = {}
    for period, start_date in window_starts.items():
        data = (
            ProvidersComputingMax.objects.filter(date__range=(start_date, now))
            .annotate(truncated_date=TruncDay("date"))
            .values("truncated_date")
            .annotate(total=Sum("total"))
            .order_by("truncated_date")
        )
        formatted_data[period] = list(data)

    r.set(
        "computing_total_over_time",
        json.dumps(formatted_data, cls=DjangoJSONEncoder),
    )
1 change: 1 addition & 0 deletions stats-backend/api2/urls.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@
path("network/comparison", views.list_ec2_instances_comparison),
path("network/token/golemvschain", views.daily_volume_golem_vs_chain),
path("network/transactions/volume", views.transaction_volume_over_time),
path("network/historical/computing", views.computing_total_over_time),
path("network/amount/transfer", views.amount_transferred_over_time),
path("network/transactions/type/comparison", views.transaction_type_comparison),
path("network/transactions/daily-type-counts", views.daily_transaction_type_counts),
Expand Down
13 changes: 12 additions & 1 deletion stats-backend/api2/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -769,4 +769,15 @@ async def average_transaction_value_over_time(request):
return HttpResponse(status=400)



async def computing_total_over_time(request):
    """Serve the cached ``computing_total_over_time`` dataset as indented JSON.

    The payload is produced by the Celery task of the same name and stored in
    Redis; this view only deserializes and returns it.  Any method other than
    GET gets a 400, matching the file's other cache-backed views.
    """
    if request.method != "GET":
        return HttpResponse(status=400)

    pool = aioredis.ConnectionPool.from_url(
        "redis://redis:6379/0", decode_responses=True
    )
    r = aioredis.Redis(connection_pool=pool)
    try:
        content = await r.get("computing_total_over_time")
    finally:
        # ConnectionPool.disconnect() is a coroutine in redis-py's asyncio
        # API; the original called it without ``await``, so connections were
        # never actually closed.  Run it in ``finally`` so the pool is torn
        # down even if the GET fails.
        await pool.disconnect()

    # NOTE(review): if the task has not run yet, ``content`` is None and
    # json.loads raises TypeError — consider a 404 here; behavior kept.
    data = json.loads(content)
    return JsonResponse(data, safe=False, json_dumps_params={"indent": 4})
7 changes: 7 additions & 0 deletions stats-backend/core/celery.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,8 +73,15 @@ def setup_periodic_tasks(sender, **kwargs):
daily_transaction_type_counts,
average_transaction_value_over_time,
daily_volume_golem_vs_chain,
computing_total_over_time,
)

sender.add_periodic_task(
60,
computing_total_over_time.s(),
queue="default",
options={"queue": "default", "routing_key": "default"},
)
sender.add_periodic_task(
60,
transaction_volume_over_time.s(),
Expand Down

0 comments on commit f5cd938

Please sign in to comment.