Updated pricing endpoints
cryptobench committed Feb 26, 2024
1 parent 9c86362 commit 27d0368
Showing 4 changed files with 84 additions and 21 deletions.
74 changes: 58 additions & 16 deletions stats-backend/api2/tasks.py
@@ -688,32 +688,74 @@ def create_pricing_snapshot():
 
 
 @app.task
-def median_pricing_past_hour():
+def median_and_average_pricing_past_hour():
     try:
         last_hour = timezone.now() - timedelta(hours=1)
-        cpu_median = median(
-            ProviderWithTask.objects.filter(created_at__gte=last_hour)
-            .values_list("cpu_per_hour", flat=True)
-            .exclude(cpu_per_hour__isnull=True)
+        cpu_values = ProviderWithTask.objects.filter(created_at__gte=last_hour).exclude(
+            cpu_per_hour__isnull=True
         )
-        env_median = median(
-            ProviderWithTask.objects.filter(created_at__gte=last_hour)
-            .values_list("env_per_hour", flat=True)
-            .exclude(env_per_hour__isnull=True)
-        )
-        start_median = median(
-            ProviderWithTask.objects.filter(created_at__gte=last_hour)
-            .values_list("start_price", flat=True)
-            .exclude(start_price__isnull=True)
+        env_values = ProviderWithTask.objects.filter(created_at__gte=last_hour).exclude(
+            env_per_hour__isnull=True
         )
+        start_values = ProviderWithTask.objects.filter(
+            created_at__gte=last_hour
+        ).exclude(start_price__isnull=True)
+
+        cpu_median = median(cpu_values.values_list("cpu_per_hour", flat=True))
+        cpu_average = cpu_values.aggregate(Avg("cpu_per_hour"))["cpu_per_hour__avg"]
+
+        env_median = median(env_values.values_list("env_per_hour", flat=True))
+        env_average = env_values.aggregate(Avg("env_per_hour"))["env_per_hour__avg"]
+
+        start_median = median(start_values.values_list("start_price", flat=True))
+        start_average = start_values.aggregate(Avg("start_price"))["start_price__avg"]
+
         pricing_data = {
             "cpu_median": cpu_median,
+            "cpu_average": cpu_average,
             "env_median": env_median,
+            "env_average": env_average,
             "start_median": start_median,
+            "start_average": start_average,
         }
-        print(f"Median pricing data: {pricing_data}")
+        print(f"Median and average pricing data: {pricing_data}")
 
-        r.set("pricing_median", json.dumps(pricing_data))
+        r.set("pricing_past_hour_v2", json.dumps(pricing_data))
     except Exception as e:
         print(e)  # Replace with proper logging mechanism
 
+
+import numpy as np
+
+
+@app.task
+def chart_pricing_data_for_frontend():
+    def pricing_snapshot_stats_with_dates(start_date, end_date):
+        snapshot_data = PricingSnapshot.objects.filter(
+            created_at__range=(start_date, end_date)
+        ).order_by("created_at")
+        data = []
+        for snapshot in snapshot_data:
+            data_entry = {
+                "date": snapshot.created_at.timestamp(),
+                "average_cpu": snapshot.average_cpu_price,
+                "median_cpu": snapshot.median_cpu_price,
+                "average_env": snapshot.average_env_price,
+                "median_env": snapshot.median_env_price,
+                "average_start": snapshot.average_start_price,
+                "median_start": snapshot.median_start_price,
+            }
+            data.append(data_entry)
+        return data
+
+    now = datetime.now()
+    data = {
+        "1w": pricing_snapshot_stats_with_dates(now - timedelta(weeks=1), now),
+        "2w": pricing_snapshot_stats_with_dates(now - timedelta(weeks=2), now),
+        "4w": pricing_snapshot_stats_with_dates(now - timedelta(weeks=4), now),
+        "All": pricing_snapshot_stats_with_dates(
+            PricingSnapshot.objects.earliest("created_at").created_at, now
+        ),
+    }
+
+    r.set("pricing_data_charted_v2", json.dumps(data))
3 changes: 2 additions & 1 deletion stats-backend/api2/urls.py
@@ -13,7 +13,8 @@
     path("network/online", views.network_online),
     path("network/online/flatmap", views.network_online_flatmap),
     path("network/offers/cheapest/cores", views.cheapest_by_cores),
-    path("network/pricing/median/1h", views.get_median_pricing_1h),
+    path("network/pricing/1h", views.pricing_past_hour),
+    path("network/pricing/historical", views.historical_pricing_data),
     path("provider/wallet/<wallet>", views.node_wallet),
     path("provider/node/<yagna_id>", views.node),
     path("provider/uptime/<yagna_id>", views.node_uptime),
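The api2 urlconf is mounted by the project-level urls.py, which is outside this diff, so the public prefix below is only an assumption. A quick way to exercise the renamed and new routes once deployed:

    # Illustrative client check; the base URL and /v2 prefix are assumptions.
    import requests

    BASE = "https://api.stats.golem.network/v2"
    print(requests.get(f"{BASE}/network/pricing/1h").json())
    print(requests.get(f"{BASE}/network/pricing/historical").json())

Note that the old network/pricing/median/1h path is removed rather than aliased, so clients still calling it will get 404s after this change.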
17 changes: 15 additions & 2 deletions stats-backend/api2/views.py
@@ -28,13 +28,13 @@
 from .scoring import calculate_uptime_percentage
 
 
-async def get_median_pricing_1h(request):
+async def pricing_past_hour(request):
     try:
         pool = aioredis.ConnectionPool.from_url(
             "redis://redis:6379/0", decode_responses=True
         )
         r = aioredis.Redis(connection_pool=pool)
-        pricing_data = json.loads(await r.get("pricing_median"))
+        pricing_data = json.loads(await r.get("pricing_past_hour_v2"))
         pool.disconnect()
         return JsonResponse(pricing_data)
     except Exception as e:
@@ -72,6 +72,19 @@ async def network_historical_stats(request):
         return HttpResponse(status=400)
 
 
+async def historical_pricing_data(request):
+    if request.method == "GET":
+        pool = aioredis.ConnectionPool.from_url(
+            "redis://redis:6379/0", decode_responses=True
+        )
+        r = aioredis.Redis(connection_pool=pool)
+        content = await r.get("pricing_data_charted_v2")
+        data = json.loads(content) if content else {}
+        return JsonResponse(data, safe=False, json_dumps_params={"indent": 4})
+    else:
+        return HttpResponse(status=400)
+
+
 @api_view(["GET"])
 def node_uptime(request, yagna_id):
     node = Node.objects.filter(node_id=yagna_id).first()
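Both views simply deserialize what the Celery tasks cached in Redis, so the response bodies mirror the dictionaries built in tasks.py. A sketch of the expected shapes, with placeholder numbers:

    # network/pricing/1h: contents of the "pricing_past_hour_v2" key (values are placeholders)
    {
        "cpu_median": 0.1, "cpu_average": 0.12,
        "env_median": 0.02, "env_average": 0.03,
        "start_median": 0.0, "start_average": 0.001,
    }

    # network/pricing/historical: contents of the "pricing_data_charted_v2" key
    {
        "1w": [
            {
                "date": 1708905600.0,
                "average_cpu": 0.12, "median_cpu": 0.1,
                "average_env": 0.03, "median_env": 0.02,
                "average_start": 0.001, "median_start": 0.0,
            },
        ],
        "2w": [...], "4w": [...], "All": [...],
    }

historical_pricing_data returns an empty object if the chart task has not yet populated its key.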
11 changes: 9 additions & 2 deletions stats-backend/core/celery.py
@@ -56,7 +56,8 @@ def setup_periodic_tasks(sender, **kwargs):
         compare_ec2_and_golem,
         providers_who_received_tasks,
         create_pricing_snapshot,
-        median_pricing_past_hour,
+        median_and_average_pricing_past_hour,
+        chart_pricing_data_for_frontend,
     )
 
     # sender.add_periodic_task(
@@ -83,9 +84,15 @@
         queue="default",
         options={"queue": "default", "routing_key": "default"},
     )
+    sender.add_periodic_task(
+        crontab(minute="*/10"),
+        chart_pricing_data_for_frontend.s(),
+        queue="default",
+        options={"queue": "default", "routing_key": "default"},
+    )
     sender.add_periodic_task(
         60,
-        median_pricing_past_hour.s(),
+        median_and_average_pricing_past_hour.s(),
         queue="default",
         options={"queue": "default", "routing_key": "default"},
     )
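With this schedule, median_and_average_pricing_past_hour runs every 60 seconds and chart_pricing_data_for_frontend every 10 minutes. A quick, hypothetical sanity check that both tasks end up registered (this assumes the Celery app object in core/celery.py is named `app`, is importable as core.celery, and that task autodiscovery pulls in api2.tasks):

    # Run inside the backend's Python environment; names and import path are assumptions.
    from core.celery import app

    pricing_tasks = sorted(name for name in app.tasks if "pricing" in name)
    print(pricing_tasks)  # expect both the median/average and chart tasks to appear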
