Skip to content

Commit

Permalink
Rework task data endpoint
Browse files Browse the repository at this point in the history
  • Loading branch information
cryptobench committed Apr 12, 2024
1 parent 8b7186d commit 6fd8d63
Show file tree
Hide file tree
Showing 3 changed files with 48 additions and 58 deletions.
44 changes: 0 additions & 44 deletions stats-backend/api2/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -1109,50 +1109,6 @@ def sum_highest_runtime_resources():
)


@app.task
def get_provider_task_data():
    """Snapshot per-provider task pricing for both networks and cache it.

    Builds one record per ProviderWithTask row, buckets each record into
    rolling time windows ("1d", "7d", "1m", "1y") plus an "All" bucket,
    and stores the whole structure as JSON in Redis under the key
    'provider_task_price_data'.
    """
    # Cutoff timestamp for each rolling window; "All" has no cutoff.
    # NOTE(review): naive datetime.now() — presumably created_at is also
    # naive/local; confirm the project is not mixing aware datetimes.
    cutoffs = {
        "1d": datetime.now() - timedelta(days=1),
        "7d": datetime.now() - timedelta(days=7),
        "1m": datetime.now() - timedelta(days=30),
        "1y": datetime.now() - timedelta(days=365),
    }

    payload = {
        net: {window: [] for window in ("1d", "7d", "1m", "1y", "All")}
        for net in ("testnet", "mainnet")
    }

    for net in ("testnet", "mainnet"):
        rows = (
            ProviderWithTask.objects.filter(network=net)
            .prefetch_related("instance", "offer")
            .select_related("offer__cheaper_than", "offer__overpriced_compared_to")
            .order_by("created_at")
        )

        for row in rows:
            props = row.offer.properties
            record = {
                "providerName": props.get("golem.node.id.name", ""),
                "providerId": row.instance.node_id,
                "cores": props.get("golem.inf.cpu.threads", 0),
                "memory": props.get("golem.inf.mem.gib", 0),
                "disk": props.get("golem.inf.storage.gib", 0),
                "cpuh": row.cpu_per_hour,
                "envh": row.env_per_hour,
                "start": row.start_price,
                "date": row.created_at.timestamp(),
            }

            payload[net]["All"].append(record)
            # A record lands in every window whose cutoff it is newer than.
            for window, cutoff in cutoffs.items():
                if row.created_at > cutoff:
                    payload[net][window].append(record)

    r.set("provider_task_price_data", json.dumps(payload))


from django.db.models import Count, F, Window
Expand Down
54 changes: 47 additions & 7 deletions stats-backend/api2/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,15 +41,55 @@ async def pricing_past_hour(request):
return JsonResponse({"error": str(e)}, status=500)


from datetime import datetime, timedelta

from django.core.paginator import Paginator
from django.http import JsonResponse

from .models import ProviderWithTask

# Rolling windows in days for the ?timeframe= query parameter.
# "1m" / "1y" are the 30 / 365 day approximations this endpoint has
# always advertised.
TIMEFRAME_DAYS = {"1d": 1, "7d": 7, "1m": 30, "1y": 365}


def task_pricing(request):
    """Return paginated per-task provider pricing records as JSON.

    Query parameters:
        network:   "mainnet" (default) or "testnet".
        timeframe: "All" (default), "1d", "7d", "1m" or "1y".
        page:      1-based page number (default 1).
        per_page:  page size (default 10).

    Responds 400 for an unrecognised timeframe, 500 (with the error
    message) for any other failure.
    """
    try:
        network = request.GET.get("network", "mainnet")
        timeframe = request.GET.get("timeframe", "All")
        page = int(request.GET.get("page", 1))
        per_page = int(request.GET.get("per_page", 10))

        data = (
            ProviderWithTask.objects.filter(network=network)
            .prefetch_related("instance", "offer")
            .select_related("offer__cheaper_than", "offer__overpriced_compared_to")
            .order_by("created_at")
        )

        if timeframe != "All":
            # BUG FIX: int(timeframe[:-1]) parsed "1m" and "1y" as 1 day.
            # Map each timeframe to its real day count instead.
            days = TIMEFRAME_DAYS.get(timeframe)
            if days is None:
                return JsonResponse(
                    {"error": f"unknown timeframe {timeframe!r}"}, status=400
                )
            start_date = datetime.now() - timedelta(days=days)
            data = data.filter(created_at__gte=start_date)

        paginator = Paginator(data, per_page)
        # get_page clamps out-of-range / invalid page numbers instead of raising.
        page_data = paginator.get_page(page)

        results = []
        for entry in page_data:
            props = entry.offer.properties
            results.append(
                {
                    "providerName": props.get("golem.node.id.name", ""),
                    "providerId": entry.instance.node_id,
                    "cores": props.get("golem.inf.cpu.threads", 0),
                    "memory": props.get("golem.inf.mem.gib", 0),
                    "disk": props.get("golem.inf.storage.gib", 0),
                    "cpuh": entry.cpu_per_hour,
                    "envh": entry.env_per_hour,
                    "start": entry.start_price,
                    "date": entry.created_at.timestamp(),
                }
            )

        return JsonResponse(
            {
                "results": results,
                "page": page,
                "per_page": per_page,
                "total_pages": paginator.num_pages,
                "total_results": paginator.count,
            }
        )
    except Exception as e:
        # Broad catch kept from the original contract: any failure is
        # reported to the client as a JSON 500 body.
        return JsonResponse({"error": str(e)}, status=500)

Expand Down
8 changes: 1 addition & 7 deletions stats-backend/core/celery.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,6 @@ def setup_periodic_tasks(sender, **kwargs):
median_and_average_pricing_past_hour,
chart_pricing_data_for_frontend,
v2_network_online_to_redis_new_stats_page,
get_provider_task_data,
online_nodes_uptime_donut_data,
v2_network_stats_to_redis,
sum_highest_runtime_resources,
Expand Down Expand Up @@ -191,12 +190,7 @@ def setup_periodic_tasks(sender, **kwargs):
queue="default",
options={"queue": "default", "routing_key": "default"},
)
sender.add_periodic_task(
crontab(minute="*/11"),
get_provider_task_data.s(),
queue="default",
options={"queue": "default", "routing_key": "default"},
)

sender.add_periodic_task(
crontab(minute="*/10"),
chart_pricing_data_for_frontend.s(),
Expand Down

0 comments on commit 6fd8d63

Please sign in to comment.