From 6fd8d63ad72bb698ef34c18de63a7b666e32b6ec Mon Sep 17 00:00:00 2001 From: Phillip Jensen Date: Fri, 12 Apr 2024 14:44:40 +0200 Subject: [PATCH] Rework task data endpoint --- stats-backend/api2/tasks.py | 44 ----------------------------- stats-backend/api2/views.py | 54 +++++++++++++++++++++++++++++++----- stats-backend/core/celery.py | 8 +----- 3 files changed, 48 insertions(+), 58 deletions(-) diff --git a/stats-backend/api2/tasks.py b/stats-backend/api2/tasks.py index 5f12215..e916ee4 100644 --- a/stats-backend/api2/tasks.py +++ b/stats-backend/api2/tasks.py @@ -1109,50 +1109,6 @@ def sum_highest_runtime_resources(): ) -@app.task -def get_provider_task_data(): - response = { - "testnet": {"1d": [], "7d": [], "1m": [], "1y": [], "All": []}, - "mainnet": {"1d": [], "7d": [], "1m": [], "1y": [], "All": []}, - } - - networks = ["testnet", "mainnet"] - - timeframes = { - "1d": datetime.now() - timedelta(days=1), - "7d": datetime.now() - timedelta(days=7), - "1m": datetime.now() - timedelta(days=30), - "1y": datetime.now() - timedelta(days=365), - } - - for network in networks: - data = ( - ProviderWithTask.objects.filter(network=network) - .prefetch_related("instance", "offer") - .select_related("offer__cheaper_than", "offer__overpriced_compared_to") - .order_by("created_at") - ) - - for entry in data: - entry_data = { - "providerName": entry.offer.properties.get("golem.node.id.name", ""), - "providerId": entry.instance.node_id, - "cores": entry.offer.properties.get("golem.inf.cpu.threads", 0), - "memory": entry.offer.properties.get("golem.inf.mem.gib", 0), - "disk": entry.offer.properties.get("golem.inf.storage.gib", 0), - "cpuh": entry.cpu_per_hour, - "envh": entry.env_per_hour, - "start": entry.start_price, - "date": entry.created_at.timestamp(), - } - - response[network]["All"].append(entry_data) - - for timeframe, start_date in timeframes.items(): - if entry.created_at > start_date: - response[network][timeframe].append(entry_data) - - 
r.set("provider_task_price_data", json.dumps(response)) from django.db.models import Count, F, Window diff --git a/stats-backend/api2/views.py b/stats-backend/api2/views.py index 98d14d2..b621213 100644 --- a/stats-backend/api2/views.py +++ b/stats-backend/api2/views.py @@ -41,15 +41,55 @@ async def pricing_past_hour(request): return JsonResponse({"error": str(e)}, status=500) -async def task_pricing(request): +from django.core.paginator import Paginator +from django.http import JsonResponse +from .models import ProviderWithTask + + +def task_pricing(request): try: - pool = aioredis.ConnectionPool.from_url( - "redis://redis:6379/0", decode_responses=True + network = request.GET.get("network", "mainnet") + timeframe = request.GET.get("timeframe", "All") + page = int(request.GET.get("page", 1)) + per_page = int(request.GET.get("per_page", 10)) + + data = ( + ProviderWithTask.objects.filter(network=network) + .prefetch_related("instance", "offer") + .select_related("offer__cheaper_than", "offer__overpriced_compared_to") + .order_by("created_at") ) - r = aioredis.Redis(connection_pool=pool) - pricing_data = json.loads(await r.get("provider_task_price_data")) - pool.disconnect() - return JsonResponse(pricing_data) + + if timeframe != "All": + start_date = datetime.now() - timedelta(days=int(timeframe[:-1]) * {"d": 1, "m": 30, "y": 365}.get(timeframe[-1], 1)) + data = data.filter(created_at__gte=start_date) + + paginator = Paginator(data, per_page) + page_data = paginator.get_page(page) + + response_data = { + "results": [], + "page": page, + "per_page": per_page, + "total_pages": paginator.num_pages, + "total_results": paginator.count, + } + + for entry in page_data: + entry_data = { + "providerName": entry.offer.properties.get("golem.node.id.name", ""), + "providerId": entry.instance.node_id, + "cores": entry.offer.properties.get("golem.inf.cpu.threads", 0), + "memory": entry.offer.properties.get("golem.inf.mem.gib", 0), + "disk": entry.offer.properties.get("golem.inf.storage.gib", 0), + "cpuh": entry.cpu_per_hour, + 
"envh": entry.env_per_hour, + "start": entry.start_price, + "date": entry.created_at.timestamp(), + } + response_data["results"].append(entry_data) + + return JsonResponse(response_data) except Exception as e: return JsonResponse({"error": str(e)}, status=500) diff --git a/stats-backend/core/celery.py b/stats-backend/core/celery.py index 6eb6a2f..0345226 100644 --- a/stats-backend/core/celery.py +++ b/stats-backend/core/celery.py @@ -56,7 +56,6 @@ def setup_periodic_tasks(sender, **kwargs): median_and_average_pricing_past_hour, chart_pricing_data_for_frontend, v2_network_online_to_redis_new_stats_page, - get_provider_task_data, online_nodes_uptime_donut_data, v2_network_stats_to_redis, sum_highest_runtime_resources, @@ -191,12 +190,7 @@ def setup_periodic_tasks(sender, **kwargs): queue="default", options={"queue": "default", "routing_key": "default"}, ) - sender.add_periodic_task( - crontab(minute="*/11"), - get_provider_task_data.s(), - queue="default", - options={"queue": "default", "routing_key": "default"}, - ) + sender.add_periodic_task( crontab(minute="*/10"), chart_pricing_data_for_frontend.s(),