diff --git a/nginx.conf b/nginx.conf
index 5bd3146..2a84399 100644
--- a/nginx.conf
+++ b/nginx.conf
@@ -1,4 +1,10 @@
-worker_processes auto; # Changed from 1 to auto for better performance
+# Use all available CPU cores
+worker_processes auto;
+
+# Configure worker settings
+worker_rlimit_nofile 65535;
+timer_resolution 100ms;
+pcre_jit on;
 
 events {
     worker_connections 4096;
@@ -7,110 +13,250 @@ events {
 }
 
 http {
-    include mime.types;
-    default_type application/octet-stream;
+    # Basic settings
+    include mime.types;
+    default_type application/octet-stream;
+    server_tokens off;
+
+    # Character encoding
+    charset utf-8;
+    charset_types text/css text/plain text/vnd.wap.wml text/javascript text/markdown text/calendar text/x-component text/x-cross-domain-policy;
+
+    # Optimization for file handling
+    sendfile on;
+    tcp_nopush on;
+    tcp_nodelay on;
+    aio threads;
+    directio 512;
+
+    # Buffer size optimization
+    client_body_buffer_size 128k;
+    client_max_body_size 0;
+    client_header_buffer_size 32k;
+    large_client_header_buffers 4 32k;
+
+    # Timeouts
+    client_body_timeout 300s;
+    client_header_timeout 300s;
+    send_timeout 600s;
+    keepalive_timeout 650s;
+    keepalive_requests 1000;
 
     # HTTP/2 specific settings
     http2_max_field_size 16k;
     http2_max_header_size 32k;
     http2_max_requests 1000;
-    http2_idle_timeout 5m;
+    http2_idle_timeout 300s;
 
-    # Global timeout settings
-    proxy_connect_timeout 600;
-    proxy_send_timeout 600;
-    proxy_read_timeout 1800;
-    send_timeout 600;
-    keepalive_timeout 650;
-
-    sendfile on;
-    tcp_nopush on;
-    tcp_nodelay on;
-
-    # Buffering settings for large responses
+    # Proxy settings
+    proxy_connect_timeout 600s;
+    proxy_send_timeout 600s;
+    proxy_read_timeout 1800s;
     proxy_buffer_size 128k;
     proxy_buffers 8 256k;
     proxy_busy_buffers_size 256k;
     proxy_temp_file_write_size 256k;
     proxy_max_temp_file_size 0;
+    proxy_request_buffering on;
+    proxy_http_version 1.1;
+    proxy_buffering on;
+
+    # Compression settings
+    gzip on;
+    gzip_comp_level 5;
+    gzip_min_length 256;
+    gzip_proxied any;
+    gzip_vary on;
+    gzip_types
+        application/javascript
+        application/json
+        application/xml
+        text/css
+        text/plain
+        text/xml
+        text/javascript;
+
+    # File cache settings
+    open_file_cache max=1000 inactive=20s;
+    open_file_cache_valid 30s;
+    open_file_cache_min_uses 2;
+    open_file_cache_errors on;
+
+    # Common security headers
+    add_header X-Content-Type-Options nosniff always;
+    add_header X-Frame-Options SAMEORIGIN always;
+    add_header X-XSS-Protection "1; mode=block" always;
+    add_header Referrer-Policy strict-origin-when-cross-origin always;
 
+    # UAT Server Configuration
     server {
         listen 80 http2;
         server_name uat.vespadb.be;
 
-        # Increase client body size limit
-        client_max_body_size 0; # Disabled limit for large files
-
+        # Static files
         location /static/ {
             alias /workspaces/vespadb/collected_static/;
+            expires 30d;
+            access_log off;
+            add_header Cache-Control "public, no-transform";
         }
 
         location /media/ {
             alias /workspaces/vespadb/media/;
+            expires 30d;
+            access_log off;
+            add_header Cache-Control "public, no-transform";
         }
 
+        # API and main application
         location / {
             proxy_pass http://127.0.0.1:8000;
             proxy_set_header Host $host;
             proxy_set_header X-Real-IP $remote_addr;
             proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
             proxy_set_header X-Forwarded-Proto $scheme;
+            proxy_set_header Connection "";
+
+            # CORS headers for API
+            add_header 'Access-Control-Allow-Origin' $http_origin always;
+            add_header 'Access-Control-Allow-Credentials' 'true' always;
+            add_header 'Access-Control-Allow-Methods' 'GET, POST, PUT, DELETE, OPTIONS' always;
+            add_header 'Access-Control-Allow-Headers' 'Authorization,Content-Type,Accept,Origin,User-Agent,DNT,Cache-Control,X-Mx-ReqToken,Keep-Alive,X-Requested-With,If-Modified-Since' always;
+
+            # Handle OPTIONS method for CORS
+            if ($request_method = 'OPTIONS') {
+                add_header 'Access-Control-Allow-Origin' $http_origin always;
+                add_header 'Access-Control-Allow-Credentials' 'true' always;
+                add_header 'Access-Control-Allow-Methods' 'GET, POST, PUT, DELETE, OPTIONS' always;
+                add_header 'Access-Control-Allow-Headers' 'Authorization,Content-Type,Accept,Origin,User-Agent,DNT,Cache-Control,X-Mx-ReqToken,Keep-Alive,X-Requested-With,If-Modified-Since' always;
+                add_header 'Access-Control-Max-Age' 1728000;
+                add_header 'Content-Type' 'text/plain charset=UTF-8';
+                add_header 'Content-Length' 0;
+                return 204;
+            }
+        }
 
-            # HTTP/2 specific
-            proxy_http_version 1.1;
+        # Special handling for export endpoint
+        location /api/observations/export_direct {
+            proxy_pass http://127.0.0.1:8000;
+            proxy_set_header Host $host;
+            proxy_set_header X-Real-IP $remote_addr;
+            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+            proxy_set_header X-Forwarded-Proto $scheme;
             proxy_set_header Connection "";
 
-            # Timeouts
-            proxy_connect_timeout 600s;
-            proxy_send_timeout 600s;
+            # Extended timeouts for long-running exports
             proxy_read_timeout 1800s;
+            proxy_connect_timeout 600s;
+            proxy_send_timeout 600s;
 
-            # Buffer settings for large files
-            proxy_buffering on;
-            proxy_request_buffering on;
+            # Buffer settings for large responses
             proxy_buffer_size 128k;
             proxy_buffers 8 256k;
             proxy_busy_buffers_size 256k;
+
+            # CORS headers
+            add_header 'Access-Control-Allow-Origin' $http_origin always;
+            add_header 'Access-Control-Allow-Credentials' 'true' always;
+            add_header 'Access-Control-Allow-Methods' 'GET, POST, PUT, DELETE, OPTIONS' always;
+            add_header 'Access-Control-Allow-Headers' 'Authorization,Content-Type,Accept,Origin,User-Agent,DNT,Cache-Control,X-Mx-ReqToken,Keep-Alive,X-Requested-With,If-Modified-Since' always;
+
+            if ($request_method = 'OPTIONS') {
+                add_header 'Access-Control-Allow-Origin' $http_origin always;
+                add_header 'Access-Control-Allow-Credentials' 'true' always;
+                add_header 'Access-Control-Allow-Methods' 'GET, POST, PUT, DELETE, OPTIONS' always;
+                add_header 'Access-Control-Allow-Headers' 'Authorization,Content-Type,Accept,Origin,User-Agent,DNT,Cache-Control,X-Mx-ReqToken,Keep-Alive,X-Requested-With,If-Modified-Since' always;
+                add_header 'Access-Control-Max-Age' 1728000;
+                add_header 'Content-Type' 'text/plain charset=UTF-8';
+                add_header 'Content-Length' 0;
+                return 204;
+            }
         }
     }
 
+    # Production Server Configuration
     server {
         listen 80 http2;
         server_name data.vespawatch.be;
 
-        # Increase client body size limit
-        client_max_body_size 0; # Disabled limit for large files
-
+        # Static files
         location /static/ {
             alias /workspaces/vespadb/collected_static/;
+            expires 30d;
+            access_log off;
+            add_header Cache-Control "public, no-transform";
         }
 
         location /media/ {
             alias /workspaces/vespadb/media/;
+            expires 30d;
+            access_log off;
+            add_header Cache-Control "public, no-transform";
         }
 
+        # API and main application
         location / {
             proxy_pass http://127.0.0.1:8000;
             proxy_set_header Host $host;
             proxy_set_header X-Real-IP $remote_addr;
             proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
             proxy_set_header X-Forwarded-Proto $scheme;
+            proxy_set_header Connection "";
+
+            # CORS headers for API
+            add_header 'Access-Control-Allow-Origin' $http_origin always;
+            add_header 'Access-Control-Allow-Credentials' 'true' always;
+            add_header 'Access-Control-Allow-Methods' 'GET, POST, PUT, DELETE, OPTIONS' always;
+            add_header 'Access-Control-Allow-Headers' 'Authorization,Content-Type,Accept,Origin,User-Agent,DNT,Cache-Control,X-Mx-ReqToken,Keep-Alive,X-Requested-With,If-Modified-Since' always;
+
+            # Handle OPTIONS method for CORS
+            if ($request_method = 'OPTIONS') {
+                add_header 'Access-Control-Allow-Origin' $http_origin always;
+                add_header 'Access-Control-Allow-Credentials' 'true' always;
+                add_header 'Access-Control-Allow-Methods' 'GET, POST, PUT, DELETE, OPTIONS' always;
+                add_header 'Access-Control-Allow-Headers' 'Authorization,Content-Type,Accept,Origin,User-Agent,DNT,Cache-Control,X-Mx-ReqToken,Keep-Alive,X-Requested-With,If-Modified-Since' always;
+                add_header 'Access-Control-Max-Age' 1728000;
+                add_header 'Content-Type' 'text/plain charset=UTF-8';
+                add_header 'Content-Length' 0;
+                return 204;
+            }
+        }
 
-            # HTTP/2 specific
-            proxy_http_version 1.1;
+        # Special handling for export endpoint
+        location /api/observations/export_direct {
+            proxy_pass http://127.0.0.1:8000;
+            proxy_set_header Host $host;
+            proxy_set_header X-Real-IP $remote_addr;
+            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+            proxy_set_header X-Forwarded-Proto $scheme;
             proxy_set_header Connection "";
 
-            # Timeouts
+            # Extended timeouts for long-running exports
+            proxy_read_timeout 1800s;
             proxy_connect_timeout 600s;
             proxy_send_timeout 600s;
-            proxy_read_timeout 1800s;
 
-            # Buffer settings for large files
-            proxy_buffering on;
-            proxy_request_buffering on;
+            # Buffer settings for large responses
             proxy_buffer_size 128k;
             proxy_buffers 8 256k;
             proxy_busy_buffers_size 256k;
+
+            # CORS headers
+            add_header 'Access-Control-Allow-Origin' $http_origin always;
+            add_header 'Access-Control-Allow-Credentials' 'true' always;
+            add_header 'Access-Control-Allow-Methods' 'GET, POST, PUT, DELETE, OPTIONS' always;
+            add_header 'Access-Control-Allow-Headers' 'Authorization,Content-Type,Accept,Origin,User-Agent,DNT,Cache-Control,X-Mx-ReqToken,Keep-Alive,X-Requested-With,If-Modified-Since' always;
+
+            if ($request_method = 'OPTIONS') {
+                add_header 'Access-Control-Allow-Origin' $http_origin always;
+                add_header 'Access-Control-Allow-Credentials' 'true' always;
+                add_header 'Access-Control-Allow-Methods' 'GET, POST, PUT, DELETE, OPTIONS' always;
+                add_header 'Access-Control-Allow-Headers' 'Authorization,Content-Type,Accept,Origin,User-Agent,DNT,Cache-Control,X-Mx-ReqToken,Keep-Alive,X-Requested-With,If-Modified-Since' always;
+                add_header 'Access-Control-Max-Age' 1728000;
+                add_header 'Content-Type' 'text/plain charset=UTF-8';
+                add_header 'Content-Length' 0;
+                return 204;
+            }
         }
     }
-}
+}
\ No newline at end of file
diff --git a/src/stores/vespaStore.js b/src/stores/vespaStore.js
index 26d3205..a97be28 100644
--- a/src/stores/vespaStore.js
+++ b/src/stores/vespaStore.js
@@ -309,54 +309,60 @@ export const useVespaStore = defineStore('vespaStore', {
         },
         async exportData(format) {
             try {
-                this.isExporting = true; // Start loading indicator
+                this.isExporting = true;
+
+                // Get the current filter query
+                const filterQuery = this.createFilterQuery();
+
+                // Make the export request
                 const response = await ApiService.get(
-                    `/observations/export?${this.createFilterQuery()}`
+                    `/observations/export_direct?${filterQuery}`,
+                    {
+                        responseType: 'blob',
+                        timeout: 300000, // 5 minute timeout
+                        headers: {
+                            'Accept': 'text/csv',
+                        }
+                    }
                 );
 
-                if (response.status === 200) {
-                    const { export_id } = response.data;
+                // Create and trigger download
+                const blob = new Blob([response.data], { type: 'text/csv' });
+                const url = window.URL.createObjectURL(blob);
+                const link = document.createElement('a');
+                link.href = url;
+                link.setAttribute(
+                    'download',
+                    `observations_export_${new Date().getTime()}.csv`
+                );
 
-                    const checkStatus = async () => {
-                        const statusResponse = await ApiService.get(
-                            `/observations/export_status?export_id=${export_id}`
-                        );
+                // Trigger download
+                document.body.appendChild(link);
+                link.click();
 
-                        if (statusResponse.data.status === 'completed') {
-                            const downloadResponse = await ApiService.get(
-                                `/observations/download_export/?export_id=${export_id}`,
-                                { responseType: 'blob' }
-                            );
+                // Cleanup
+                document.body.removeChild(link);
+                window.URL.revokeObjectURL(url);
+                this.isExporting = false;
 
-                            const blob = new Blob([downloadResponse.data], { type: 'text/csv' });
-                            const url = window.URL.createObjectURL(blob);
-                            const link = document.createElement('a');
-                            link.href = url;
-                            link.setAttribute('download', `observations_export_${new Date().getTime()}.csv`);
-                            document.body.appendChild(link);
-                            link.click();
-                            document.body.removeChild(link);
-                            window.URL.revokeObjectURL(url);
-                            this.isExporting = false; // Stop loading indicator
-                            return true;
-                        } else if (statusResponse.data.status === 'failed') {
-                            this.isExporting = false; // Stop loading indicator on error
-                            throw new Error(statusResponse.data.error || 'Export failed');
-                        }
+            } catch (error) {
+                this.isExporting = false;
+                console.error('Error exporting data:', error);
 
-                        return new Promise(resolve => {
-                            setTimeout(async () => {
-                                resolve(await checkStatus());
-                            }, 2000);
-                        });
-                    };
+                // Handle specific error cases
+                let errorMessage = 'Export failed. Please try again.';
 
-                    await checkStatus();
+                if (error.response) {
+                    if (error.response.status === 400) {
+                        errorMessage = error.response.data.error || 'Invalid export request';
+                    } else if (error.response.status === 403) {
+                        errorMessage = 'You do not have permission to export data';
+                    } else if (error.response.status === 504) {
+                        errorMessage = 'Export timed out. Please try with fewer filters';
+                    }
                 }
-            } catch (error) {
-                this.isExporting = false; // Stop loading indicator on error
-                console.error('Error exporting data:', error);
-                throw error;
+
+                throw new Error(errorMessage);
             }
         },
         async fetchMunicipalitiesByProvinces(provinceIds) {
diff --git a/vespadb/observations/tasks/export_utils.py b/vespadb/observations/tasks/export_utils.py
new file mode 100644
index 0000000..cbb2cfa
--- /dev/null
+++ b/vespadb/observations/tasks/export_utils.py
@@ -0,0 +1,109 @@
+from typing import Iterator, List, Set, Any, Union, Protocol
+from django.db.models.query import QuerySet
+from django.db.models import Model
+import csv
+import logging
+from ..models import Observation
+
+logger = logging.getLogger(__name__)
+
+class WriterProtocol(Protocol):
+    def writerow(self, row: List[str]) -> Any: ...
+
+CSV_HEADERS = [
+    "id", "created_datetime", "modified_datetime", "latitude", "longitude",
+    "source", "source_id", "nest_height", "nest_size", "nest_location",
+    "nest_type", "observation_datetime", "province", "eradication_date",
+    "municipality", "images", "anb_domain", "notes", "eradication_result",
+    "wn_id", "wn_validation_status", "nest_status"
+]
+
+def get_status(observation: Observation) -> str:
+    """Get observation status string."""
+    if observation.eradication_result:
+        return "eradicated"
+    if observation.reserved_by:
+        return "reserved"
+    return "untreated"
+
+def prepare_row_data(
+    observation: Observation,
+    is_admin: bool,
+    user_municipality_ids: Set[str]
+) -> List[str]:
+    """
+    Prepare a single row of data for the CSV export with error handling.
+    """
+    try:
+        # Determine allowed fields based on permissions
+        if is_admin or (observation.municipality_id in user_municipality_ids):
+            allowed_fields = CSV_HEADERS
+        else:
+            allowed_fields = ["id", "created_datetime", "latitude", "longitude", "source",
+                              "nest_height", "nest_type", "observation_datetime", "province",
+                              "municipality", "nest_status", "source_id", "anb_domain"]
+
+        row_data: List[str] = []
+        for field in CSV_HEADERS:
+            try:
+                if field not in allowed_fields:
+                    row_data.append("")
+                    continue
+
+                if field == "latitude":
+                    row_data.append(str(observation.location.y) if observation.location else "")
+                elif field == "longitude":
+                    row_data.append(str(observation.location.x) if observation.location else "")
+                elif field in ["created_datetime", "modified_datetime", "observation_datetime"]:
+                    datetime_val = getattr(observation, field, None)
+                    if datetime_val:
+                        datetime_val = datetime_val.replace(microsecond=0)
+                        row_data.append(datetime_val.isoformat() + "Z")
+                    else:
+                        row_data.append("")
+                elif field == "province":
+                    row_data.append(observation.province.name if observation.province else "")
+                elif field == "municipality":
+                    row_data.append(observation.municipality.name if observation.municipality else "")
+                elif field == "anb_domain":
+                    row_data.append(str(observation.anb))
+                elif field == "nest_status":
+                    row_data.append(get_status(observation))
+                elif field == "source_id":
+                    row_data.append(str(observation.source_id) if observation.source_id is not None else "")
+                else:
+                    value = getattr(observation, field, "")
+                    row_data.append(str(value) if value is not None else "")
+            except Exception as e:
+                logger.warning(f"Error processing field {field} for observation {observation.id}: {str(e)}")
+                row_data.append("")
+
+        return row_data
+    except Exception as e:
+        logger.error(f"Error preparing row data for observation {observation.id}: {str(e)}")
+        return [""] * len(CSV_HEADERS)
+
+def generate_rows(
+    queryset: QuerySet[Model],
+    writer: WriterProtocol,
+    is_admin: bool,
+    user_municipality_ids: Set[str]
+) -> Iterator[Any]:
+    """
+    Generate CSV rows for streaming.
+    """
+    # First yield the headers
+    yield writer.writerow(CSV_HEADERS)
+
+    # Then yield the data rows
+    for observation in queryset.iterator(chunk_size=2000):
+        try:
+            row = prepare_row_data(
+                observation,
+                is_admin,
+                user_municipality_ids
+            )
+            yield writer.writerow(row)
+        except Exception as e:
+            logger.error(f"Error processing observation {observation.id}: {e}")
+            continue
\ No newline at end of file
diff --git a/vespadb/observations/tasks/generate_export.py b/vespadb/observations/tasks/generate_export.py
index 02ce2d3..1e40805 100644
--- a/vespadb/observations/tasks/generate_export.py
+++ b/vespadb/observations/tasks/generate_export.py
@@ -1,124 +1,16 @@
-import csv
-import logging
-from datetime import datetime, timedelta
-from typing import Optional, Dict, Any, List, Set, Iterator
+from celery import shared_task
 from django.core.cache import cache
-from django.db import models, transaction
+from django.db import transaction
 from django.utils import timezone
-from celery import shared_task
-from vespadb.observations.models import Observation, Export
+from datetime import timedelta
+import logging
+from typing import Dict, Any, Optional, Set
 from vespadb.users.models import VespaUser as User
-from vespadb.observations.serializers import user_read_fields, public_read_fields
+from vespadb.observations.models import Export
+from .export_utils import CSV_HEADERS, prepare_row_data
 
 logger = logging.getLogger(__name__)
 
-CSV_HEADERS = [
-    "id", "created_datetime", "modified_datetime", "latitude", "longitude",
-    "source", "source_id", "nest_height", "nest_size", "nest_location",
-    "nest_type", "observation_datetime", "province", "eradication_date",
-    "municipality", "images", "anb_domain", "notes", "eradication_result",
-    "wn_id", "wn_validation_status", "nest_status"
-]
-
-class Echo:
-    """An object that implements just the write method of the file-like interface."""
-    def write(self, value):
-        """Write the value by returning it, instead of storing in a buffer."""
-        return value
-
-def get_status(observation: Observation) -> str:
-    """Get observation status string."""
-    if observation.eradication_result:
-        return "eradicated"
-    if observation.reserved_by:
-        return "reserved"
-    return "untreated"
-
-def _prepare_row_data(
-    observation: Observation,
-    is_admin: bool,
-    user_municipality_ids: Set[str]
-) -> List[str]:
-    """
-    Prepare a single row of data for the CSV export with error handling.
-    """
-    try:
-        # Determine allowed fields based on permissions
-        if is_admin or (observation.municipality_id in user_municipality_ids):
-            allowed_fields = user_read_fields
-        else:
-            allowed_fields = public_read_fields
-
-        allowed_fields.extend(["source_id", "latitude", "longitude", "anb_domain", "nest_status"])
-
-        row_data = []
-        for field in CSV_HEADERS:
-            try:
-                if field not in allowed_fields:
-                    row_data.append("")
-                    continue
-
-                if field == "latitude":
-                    row_data.append(str(observation.location.y) if observation.location else "")
-                elif field == "longitude":
-                    row_data.append(str(observation.location.x) if observation.location else "")
-                elif field in ["created_datetime", "modified_datetime", "observation_datetime"]:
-                    datetime_val = getattr(observation, field, None)
-                    if datetime_val:
-                        datetime_val = datetime_val.replace(microsecond=0)
-                        row_data.append(datetime_val.isoformat() + "Z")
-                    else:
-                        row_data.append("")
-                elif field == "province":
-                    row_data.append(observation.province.name if observation.province else "")
-                elif field == "municipality":
-                    row_data.append(observation.municipality.name if observation.municipality else "")
-                elif field == "anb_domain":
-                    row_data.append(str(observation.anb))
-                elif field == "nest_status":
-                    row_data.append(get_status(observation))
-                elif field == "source_id":
-                    row_data.append(str(observation.source_id) if observation.source_id is not None else "")
-                else:
-                    value = getattr(observation, field, "")
-                    row_data.append(str(value) if value is not None else "")
-            except Exception as e:
-                logger.warning(f"Error processing field {field} for observation {observation.id}: {str(e)}")
-                row_data.append("")
-
-        return row_data
-    except Exception as e:
-        logger.error(f"Error preparing row data for observation {observation.id}: {str(e)}")
-        return [""] * len(CSV_HEADERS)
-
-def parse_boolean(value: str) -> bool:
-    """
-    Convert a string value to a boolean.
-    """
-    if isinstance(value, bool):
-        return value
-    if isinstance(value, str):
-        value_lower = value.lower()
-        if value_lower in {"true", "1"}:
-            return True
-        elif value_lower in {"false", "0"}:
-            return False
-    raise ValueError(f"Invalid boolean value: {value}")
-
-def generate_rows(queryset, is_admin: bool, user_municipality_ids: set) -> Iterator[List[str]]:
-    """Generate rows for CSV streaming."""
-    # First yield the headers
-    yield CSV_HEADERS
-
-    # Then yield the data rows
-    for observation in queryset:
-        try:
-            row = _prepare_row_data(observation, is_admin, user_municipality_ids)
-            yield row
-        except Exception as e:
-            logger.error(f"Error processing observation {observation.id}: {str(e)}")
-            continue
-
 @shared_task(
     name="generate_export",
     max_retries=3,
@@ -137,33 +29,9 @@ def generate_export(export_id: int, filters: Dict[str, Any], user_id: Optional[i
         export.status = 'processing'
         export.save()
 
-        # Clean and validate filters before applying
-        valid_fields = {field.name: field for field in Observation._meta.get_fields()}
-        processed_filters = {}
-
-        # Log the incoming filters
-        logger.info(f"Processing filters: {filters}")
-
-        for key, value in filters.items():
-            # Skip pagination and ordering parameters
-            if key in ['page', 'page_size', 'ordering']:
-                continue
-
-            if key in valid_fields:
-                field = valid_fields[key]
-                try:
-                    if isinstance(field, models.BooleanField):
-                        processed_filters[key] = parse_boolean(value)
-                    elif value:  # Only add non-empty values
-                        processed_filters[key] = value
-                except ValueError as e:
-                    logger.warning(f"Skipping invalid filter {key}: {value}, error: {e}")
-                    continue
-
-        logger.info(f"Processed filters: {processed_filters}")
-
+        from ..models import Observation  # Import here to avoid circular imports
         # Apply filters and get initial count
-        queryset = Observation.objects.filter(**processed_filters)
+        queryset = Observation.objects.filter(**filters)
         initial_count = queryset.count()
         logger.info(f"Initial queryset count: {initial_count}")
@@ -177,8 +45,9 @@ def generate_export(export_id: int, filters: Dict[str, Any], user_id: Optional[i
         processed = 0
         rows = [CSV_HEADERS]  # Start with headers
 
+        # Get user permissions
         is_admin = False
-        user_municipality_ids = set()
+        user_municipality_ids: Set[str] = set()
         if user_id:
             try:
                 user = User.objects.get(id=user_id)
@@ -194,7 +63,7 @@ def generate_export(export_id: int, filters: Dict[str, Any], user_id: Optional[i
 
             for observation in batch:
                 try:
-                    row = _prepare_row_data(observation, is_admin, user_municipality_ids)
+                    row = prepare_row_data(observation, is_admin, user_municipality_ids)
                     batch_rows.append(row)
                     processed += 1
@@ -235,7 +104,7 @@ def generate_export(export_id: int, filters: Dict[str, Any], user_id: Optional[i
         export.error_message = str(e)
         export.save()
         raise
-    
+
 @shared_task
 def cleanup_old_exports() -> None:
     """Clean up exports older than 24 hours."""
diff --git a/vespadb/observations/views.py b/vespadb/observations/views.py
index ede1019..111a110 100644
--- a/vespadb/observations/views.py
+++ b/vespadb/observations/views.py
@@ -8,12 +8,16 @@
 import logging
 import csv
 import json
-from typing import TYPE_CHECKING, Any, Union, Optional
-from django.conf import settings
-from django.http import FileResponse, HttpResponseNotFound
+from typing import TYPE_CHECKING, Any, Union
+from django.http import HttpResponseNotFound
 import os
-from django.conf import settings
+from typing import Iterator, Set
+from django.db.models.query import QuerySet
+from django.db.models import Model
+from csv import writer as _writer
+from django.db.models.query import QuerySet
+from django.views.decorators.csrf import csrf_exempt
 from django.contrib.gis.db.models.functions import Transform
 from django.contrib.gis.geos import GEOSGeometry
 from django.core.cache import cache
@@ -68,7 +72,7 @@ class Echo:
 
     """An object that implements just the write method of the file-like interface."""
 
-    def write(self, value):
+    def write(self, value: Any) -> Any:
         """Write the value by returning it, instead of storing in a buffer."""
         return value
 
@@ -776,7 +780,69 @@ def download_export(self, request: HttpRequest) -> Union[StreamingHttpResponse,
         except Exception as e:
             logger.error(f"Error streaming export: {str(e)}")
             return HttpResponseServerError("Error generating export")
+
+    @method_decorator(csrf_exempt)
+    @action(detail=False, methods=['get'], permission_classes=[AllowAny])
+    def export_direct(self, request: HttpRequest) -> Union[StreamingHttpResponse, JsonResponse]:
+        """Stream observations directly as CSV without using Celery."""
+        try:
+            # Initialize the filterset with request parameters
+            filterset = self.filterset_class(
+                data=request.GET,
+                queryset=self.get_queryset().select_related(
+                    'province',
+                    'municipality'
+                )
+            )
+
+            if not filterset.is_valid():
+                return JsonResponse({"error": filterset.errors}, status=400)
+
+            # Get filtered queryset
+            queryset = filterset.qs
+
+            # Check count
+            total_count = queryset.count()
+            if total_count > 10000:
+                return JsonResponse({
+                    "error": f"Export too large. Found {total_count} records, maximum allowed is 10,000"
+                }, status=400)
+
+            # Determine user permissions
+            is_admin = request.user.is_authenticated and request.user.is_superuser
+            user_municipality_ids = set()
+            if request.user.is_authenticated:
+                user_municipality_ids = set(
+                    request.user.municipalities.values_list('id', flat=True)
+                )
+
+            # Create the streaming response with data from the task module
+            from .tasks.export_utils import generate_rows
+            pseudo_buffer = Echo()
+            writer = csv.writer(pseudo_buffer)
+
+            # Stream response with appropriate headers
+            response = StreamingHttpResponse(
+                generate_rows(queryset, writer, is_admin, user_municipality_ids),
+                content_type='text/csv'
+            )
+
+            # Set filename with timestamp
+            timestamp = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
+            response['Content-Disposition'] = f'attachment; filename="observations_export_{timestamp}.csv"'
+
+            # Add CORS headers
+            response["Access-Control-Allow-Origin"] = request.META.get('HTTP_ORIGIN', '*')
+            response["Access-Control-Allow-Credentials"] = "true"
+            response["Access-Control-Allow-Methods"] = "GET, OPTIONS"
+            response["Access-Control-Allow-Headers"] = "Content-Type, Authorization"
+
+            return response
+
+        except Exception as e:
+            logger.exception("Export failed")
+            return JsonResponse({"error": str(e)}, status=500)
+
 @require_GET
 def search_address(request: Request) -> JsonResponse:
     """