Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

add include_in_payload to optimize REST queries where we don't really … #73

Merged
merged 2 commits into from
Nov 11, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -175,7 +175,7 @@ services:
aliases:
- "nfcomposes3"
ports:
- "127.0.0.1:${SEAWEEDFS_DEV_OUTSIDE_PORT:-8001}:8000"
- "127.0.0.1:${SEAWEEDFS_DEV_OUTSIDE_PORT:-8000}:8000"
volumes:
- "./devenv/s3_config/s3.json:/etc/seaweedfs/s3.json"
- "seaweedfs_buckets:/data"
Expand Down
4 changes: 2 additions & 2 deletions skipper/skipper/dataseries/storage/contract/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
from rest_framework.generics import get_object_or_404
from rest_framework.request import Request
from rest_framework.serializers import SerializerMetaclass
from typing import Dict, Any, Optional, Union, Tuple
from typing import Dict, Any, Optional, Union, Tuple, List
from uuid import UUID

from skipper.core.serializers.base import BaseSerializer
Expand Down Expand Up @@ -244,7 +244,7 @@ def impl_internal_to_representation(
self,
data_point: Any,
data_series: DataSeries,
external_id_as_dimension_identifier: bool
external_id_as_dimension_identifier: bool,
) -> Dict[str, Any]:
raise NotImplementedError()

Expand Down
2 changes: 2 additions & 0 deletions skipper/skipper/dataseries/storage/contract/view.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,8 @@ def get_changes_since(self) -> Optional[datetime.datetime]: ...

def should_include_versions(self) -> bool: ...

def get_include_in_payload(self) -> Optional[List[str]]: ...


class StorageViewAdapter(metaclass=ABCMeta):
"""
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,7 @@ def create_view_as(

data_sql = data_series_as_sql_table(
data_series,
include_in_payload=None,
data_series_query_info=data_series_query_info,
resolve_dimension_external_ids=identify_dimensions_by_external_id
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,7 @@ def render_pagination_data_select(include_pagination_data: bool) -> str:


def _data_series_as_sql_table(
include_in_payload: Optional[List[str]],
payload_as_json: bool,
point_in_time: bool,
changes_since: bool,
Expand All @@ -101,6 +102,9 @@ def _data_series_as_sql_table(
and len(_data_series_query_info.dimensions.keys()) > 0

all_select_infos = select_infos(_data_series_query_info)
if include_in_payload is not None:
all_select_infos = [select_info for select_info in all_select_infos if select_info.unescaped_display_id in include_in_payload]

central_table_sql = single_data_series_as_sql_table(
all_select_infos=all_select_infos,
data_series_query_info=_data_series_query_info,
Expand Down Expand Up @@ -222,14 +226,15 @@ def _data_series_as_sql_table(

def data_series_as_sql_table(
data_series: DataSeries,
include_in_payload: List[str],
payload_as_json: bool = False,
point_in_time: bool = False,
changes_since: bool = False,
include_versions: bool = False,
filter_str: str = '',
resolve_dimension_external_ids: bool = False,
data_series_query_info: Optional[DataSeriesQueryInfo] = None,
use_materialized: Optional[bool] = None
use_materialized: Optional[bool] = None,
) -> str:
_data_series_query_info: DataSeriesQueryInfo
if data_series_query_info is None:
Expand All @@ -238,6 +243,7 @@ def data_series_as_sql_table(
_data_series_query_info = data_series_query_info

sql = _data_series_as_sql_table(
include_in_payload=include_in_payload,
payload_as_json=payload_as_json,
point_in_time=point_in_time,
changes_since=changes_since,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ def materialize_view_as(schema_name: str, overwrite: bool, data_series_id: str,
)
if not exists_already:
query = f"""CREATE MATERIALIZED VIEW IF NOT EXISTS {schema_name}.{escaped_view_name}
AS {data_series_as_sql_table(data_series)}"""
AS {data_series_as_sql_table(data_series, include_in_payload=None)}"""
query_params: Dict[str, Any] = {select_info.payload_variable_name: select_info.unescaped_display_id for
select_info in select_infos(compute_data_series_query_info(data_series))}
cursor.execute(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,10 +41,10 @@ def impl_internal_to_representation(
query_params: Dict[str, Any] = {select_info.payload_variable_name: select_info.unescaped_display_id for
select_info in
select_infos(self.get_data_series_children_query_info())}
_view = self.context.get('view')

query_str = data_series_as_sql_table(
data_series=data_series,
include_in_payload=None,
payload_as_json=True,
point_in_time=self.point_in_time is not None,
include_versions=self.should_include_versions,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,7 @@ def precompute_filter_part(


def raw_display_data_point_query(
include_in_payload: Optional[List[str]],
filter_value: Dict[str, Any],
data_series: DataSeries,
external_id_as_dimension_identifier: bool,
Expand Down Expand Up @@ -205,6 +206,7 @@ def raw_display_data_point_query(
query_params['changes_since'] = changes_since

query_str = data_series_as_sql_table(
include_in_payload=include_in_payload,
data_series=data_series_obj,
payload_as_json=True,
point_in_time=is_point_in_time,
Expand Down Expand Up @@ -255,6 +257,7 @@ def access_object(
return obj

raw_query = raw_display_data_point_query(
include_in_payload=view.get_include_in_payload(),
data_point_id=data_point_id,
filter_value=view.get_filter_value(),
data_series=view.access_data_series(),
Expand Down Expand Up @@ -332,6 +335,7 @@ def encode_last_id_for_pagination(self, view: BaseDataSeries_DataPointViewSet, d
def get_next_page_query_for_pagination(self, view: BaseDataSeries_DataPointViewSet, last_query: str, limit: int,
request: HttpRequest) -> RawQuerySet: # type: ignore
return raw_display_data_point_query(
include_in_payload=view.get_include_in_payload(), # we actually don't need anything in the payload here
start_object=last_query,
limit=limit,
filter_value=view.get_filter_value(),
Expand All @@ -353,6 +357,7 @@ def get_prev_page_query_for_pagination(
) -> Optional[RawQuerySet]: # type: ignore
# FIXME: this could be a simpler query without as many joins
return raw_display_data_point_query(
include_in_payload=view.get_include_in_payload(), # we actually don't need anything in the payload here
start_object=last_query,
reverse=True,
limit=limit,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -156,6 +156,7 @@ class FakedFile:
)

_sql = data_series_as_sql_table(
include_in_payload=None,
data_series=_data_series_obj,
payload_as_json=True,
point_in_time=False,
Expand Down
12 changes: 12 additions & 0 deletions skipper/skipper/dataseries/views/datapoint/crud.py
Original file line number Diff line number Diff line change
Expand Up @@ -198,6 +198,17 @@ def get_filter_value(self) -> Dict[str, Any]:
else:
filter_value = {}
return filter_value

def get_include_in_payload(self) -> Optional[List[str]]:
if 'include_in_payload' in self.request.GET:
_include_in_payload_str = self.request.GET['include_in_payload']
if _include_in_payload_str != None:
return list(_include_in_payload_str.split(','))
else:
return None
else:
return None


def get_external_ids(self) -> Optional[List[str]]:
return self.request.GET.getlist('external_id') if 'external_id' in self.request.GET else None # type: ignore
Expand Down Expand Up @@ -275,6 +286,7 @@ def get_description_string(self) -> str:
- count[=true] <br>
- external_id=<str> (repeatable) <br>
- identify_dimensions_by_external_id[=true] <br>
- include_in_payload=<str> (comma separated list of dimension/fact external ids to include in the payload) <br>
"""
if _history:
doc_string = f"""
Expand Down
1 change: 1 addition & 0 deletions skipper/skipper/dataseries/views/metamodel/cube_sql.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,6 +121,7 @@ def get(self, request: Request, **kwargs: str) -> HttpResponse:

sql_template = data_series_as_sql_table(
data_series,
include_in_payload=None,
payload_as_json=payload_as_json(request),
resolve_dimension_external_ids=use_external_id_as_dimension_identifier(
cast(Dict[str, Any], request.GET)
Expand Down
Loading