Commit

Rachel Chen authored and Rachel Chen committed Jan 22, 2025
1 parent da017ea commit 71afed1
Showing 5 changed files with 59 additions and 60 deletions.
2 changes: 1 addition & 1 deletion src/sentry/api/endpoints/organization_spans_fields.py
@@ -120,7 +120,7 @@ def get(self, request: Request, organization: Organization) -> Response:
limit=max_span_tags,
offset=0,
type=(
AttributeKey.Type.TYPE_FLOAT
AttributeKey.Type.TYPE_DOUBLE
if serialized["type"] == "number"
else AttributeKey.Type.TYPE_STRING
),
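
For context, the hunk above changes only the enum member the endpoint forwards to the RPC layer. A minimal sketch of that selection, assuming the AttributeKey message comes from the sentry_protos package (the import is not shown in this hunk) and using a hypothetical helper name:

```python
# Sketch only: the import path and helper name are assumptions, not part of
# this commit; the branch logic mirrors the hunk above.
from sentry_protos.snuba.v1.trace_item_attribute_pb2 import AttributeKey


def attribute_type_for(serialized_type: str) -> "AttributeKey.Type.ValueType":
    # Numeric span tags are now requested as TYPE_DOUBLE rather than TYPE_FLOAT;
    # everything else stays a string attribute.
    if serialized_type == "number":
        return AttributeKey.Type.TYPE_DOUBLE
    return AttributeKey.Type.TYPE_STRING
```
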
2 changes: 1 addition & 1 deletion src/sentry/search/eap/columns.py
@@ -129,7 +129,7 @@ def proto_type(self) -> AttributeKey.Type.ValueType:
see: https://www.notion.so/sentry/Should-count-return-an-int-in-the-v1-RPC-API-1348b10e4b5d80498bfdead194cc304e
"""
return constants.FLOAT
return constants.DOUBLE


def simple_sentry_field(field) -> ResolvedColumn:
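
The proto_type property above sits on the aggregate-resolution path (the docstring's Notion link discusses whether count() should return an int); it keeps reporting a floating-point type, now spelled DOUBLE. A small sanity sketch, assuming the module paths implied by the file layout:

```python
# Assumed import paths (inferred from src/sentry/search/eap/constants.py and
# the sentry_protos package); the equality follows from the constants hunk below.
from sentry.search.eap import constants
from sentry_protos.snuba.v1.trace_item_attribute_pb2 import AttributeKey

assert constants.DOUBLE == AttributeKey.TYPE_DOUBLE
```
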
51 changes: 25 additions & 26 deletions src/sentry/search/eap/constants.py
@@ -46,39 +46,38 @@

STRING = AttributeKey.TYPE_STRING
BOOLEAN = AttributeKey.TYPE_BOOLEAN
FLOAT = AttributeKey.TYPE_FLOAT
DOUBLE = AttributeKey.TYPE_DOUBLE
INT = AttributeKey.TYPE_INT

# TODO: we need a datetime type
# Maps search types back to types for the proto
TYPE_MAP: dict[SearchType, AttributeKey.Type.ValueType] = {
"bit": FLOAT,
"byte": FLOAT,
"kibibyte": FLOAT,
"mebibyte": FLOAT,
"gibibyte": FLOAT,
"tebibyte": FLOAT,
"pebibyte": FLOAT,
"exbibyte": FLOAT,
"kilobyte": FLOAT,
"megabyte": FLOAT,
"gigabyte": FLOAT,
"terabyte": FLOAT,
"petabyte": FLOAT,
"exabyte": FLOAT,
"nanosecond": FLOAT,
"microsecond": FLOAT,
"millisecond": FLOAT,
"second": FLOAT,
"minute": FLOAT,
"hour": FLOAT,
"day": FLOAT,
"week": FLOAT,
"duration": FLOAT,
"bit": DOUBLE,
"byte": DOUBLE,
"kibibyte": DOUBLE,
"mebibyte": DOUBLE,
"gibibyte": DOUBLE,
"tebibyte": DOUBLE,
"pebibyte": DOUBLE,
"exbibyte": DOUBLE,
"kilobyte": DOUBLE,
"megabyte": DOUBLE,
"gigabyte": DOUBLE,
"terabyte": DOUBLE,
"petabyte": DOUBLE,
"exabyte": DOUBLE,
"nanosecond": DOUBLE,
"microsecond": DOUBLE,
"millisecond": DOUBLE,
"second": DOUBLE,
"minute": DOUBLE,
"hour": DOUBLE,
"day": DOUBLE,
"week": DOUBLE,
"duration": DOUBLE,
"integer": INT,
"number": FLOAT,
"percentage": FLOAT,
"number": DOUBLE,
"percentage": DOUBLE,
"string": STRING,
"boolean": BOOLEAN,
}
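
As a usage illustration (hypothetical caller, not part of this commit), every size and duration unit in TYPE_MAP now resolves to the DOUBLE proto type, while integer and string search types are untouched:

```python
# Module path inferred from src/sentry/search/eap/constants.py; the specific
# keys and values are taken from the TYPE_MAP shown above.
from sentry.search.eap import constants

assert constants.TYPE_MAP["millisecond"] == constants.DOUBLE  # previously FLOAT
assert constants.TYPE_MAP["gigabyte"] == constants.DOUBLE     # previously FLOAT
assert constants.TYPE_MAP["integer"] == constants.INT         # unchanged
assert constants.TYPE_MAP["string"] == constants.STRING       # unchanged
```
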
8 changes: 4 additions & 4 deletions src/sentry/search/eap/resolver.py
@@ -17,7 +17,7 @@
AttributeAggregation,
AttributeKey,
AttributeValue,
FloatArray,
DoubleArray,
IntArray,
StrArray,
VirtualColumnContext,
@@ -405,18 +405,18 @@ def _resolve_search_value(
)
elif isinstance(value, (float, int)):
return AttributeValue(val_int=int(value))
elif column_type == constants.FLOAT:
elif column_type == constants.DOUBLE:
if operator in constants.IN_OPERATORS:
if isinstance(value, list):
return AttributeValue(
val_float_array=FloatArray(values=[val for val in value])
val_double_array=DoubleArray(values=[val for val in value])
)
else:
raise InvalidSearchQuery(
f"{value} is not a valid value for doing an IN filter"
)
elif isinstance(value, float):
return AttributeValue(val_float=value)
return AttributeValue(val_double=value)
elif column_type == constants.BOOLEAN:
if operator in constants.IN_OPERATORS:
raise InvalidSearchQuery(
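
For reference, the proto values the branch above now constructs look roughly like this; a sketch assuming the messages live in the sentry_protos package, not a full reproduction of _resolve_search_value:

```python
# Sketch under the assumption that AttributeValue and DoubleArray come from
# the sentry_protos package; field names are taken from the hunk above.
from sentry_protos.snuba.v1.trace_item_attribute_pb2 import AttributeValue, DoubleArray

# A plain numeric comparison now carries val_double instead of val_float ...
single = AttributeValue(val_double=123.0)

# ... and IN filters carry a DoubleArray instead of a FloatArray.
many = AttributeValue(val_double_array=DoubleArray(values=[123.0, 456.0, 789.0]))
```
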
56 changes: 28 additions & 28 deletions tests/sentry/search/eap/test_spans.py
@@ -9,8 +9,8 @@
AttributeAggregation,
AttributeKey,
AttributeValue,
DoubleArray,
ExtrapolationMode,
FloatArray,
Function,
StrArray,
VirtualColumnContext,
@@ -62,9 +62,9 @@ def test_numeric_query(self):
where, having, _ = self.resolver.resolve_query("ai.total_tokens.used:123")
assert where == TraceItemFilter(
comparison_filter=ComparisonFilter(
key=AttributeKey(name="ai_total_tokens_used", type=AttributeKey.Type.TYPE_FLOAT),
key=AttributeKey(name="ai_total_tokens_used", type=AttributeKey.Type.TYPE_DOUBLE),
op=ComparisonFilter.OP_EQUALS,
value=AttributeValue(val_float=123),
value=AttributeValue(val_double=123),
)
)
assert having is None
@@ -110,9 +110,9 @@ def test_in_numeric_filter(self):
where, having, _ = self.resolver.resolve_query("ai.total_tokens.used:[123,456,789]")
assert where == TraceItemFilter(
comparison_filter=ComparisonFilter(
key=AttributeKey(name="ai_total_tokens_used", type=AttributeKey.Type.TYPE_FLOAT),
key=AttributeKey(name="ai_total_tokens_used", type=AttributeKey.Type.TYPE_DOUBLE),
op=ComparisonFilter.OP_IN,
value=AttributeValue(val_float_array=FloatArray(values=[123, 456, 789])),
value=AttributeValue(val_double_array=DoubleArray(values=[123, 456, 789])),
)
)
assert having is None
@@ -121,9 +121,9 @@ def test_greater_than_numeric_filter(self):
where, having, _ = self.resolver.resolve_query("ai.total_tokens.used:>123")
assert where == TraceItemFilter(
comparison_filter=ComparisonFilter(
key=AttributeKey(name="ai_total_tokens_used", type=AttributeKey.Type.TYPE_FLOAT),
key=AttributeKey(name="ai_total_tokens_used", type=AttributeKey.Type.TYPE_DOUBLE),
op=ComparisonFilter.OP_GREATER_THAN,
value=AttributeValue(val_float=123),
value=AttributeValue(val_double=123),
)
)
assert having is None
@@ -265,7 +265,7 @@ def test_simple_aggregate_query(self):
aggregation=AttributeAggregation(
aggregate=Function.FUNCTION_COUNT,
key=AttributeKey(
name="sentry.duration_ms", type=AttributeKey.Type.TYPE_FLOAT
name="sentry.duration_ms", type=AttributeKey.Type.TYPE_DOUBLE
),
label="count()",
extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED,
@@ -291,7 +291,7 @@ def test_simple_negation_aggregate_query(self):
aggregation=AttributeAggregation(
aggregate=Function.FUNCTION_COUNT,
key=AttributeKey(
name="sentry.duration_ms", type=AttributeKey.Type.TYPE_FLOAT
name="sentry.duration_ms", type=AttributeKey.Type.TYPE_DOUBLE
),
label="count()",
extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED,
@@ -308,7 +308,7 @@ def test_aggregate_query_on_custom_attributes(self):
comparison_filter=AggregationComparisonFilter(
aggregation=AttributeAggregation(
aggregate=Function.FUNCTION_AVG,
key=AttributeKey(name="foo", type=AttributeKey.Type.TYPE_FLOAT),
key=AttributeKey(name="foo", type=AttributeKey.Type.TYPE_DOUBLE),
label="avg(tags[foo, number])",
extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED,
),
@@ -325,7 +325,7 @@ def test_aggregate_query_on_attributes_with_units(self):
comparison_filter=AggregationComparisonFilter(
aggregation=AttributeAggregation(
aggregate=Function.FUNCTION_AVG,
key=AttributeKey(name="lcp", type=AttributeKey.Type.TYPE_FLOAT),
key=AttributeKey(name="lcp", type=AttributeKey.Type.TYPE_DOUBLE),
label="avg(measurements.lcp)",
extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED,
),
@@ -345,7 +345,7 @@ def test_aggregate_query_with_multiple_conditions(self):
aggregation=AttributeAggregation(
aggregate=Function.FUNCTION_COUNT,
key=AttributeKey(
name="sentry.duration_ms", type=AttributeKey.Type.TYPE_FLOAT
name="sentry.duration_ms", type=AttributeKey.Type.TYPE_DOUBLE
),
label="count()",
extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED,
@@ -358,7 +358,7 @@ def test_aggregate_query_with_multiple_conditions(self):
comparison_filter=AggregationComparisonFilter(
aggregation=AttributeAggregation(
aggregate=Function.FUNCTION_AVG,
key=AttributeKey(name="lcp", type=AttributeKey.Type.TYPE_FLOAT),
key=AttributeKey(name="lcp", type=AttributeKey.Type.TYPE_DOUBLE),
label="avg(measurements.lcp)",
extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED,
),
@@ -381,7 +381,7 @@ def test_aggregate_query_with_multiple_conditions_explicit_and(self):
aggregation=AttributeAggregation(
aggregate=Function.FUNCTION_COUNT,
key=AttributeKey(
name="sentry.duration_ms", type=AttributeKey.Type.TYPE_FLOAT
name="sentry.duration_ms", type=AttributeKey.Type.TYPE_DOUBLE
),
label="count()",
extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED,
@@ -394,7 +394,7 @@ def test_aggregate_query_with_multiple_conditions_explicit_and(self):
comparison_filter=AggregationComparisonFilter(
aggregation=AttributeAggregation(
aggregate=Function.FUNCTION_AVG,
key=AttributeKey(name="lcp", type=AttributeKey.Type.TYPE_FLOAT),
key=AttributeKey(name="lcp", type=AttributeKey.Type.TYPE_DOUBLE),
label="avg(measurements.lcp)",
extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED,
),
@@ -417,7 +417,7 @@ def test_aggregate_query_with_multiple_conditions_explicit_or(self):
aggregation=AttributeAggregation(
aggregate=Function.FUNCTION_COUNT,
key=AttributeKey(
name="sentry.duration_ms", type=AttributeKey.Type.TYPE_FLOAT
name="sentry.duration_ms", type=AttributeKey.Type.TYPE_DOUBLE
),
label="count()",
extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED,
@@ -430,7 +430,7 @@ def test_aggregate_query_with_multiple_conditions_explicit_or(self):
comparison_filter=AggregationComparisonFilter(
aggregation=AttributeAggregation(
aggregate=Function.FUNCTION_AVG,
key=AttributeKey(name="lcp", type=AttributeKey.Type.TYPE_FLOAT),
key=AttributeKey(name="lcp", type=AttributeKey.Type.TYPE_DOUBLE),
label="avg(measurements.lcp)",
extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED,
),
@@ -459,7 +459,7 @@ def test_aggregate_query_with_multiple_conditions_nested(self):
aggregate=Function.FUNCTION_COUNT,
key=AttributeKey(
name="sentry.duration_ms",
type=AttributeKey.Type.TYPE_FLOAT,
type=AttributeKey.Type.TYPE_DOUBLE,
),
label="count()",
extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED,
@@ -474,7 +474,7 @@ def test_aggregate_query_with_multiple_conditions_nested(self):
aggregate=Function.FUNCTION_AVG,
key=AttributeKey(
name="http.response_content_length",
type=AttributeKey.Type.TYPE_FLOAT,
type=AttributeKey.Type.TYPE_DOUBLE,
),
label="avg(http.response_content_length)",
extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED,
@@ -495,7 +495,7 @@ def test_aggregate_query_with_multiple_conditions_nested(self):
aggregate=Function.FUNCTION_COUNT,
key=AttributeKey(
name="sentry.duration_ms",
type=AttributeKey.Type.TYPE_FLOAT,
type=AttributeKey.Type.TYPE_DOUBLE,
),
label="count()",
extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED,
@@ -509,7 +509,7 @@ def test_aggregate_query_with_multiple_conditions_nested(self):
aggregation=AttributeAggregation(
aggregate=Function.FUNCTION_AVG,
key=AttributeKey(
name="lcp", type=AttributeKey.Type.TYPE_FLOAT
name="lcp", type=AttributeKey.Type.TYPE_DOUBLE
),
label="avg(measurements.lcp)",
extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED,
@@ -582,15 +582,15 @@ def test_simple_string_tag(self):
def test_simple_number_tag(self):
resolved_column, virtual_context = self.resolver.resolve_column("tags[foo, number]")
assert resolved_column.proto_definition == AttributeKey(
name="foo", type=AttributeKey.Type.TYPE_FLOAT
name="foo", type=AttributeKey.Type.TYPE_DOUBLE
)
assert virtual_context is None

def test_sum_function(self):
resolved_column, virtual_context = self.resolver.resolve_column("sum(span.self_time)")
assert resolved_column.proto_definition == AttributeAggregation(
aggregate=Function.FUNCTION_SUM,
key=AttributeKey(name="sentry.exclusive_time_ms", type=AttributeKey.Type.TYPE_FLOAT),
key=AttributeKey(name="sentry.exclusive_time_ms", type=AttributeKey.Type.TYPE_DOUBLE),
label="sum(span.self_time)",
extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED,
)
@@ -600,7 +600,7 @@ def test_sum_default_argument(self):
resolved_column, virtual_context = self.resolver.resolve_column("sum()")
assert resolved_column.proto_definition == AttributeAggregation(
aggregate=Function.FUNCTION_SUM,
key=AttributeKey(name="sentry.duration_ms", type=AttributeKey.Type.TYPE_FLOAT),
key=AttributeKey(name="sentry.duration_ms", type=AttributeKey.Type.TYPE_DOUBLE),
label="sum()",
extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED,
)
@@ -610,7 +610,7 @@ def test_function_alias(self):
resolved_column, virtual_context = self.resolver.resolve_column("sum() as test")
assert resolved_column.proto_definition == AttributeAggregation(
aggregate=Function.FUNCTION_SUM,
key=AttributeKey(name="sentry.duration_ms", type=AttributeKey.Type.TYPE_FLOAT),
key=AttributeKey(name="sentry.duration_ms", type=AttributeKey.Type.TYPE_DOUBLE),
label="test",
extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED,
)
@@ -620,15 +620,15 @@ def test_count(self):
resolved_column, virtual_context = self.resolver.resolve_column("count()")
assert resolved_column.proto_definition == AttributeAggregation(
aggregate=Function.FUNCTION_COUNT,
key=AttributeKey(name="sentry.duration_ms", type=AttributeKey.Type.TYPE_FLOAT),
key=AttributeKey(name="sentry.duration_ms", type=AttributeKey.Type.TYPE_DOUBLE),
label="count()",
extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED,
)
assert virtual_context is None
resolved_column, virtual_context = self.resolver.resolve_column("count(span.duration)")
assert resolved_column.proto_definition == AttributeAggregation(
aggregate=Function.FUNCTION_COUNT,
key=AttributeKey(name="sentry.duration_ms", type=AttributeKey.Type.TYPE_FLOAT),
key=AttributeKey(name="sentry.duration_ms", type=AttributeKey.Type.TYPE_DOUBLE),
label="count(span.duration)",
extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED,
)
@@ -638,7 +638,7 @@ def test_p50(self):
resolved_column, virtual_context = self.resolver.resolve_column("p50()")
assert resolved_column.proto_definition == AttributeAggregation(
aggregate=Function.FUNCTION_P50,
key=AttributeKey(name="sentry.duration_ms", type=AttributeKey.Type.TYPE_FLOAT),
key=AttributeKey(name="sentry.duration_ms", type=AttributeKey.Type.TYPE_DOUBLE),
label="p50()",
extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED,
)
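
Condensed, the numeric-filter tests above now assert a double-typed proto shape end to end. A self-contained sketch of that expected shape (the sentry_protos import paths are assumptions; the field values come from test_numeric_query above, and this does not reproduce the resolver call itself):

```python
# Assumed import paths for the sentry_protos messages; values mirror
# test_numeric_query above.
from sentry_protos.snuba.v1.trace_item_attribute_pb2 import AttributeKey, AttributeValue
from sentry_protos.snuba.v1.trace_item_filter_pb2 import ComparisonFilter, TraceItemFilter

expected = TraceItemFilter(
    comparison_filter=ComparisonFilter(
        key=AttributeKey(name="ai_total_tokens_used", type=AttributeKey.Type.TYPE_DOUBLE),
        op=ComparisonFilter.OP_EQUALS,
        value=AttributeValue(val_double=123),
    )
)

# Protobuf stores the integer literal in the double-typed field.
assert expected.comparison_filter.value.val_double == 123.0
```
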
