From 2bbddd58b7915e1afc88e8896c12bc4ab6cdf1d3 Mon Sep 17 00:00:00 2001 From: Alexander Barannikov Date: Fri, 27 Dec 2024 16:45:51 +0000 Subject: [PATCH 01/13] OPIK-672 [SDK] Fix cyclic reference/recursion issue in json_encoder --- sdks/python/src/opik/jsonable_encoder.py | 33 +++++++++++++++--------- 1 file changed, 21 insertions(+), 12 deletions(-) diff --git a/sdks/python/src/opik/jsonable_encoder.py b/sdks/python/src/opik/jsonable_encoder.py index 8fb76d77ac..de3b8acc90 100644 --- a/sdks/python/src/opik/jsonable_encoder.py +++ b/sdks/python/src/opik/jsonable_encoder.py @@ -1,12 +1,11 @@ -import logging import dataclasses import datetime as dt - -from typing import Callable, Any, Type, Set, Tuple - +import logging from enum import Enum from pathlib import PurePath from types import GeneratorType +from typing import Any, Callable, Optional, Set, Tuple, Type + import pydantic import opik.rest_api.core.datetime_utils as datetime_utils @@ -25,18 +24,28 @@ def register_encoder_extension(obj_type: Type, encoder: Callable[[Any], Any]) -> _ENCODER_EXTENSIONS.add((obj_type, encoder)) -def jsonable_encoder(obj: Any) -> Any: +def jsonable_encoder(obj: Any, seen: Optional[Set[int]] = None) -> Any: """ This is a modified version of the serializer generated by Fern in rest_api.core.jsonable_encoder. The code is simplified to serialize complex objects into a textual representation. """ + if seen is None: + seen = set() + + obj_id = id(obj) + if obj_id in seen: + return None + + if hasattr(obj, "__dict__"): + seen.add(obj_id) + try: if dataclasses.is_dataclass(obj) or isinstance(obj, pydantic.BaseModel): obj_dict = obj.__dict__ - return jsonable_encoder(obj_dict) + return jsonable_encoder(obj_dict, seen) if isinstance(obj, Enum): - return jsonable_encoder(obj.value) + return jsonable_encoder(obj.value, seen) if isinstance(obj, PurePath): return str(obj) if isinstance(obj, (str, int, float, type(None))): @@ -50,22 +59,22 @@ def jsonable_encoder(obj: Any) -> Any: allowed_keys = set(obj.keys()) for key, value in obj.items(): if key in allowed_keys: - encoded_key = jsonable_encoder(key) - encoded_value = jsonable_encoder(value) + encoded_key = jsonable_encoder(key, seen) + encoded_value = jsonable_encoder(value, seen) encoded_dict[encoded_key] = encoded_value return encoded_dict if isinstance(obj, (list, set, frozenset, GeneratorType, tuple)): encoded_list = [] for item in obj: - encoded_list.append(jsonable_encoder(item)) + encoded_list.append(jsonable_encoder(item, seen)) return encoded_list for type_, encoder in _ENCODER_EXTENSIONS: if isinstance(obj, type_): - return jsonable_encoder(encoder(obj)) + return jsonable_encoder(encoder(obj), seen) if np is not None and isinstance(obj, np.ndarray): - return jsonable_encoder(obj.tolist()) + return jsonable_encoder(obj.tolist(), seen) except Exception: LOGGER.debug("Failed to serialize object.", exc_info=True) From 6510ae902da11b6a55bf3eecc683ab6f3883fbb3 Mon Sep 17 00:00:00 2001 From: Alexander Barannikov Date: Mon, 30 Dec 2024 14:39:11 +0000 Subject: [PATCH 02/13] avoid infinite recursion in cyclic structures --- sdks/python/src/opik/jsonable_encoder.py | 16 +++- .../test_jsonable_encoder.py | 73 ++++++++++++++++++- 2 files changed, 83 insertions(+), 6 deletions(-) diff --git a/sdks/python/src/opik/jsonable_encoder.py b/sdks/python/src/opik/jsonable_encoder.py index de3b8acc90..ffecd1ee29 100644 --- a/sdks/python/src/opik/jsonable_encoder.py +++ b/sdks/python/src/opik/jsonable_encoder.py @@ -28,15 +28,16 @@ def jsonable_encoder(obj: Any, seen: 
Optional[Set[int]] = None) -> Any: """ This is a modified version of the serializer generated by Fern in rest_api.core.jsonable_encoder. The code is simplified to serialize complex objects into a textual representation. + It also handles cyclic references to avoid infinite recursion. """ if seen is None: seen = set() - obj_id = id(obj) - if obj_id in seen: - return None - if hasattr(obj, "__dict__"): + obj_id = id(obj) + if obj_id in seen: + LOGGER.debug(f"Found cyclic reference to {type(obj).__name__} id={obj_id}") + return f"" seen.add(obj_id) try: @@ -79,6 +80,13 @@ def jsonable_encoder(obj: Any, seen: Optional[Set[int]] = None) -> Any: except Exception: LOGGER.debug("Failed to serialize object.", exc_info=True) + finally: + # Once done encoding this object, remove from `seen`, + # so the same object can appear again at a sibling branch. + if hasattr(obj, "__dict__"): + obj_id = id(obj) + seen.remove(obj_id) + data = str(obj) return data diff --git a/sdks/python/tests/unit/message_processing/test_jsonable_encoder.py b/sdks/python/tests/unit/message_processing/test_jsonable_encoder.py index 73e0879b59..f436e4b976 100644 --- a/sdks/python/tests/unit/message_processing/test_jsonable_encoder.py +++ b/sdks/python/tests/unit/message_processing/test_jsonable_encoder.py @@ -1,14 +1,83 @@ -from typing import Any +import dataclasses from datetime import date, datetime, timezone from threading import Lock +from typing import Any, Optional import numpy as np import pytest -import dataclasses import opik.jsonable_encoder as jsonable_encoder +@dataclasses.dataclass +class Node: + value: int + child: Optional["Node"] = None + + +def test_jsonable_encoder__cyclic_reference(): + """ + Test that the encoder detects cyclic references and does not infinitely recurse. + """ + # Create a simple two-node cycle: A -> B -> A + node_a = Node(value=1) + node_b = Node(value=2) + node_a.child = node_b + node_b.child = node_a + + encoded = jsonable_encoder.jsonable_encoder(node_a) + # The exact format of the cycle marker can vary; we check that: + # 1. We get some structure for node_a (like a dict). + # 2. Inside node_a, there's a reference to node_b (a dict). + # 3. Inside node_b, there's a "cyclic reference" marker instead of a full node_a object. 
+ print("=" * 150) + print(encoded) + assert isinstance(encoded, dict) + assert "value" in encoded + assert "child" in encoded + + # node_a.child (which is node_b) should be a dict + assert isinstance(encoded["child"], dict) + assert "value" in encoded["child"] + assert "child" in encoded["child"] + + # node_b.child should be the cycle marker + cycle_marker = encoded["child"]["child"] + print("=" * 150) + print(cycle_marker) + assert isinstance( + cycle_marker, str + ), "Expected a string marker for cyclic reference" + assert ( + " Date: Fri, 3 Jan 2025 12:30:20 +0000 Subject: [PATCH 03/13] log kickoff + execute_task --- .../integrations/aisuite/aisuite_decorator.py | 11 +- .../src/opik/integrations/crewai/__init__.py | 4 + .../integrations/crewai/crewai_decorator.py | 143 ++++++++++++++++++ .../opik/integrations/crewai/opik_tracker.py | 30 ++++ .../src/opik/integrations/dspy/__init__.py | 3 + 5 files changed, 186 insertions(+), 5 deletions(-) create mode 100644 sdks/python/src/opik/integrations/crewai/__init__.py create mode 100644 sdks/python/src/opik/integrations/crewai/crewai_decorator.py create mode 100644 sdks/python/src/opik/integrations/crewai/opik_tracker.py diff --git a/sdks/python/src/opik/integrations/aisuite/aisuite_decorator.py b/sdks/python/src/opik/integrations/aisuite/aisuite_decorator.py index 6d886de15a..57146d6d7b 100644 --- a/sdks/python/src/opik/integrations/aisuite/aisuite_decorator.py +++ b/sdks/python/src/opik/integrations/aisuite/aisuite_decorator.py @@ -1,5 +1,5 @@ import logging -from typing import Any, Callable, Dict, List, Optional, Tuple +from typing import Any, AsyncGenerator, Callable, Dict, Generator, List, Optional, Tuple, Union import aisuite.framework as aisuite_chat_completion from openai.types.chat import chat_completion as openai_chat_completion @@ -7,7 +7,6 @@ from opik import dict_utils from opik.decorator import arguments_helpers, base_track_decorator - LOGGER = logging.getLogger(__name__) KWARGS_KEYS_TO_LOG_AS_INPUTS = ["messages"] @@ -131,6 +130,8 @@ def _generators_handler( self, output: Any, capture_output: bool, - generations_aggregator: Optional[Callable[[List[Any]], Any]], - ) -> None: - return None + generations_aggregator: Optional[Callable[[List[Any]], str]], + ) -> Optional[Union[Generator, AsyncGenerator]]: + return super()._generators_handler( + output, capture_output, generations_aggregator + ) diff --git a/sdks/python/src/opik/integrations/crewai/__init__.py b/sdks/python/src/opik/integrations/crewai/__init__.py new file mode 100644 index 0000000000..6621f7d135 --- /dev/null +++ b/sdks/python/src/opik/integrations/crewai/__init__.py @@ -0,0 +1,4 @@ +from .opik_tracker import track_crewai + + +__all__ = ["track_crewai"] diff --git a/sdks/python/src/opik/integrations/crewai/crewai_decorator.py b/sdks/python/src/opik/integrations/crewai/crewai_decorator.py new file mode 100644 index 0000000000..8ee87833d8 --- /dev/null +++ b/sdks/python/src/opik/integrations/crewai/crewai_decorator.py @@ -0,0 +1,143 @@ +import logging +from typing import Any, AsyncGenerator, Callable, Dict, Generator, List, Optional, Tuple, Union + +from opik import dict_utils +from opik.decorator import arguments_helpers, base_track_decorator + +LOGGER = logging.getLogger(__name__) + +AGENT_KWARGS_KEYS_TO_LOG_AS_INPUTS = [ + # "agent_executor", + # "allow_delegation", + "backstory", + # "cache", + # "cache_handler", + # "crew", + # "formatting_errors", + "goal", + # "i18n", + # "id", + "llm", + # "max_iter", + # "max_rpm", + # "max_tokens", + "role", + "tools", + # 
"tools_handler", + # "verbose", +] + +TASK_KWARGS_KEYS_TO_LOG_AS_INPUTS = [ + # 'agent', + # 'async_execution', + # 'callback', + 'config', + 'context', + # 'converter_cls', + # 'delegations', + 'description', + 'expected_output', + 'human_input', + # 'i18n', + # 'id', + 'name', + # 'output', + # 'output_file', + # 'output_json', + # 'output_pydantic', + 'processed_by_agents', + 'prompt_context', + 'tools', + # 'tools_errors', + # 'used_tools', +] + + +class CrewAITrackDecorator(base_track_decorator.BaseTrackDecorator): + def _start_span_inputs_preprocessor( + self, + func: Callable, + track_options: arguments_helpers.TrackOptions, + args: Optional[Tuple], + kwargs: Optional[Dict[str, Any]], + ) -> arguments_helpers.StartSpanParameters: + + name = track_options.name if track_options.name is not None else func.__name__ + metadata = track_options.metadata if track_options.metadata is not None else {} + + metadata.update({ + "created_from": "crewai", + }) + + tags = ["crewai"] + # qq = str(func) + + # PARSE INPUT + if name == "kickoff": + input = kwargs.get("inputs") + elif name == "execute_task": + input = {} + input["context"] = kwargs.get("context") + + # task_dict = kwargs["task"].model_dump() + # task_dict2, _ = dict_utils.split_dict_by_keys(task_dict, TASK_KWARGS_KEYS_TO_LOG_AS_INPUTS) + task_dict = kwargs["task"].model_dump(include=TASK_KWARGS_KEYS_TO_LOG_AS_INPUTS) + input["task"] = task_dict + + # agent_dict = task_dict["agent"] + # agent_dict2, _ = dict_utils.split_dict_by_keys(agent_dict, AGENT_KWARGS_KEYS_TO_LOG_AS_INPUTS) + agent_dict = kwargs["task"].agent.model_dump(include=AGENT_KWARGS_KEYS_TO_LOG_AS_INPUTS) + agent_dict["llm"] = str(agent_dict["llm"]) + input["agent"] = agent_dict + name = f"{name}: {input['task']['name']}" + + usage = None + model = None + provider = None + + result = arguments_helpers.StartSpanParameters( + name=name, + input=input, + type=track_options.type, + tags=tags, + metadata=metadata, + project_name=track_options.project_name, + model=model, + provider=provider, + ) + + return result + + def _end_span_inputs_preprocessor( + self, + output: Any, + capture_output: bool, + ) -> arguments_helpers.EndSpanParameters: + + usage = None + model = None + + if not isinstance(output, str): + output = output.model_dump() + usage = output.pop("token_usage", None) + else: + output = {"output": output} + + result = arguments_helpers.EndSpanParameters( + output=output, + usage=usage, + # metadata=metadata, + model=model, + ) + + return result + + def _generators_handler( + self, + output: Any, + capture_output: bool, + generations_aggregator: Optional[Callable[[List[Any]], str]], + ) -> Optional[Union[Generator, AsyncGenerator]]: + return super()._generators_handler( + output, capture_output, generations_aggregator + ) diff --git a/sdks/python/src/opik/integrations/crewai/opik_tracker.py b/sdks/python/src/opik/integrations/crewai/opik_tracker.py new file mode 100644 index 0000000000..1f8532cb80 --- /dev/null +++ b/sdks/python/src/opik/integrations/crewai/opik_tracker.py @@ -0,0 +1,30 @@ +from typing import Optional + +import crewai + +from . 
import crewai_decorator + + +def track_crewai( + project_name: Optional[str] = None, +) -> None: + decorator_factory = crewai_decorator.CrewAITrackDecorator() + + kickoff_decorator = decorator_factory.track( + # type="llm", + # name="chat_completion_create", + project_name=project_name, + ) + + # todo + # Crew.kickoff, + # Crew.kickoff_for_each, + # Crew.kickoff_async, + # Crew.kickoff_for_each_async, + # Agent.execute_task, + # Task.execute_sync, + + crewai.Crew.kickoff = kickoff_decorator(crewai.Crew.kickoff) + crewai.Agent.execute_task = kickoff_decorator(crewai.Agent.execute_task) + + return None diff --git a/sdks/python/src/opik/integrations/dspy/__init__.py b/sdks/python/src/opik/integrations/dspy/__init__.py index e69de29bb2..4086879ebe 100644 --- a/sdks/python/src/opik/integrations/dspy/__init__.py +++ b/sdks/python/src/opik/integrations/dspy/__init__.py @@ -0,0 +1,3 @@ +from .callback import OpikCallback + +__all__ = ["OpikCallback"] From d1581632455d1c2761a90b8f6c134ac8962e3e4d Mon Sep 17 00:00:00 2001 From: Alexander Barannikov Date: Mon, 6 Jan 2025 13:34:42 +0000 Subject: [PATCH 04/13] PoC with token usage info --- .../integrations/crewai/crewai_decorator.py | 103 ++++++++++++++++-- .../opik/integrations/crewai/opik_tracker.py | 49 +++++++-- 2 files changed, 131 insertions(+), 21 deletions(-) diff --git a/sdks/python/src/opik/integrations/crewai/crewai_decorator.py b/sdks/python/src/opik/integrations/crewai/crewai_decorator.py index 8ee87833d8..110cc8efdf 100644 --- a/sdks/python/src/opik/integrations/crewai/crewai_decorator.py +++ b/sdks/python/src/opik/integrations/crewai/crewai_decorator.py @@ -1,9 +1,12 @@ import logging from typing import Any, AsyncGenerator, Callable, Dict, Generator, List, Optional, Tuple, Union -from opik import dict_utils +from opik import dict_utils, opik_context from opik.decorator import arguments_helpers, base_track_decorator +from crewai.tasks import TaskOutput + + LOGGER = logging.getLogger(__name__) AGENT_KWARGS_KEYS_TO_LOG_AS_INPUTS = [ @@ -53,6 +56,35 @@ ] + +from litellm.integrations.custom_logger import CustomLogger +from litellm.types.utils import Usage +# from crewai.agents.agent_builder.utilities.base_token_process import TokenProcess + + +class OpikTokenCalcHandler(CustomLogger): + # def __init__(self, token_cost_process: TokenProcess): + # self.token_cost_process = token_cost_process + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._event_kwargs = None + self._response_obj = None + self._start_time = None + self._end_time = None + + + def log_success_event(self, kwargs, response_obj, start_time, end_time): + print("*** CALLBACK: OpikTokenCalcHandler") + self._event_kwargs = kwargs + self._response_obj = response_obj + self._start_time = start_time + self._end_time = end_time + + +def opik_callback(*args, **kwargs): + print("*** CALLBACK: opik_callback") + + class CrewAITrackDecorator(base_track_decorator.BaseTrackDecorator): def _start_span_inputs_preprocessor( self, @@ -65,33 +97,69 @@ def _start_span_inputs_preprocessor( name = track_options.name if track_options.name is not None else func.__name__ metadata = track_options.metadata if track_options.metadata is not None else {} + if "args" in metadata: + print() + metadata.update({ "created_from": "crewai", + "args": args, + "kwargs": kwargs, + # "token_handler": None, }) tags = ["crewai"] - # qq = str(func) + ###################### # PARSE INPUT + ###################### + # Crew if name == "kickoff": input = kwargs.get("inputs") + metadata.update({ + 
"object_type": "crew", + }) + + # todo set callbacks for + for agent in args[0].agents: + agent.step_callback = opik_callback + token_usage_callback = OpikTokenCalcHandler() + agent.agent_executor.callbacks = [token_usage_callback] + agent.agent_executor.callbacks + # todo put this handler to metadata and use it in end_span() + metadata["token_usage_callback"] = token_usage_callback + # move this handler to AGENT section + + # for task in args[0].tasks: + # task.callback = opik_callback + + # Agent elif name == "execute_task": input = {} input["context"] = kwargs.get("context") - # task_dict = kwargs["task"].model_dump() - # task_dict2, _ = dict_utils.split_dict_by_keys(task_dict, TASK_KWARGS_KEYS_TO_LOG_AS_INPUTS) task_dict = kwargs["task"].model_dump(include=TASK_KWARGS_KEYS_TO_LOG_AS_INPUTS) input["task"] = task_dict - # agent_dict = task_dict["agent"] - # agent_dict2, _ = dict_utils.split_dict_by_keys(agent_dict, AGENT_KWARGS_KEYS_TO_LOG_AS_INPUTS) agent_dict = kwargs["task"].agent.model_dump(include=AGENT_KWARGS_KEYS_TO_LOG_AS_INPUTS) agent_dict["llm"] = str(agent_dict["llm"]) input["agent"] = agent_dict - name = f"{name}: {input['task']['name']}" - usage = None + # name = f"{name}: {input['task']['name']}" + metadata.update({ + "object_type": "agent", + }) + + # Task + elif name == "execute_sync": + input = {} + # name = f"{name}: {args[0].name}" + metadata.update({ + "object_type": "task", + }) + + else: + raise NotImplementedError + + # usage = None model = None provider = None @@ -117,11 +185,24 @@ def _end_span_inputs_preprocessor( usage = None model = None - if not isinstance(output, str): + current_span = opik_context.get_current_span_data() + + if isinstance(output, TaskOutput): + output = {"output": output.raw} + elif isinstance(output, str): + output = {"output": output} + else: output = output.model_dump() usage = output.pop("token_usage", None) - else: - output = {"output": output} + + if current_span.metadata.get("object_type") == "agent": + opik_callback_handler = current_span.metadata['kwargs']['task'].agent.agent_executor.callbacks[0] + if opik_callback_handler._response_obj: + usage = opik_callback_handler._response_obj.model_dump()["usage"] + + current_span.metadata.pop('args') + current_span.metadata.pop('kwargs') + current_span.metadata.pop('token_usage_callback', None) result = arguments_helpers.EndSpanParameters( output=output, diff --git a/sdks/python/src/opik/integrations/crewai/opik_tracker.py b/sdks/python/src/opik/integrations/crewai/opik_tracker.py index 1f8532cb80..adaf082f9e 100644 --- a/sdks/python/src/opik/integrations/crewai/opik_tracker.py +++ b/sdks/python/src/opik/integrations/crewai/opik_tracker.py @@ -1,3 +1,4 @@ +import functools from typing import Optional import crewai @@ -11,20 +12,48 @@ def track_crewai( decorator_factory = crewai_decorator.CrewAITrackDecorator() kickoff_decorator = decorator_factory.track( - # type="llm", - # name="chat_completion_create", project_name=project_name, ) - # todo - # Crew.kickoff, - # Crew.kickoff_for_each, - # Crew.kickoff_async, - # Crew.kickoff_for_each_async, - # Agent.execute_task, - # Task.execute_sync, - crewai.Crew.kickoff = kickoff_decorator(crewai.Crew.kickoff) + crewai.Crew.kickoff_for_each = kickoff_decorator(crewai.Crew.kickoff_for_each) + crewai.Crew.kickoff_async = kickoff_decorator(crewai.Crew.kickoff_async) + crewai.Crew.kickoff_for_each_async = kickoff_decorator(crewai.Crew.kickoff_for_each_async) + crewai.Agent.execute_task = kickoff_decorator(crewai.Agent.execute_task) + 
crewai.Agent.create_agent_executor = create_agent_executor_wrapper(crewai.Agent.create_agent_executor) + + crewai.Task.execute_sync = kickoff_decorator(crewai.Task.execute_sync) + crewai.Task.execute_async = kickoff_decorator(crewai.Task.execute_async) return None + + +def create_agent_executor_wrapper(method): + @functools.wraps(method) + def wrapped_method(*args, **kwargs): + print("*** create_agent_executor_wrapper BEGIN ***") + print(args[0].role.strip()) + # print(args) + # print(kwargs) + + opik_obj = None + + if args[0].agent_executor and len(args[0].agent_executor.callbacks) > 1: + for callback in args[0].agent_executor.callbacks: + if isinstance(callback, crewai_decorator.OpikTokenCalcHandler): + opik_obj = callback + # todo reset token usage info? + break + + result = method(*args, **kwargs) + + if opik_obj is not None: + args[0].agent_executor.callbacks = [opik_obj] + args[0].agent_executor.callbacks + print("*** create_agent_executor_wrapper SET WRAPPER ***") + + print("*** create_agent_executor_wrapper END ***") + return result + + return wrapped_method + From 86847dc1fccd1f008532e4ea8299c3e5383c2423 Mon Sep 17 00:00:00 2001 From: Alexander Barannikov Date: Mon, 6 Jan 2025 18:34:07 +0000 Subject: [PATCH 05/13] wip: sometimes not logging the last token usage --- .../integrations/crewai/crewai_decorator.py | 96 ++++++++++++------- .../opik/integrations/crewai/opik_tracker.py | 4 + 2 files changed, 68 insertions(+), 32 deletions(-) diff --git a/sdks/python/src/opik/integrations/crewai/crewai_decorator.py b/sdks/python/src/opik/integrations/crewai/crewai_decorator.py index 110cc8efdf..7f22e92cf9 100644 --- a/sdks/python/src/opik/integrations/crewai/crewai_decorator.py +++ b/sdks/python/src/opik/integrations/crewai/crewai_decorator.py @@ -58,8 +58,9 @@ from litellm.integrations.custom_logger import CustomLogger -from litellm.types.utils import Usage +# from litellm.types.utils import Usage # from crewai.agents.agent_builder.utilities.base_token_process import TokenProcess +import pprint class OpikTokenCalcHandler(CustomLogger): @@ -71,18 +72,33 @@ def __init__(self, *args, **kwargs): self._response_obj = None self._start_time = None self._end_time = None + self.model = None + self.provider = None + self.usage = None def log_success_event(self, kwargs, response_obj, start_time, end_time): - print("*** CALLBACK: OpikTokenCalcHandler") - self._event_kwargs = kwargs - self._response_obj = response_obj - self._start_time = start_time - self._end_time = end_time + try: + print("*** CALLBACK: OpikTokenCalcHandler") + self._event_kwargs = kwargs + self._response_obj = response_obj + self._start_time = start_time + self._end_time = end_time + self.model = response_obj.model + self.provider = "openai" if response_obj.object == 'chat.completion' else None + self.usage = response_obj.model_dump().get("usage") + except Exception as e: + print(e) -def opik_callback(*args, **kwargs): - print("*** CALLBACK: opik_callback") + pprint.pprint(self.usage) + + if self.usage is None: + print("*** USAGE IS NONE!") + + +# def opik_callback(*args, **kwargs): +# print("*** CALLBACK: opik_callback") class CrewAITrackDecorator(base_track_decorator.BaseTrackDecorator): @@ -104,7 +120,6 @@ def _start_span_inputs_preprocessor( "created_from": "crewai", "args": args, "kwargs": kwargs, - # "token_handler": None, }) tags = ["crewai"] @@ -119,31 +134,37 @@ def _start_span_inputs_preprocessor( "object_type": "crew", }) - # todo set callbacks for - for agent in args[0].agents: - agent.step_callback = opik_callback - 
token_usage_callback = OpikTokenCalcHandler() - agent.agent_executor.callbacks = [token_usage_callback] + agent.agent_executor.callbacks - # todo put this handler to metadata and use it in end_span() - metadata["token_usage_callback"] = token_usage_callback - # move this handler to AGENT section + # # todo set callbacks for + # for agent in args[0].agents: + # agent.step_callback = opik_callback + # token_usage_callback = OpikTokenCalcHandler() + # agent.agent_executor.callbacks = [token_usage_callback] + agent.agent_executor.callbacks + # # todo put this handler to metadata and use it in end_span() + # metadata["token_usage_callback"] = token_usage_callback + # # move this handler to AGENT section # for task in args[0].tasks: # task.callback = opik_callback # Agent elif name == "execute_task": + assert kwargs['task'].agent == args[0] + agent = args[0] + token_usage_callback = OpikTokenCalcHandler() + agent.agent_executor.callbacks = [token_usage_callback] + agent.agent_executor.callbacks + input = {} input["context"] = kwargs.get("context") - task_dict = kwargs["task"].model_dump(include=TASK_KWARGS_KEYS_TO_LOG_AS_INPUTS) - input["task"] = task_dict + # task_dict = kwargs["task"].model_dump(include=TASK_KWARGS_KEYS_TO_LOG_AS_INPUTS) + # input["task"] = task_dict agent_dict = kwargs["task"].agent.model_dump(include=AGENT_KWARGS_KEYS_TO_LOG_AS_INPUTS) agent_dict["llm"] = str(agent_dict["llm"]) input["agent"] = agent_dict # name = f"{name}: {input['task']['name']}" + name = agent.role.strip() metadata.update({ "object_type": "agent", }) @@ -151,7 +172,7 @@ def _start_span_inputs_preprocessor( # Task elif name == "execute_sync": input = {} - # name = f"{name}: {args[0].name}" + name = f"Task.{name}: {args[0].name}" metadata.update({ "object_type": "task", }) @@ -160,8 +181,8 @@ def _start_span_inputs_preprocessor( raise NotImplementedError # usage = None - model = None - provider = None + # model = None + # provider = None result = arguments_helpers.StartSpanParameters( name=name, @@ -170,8 +191,8 @@ def _start_span_inputs_preprocessor( tags=tags, metadata=metadata, project_name=track_options.project_name, - model=model, - provider=provider, + # model=model, + # provider=provider, ) return result @@ -184,6 +205,7 @@ def _end_span_inputs_preprocessor( usage = None model = None + provider = None current_span = opik_context.get_current_span_data() @@ -192,23 +214,33 @@ def _end_span_inputs_preprocessor( elif isinstance(output, str): output = {"output": output} else: - output = output.model_dump() - usage = output.pop("token_usage", None) + # output = output.model_dump() + # usage = output.pop("token_usage", None) + output = {"output": None} if current_span.metadata.get("object_type") == "agent": + assert current_span.metadata['kwargs']['task'].agent == current_span.metadata['args'][0] opik_callback_handler = current_span.metadata['kwargs']['task'].agent.agent_executor.callbacks[0] - if opik_callback_handler._response_obj: - usage = opik_callback_handler._response_obj.model_dump()["usage"] + # opik_callback_handler = current_span.metadata['args'][0].agent_executor.callbacks[0] + # if opik_callback_handler._response_obj: + # usage = opik_callback_handler._response_obj.model_dump()["usage"] + if opik_callback_handler: + model = opik_callback_handler.model + provider = opik_callback_handler.provider + usage = opik_callback_handler.usage - current_span.metadata.pop('args') - current_span.metadata.pop('kwargs') - current_span.metadata.pop('token_usage_callback', None) + metadata = 
current_span.metadata + + metadata.pop('args') + metadata.pop('kwargs') + metadata.pop('token_usage_callback', None) result = arguments_helpers.EndSpanParameters( output=output, usage=usage, - # metadata=metadata, + metadata=metadata, model=model, + provider=provider, ) return result diff --git a/sdks/python/src/opik/integrations/crewai/opik_tracker.py b/sdks/python/src/opik/integrations/crewai/opik_tracker.py index adaf082f9e..474f4cef2f 100644 --- a/sdks/python/src/opik/integrations/crewai/opik_tracker.py +++ b/sdks/python/src/opik/integrations/crewai/opik_tracker.py @@ -4,6 +4,7 @@ import crewai from . import crewai_decorator +from ...decorator import inspect_helpers def track_crewai( @@ -30,6 +31,9 @@ def track_crewai( def create_agent_executor_wrapper(method): + if inspect_helpers.is_async(method): + print("*** create_agent_executor_wrapper ASYNC ***") + @functools.wraps(method) def wrapped_method(*args, **kwargs): print("*** create_agent_executor_wrapper BEGIN ***") From d25cc1d4391cd7199da103340effb888b25a6511 Mon Sep 17 00:00:00 2001 From: Alexander Barannikov Date: Mon, 6 Jan 2025 21:13:58 +0000 Subject: [PATCH 06/13] add integration test draft --- .../integrations/crewai/crewai_decorator.py | 86 ++++------------- .../opik/integrations/crewai/opik_tracker.py | 12 --- .../library_integration/crewai/__init__.py | 0 .../crewai/config/agents.yaml | 20 ++++ .../crewai/config/tasks.yaml | 19 ++++ .../tests/library_integration/crewai/crew.py | 47 +++++++++ .../crewai/requirements.txt | 2 + .../library_integration/crewai/test_crewai.py | 95 +++++++++++++++++++ 8 files changed, 200 insertions(+), 81 deletions(-) create mode 100644 sdks/python/tests/library_integration/crewai/__init__.py create mode 100644 sdks/python/tests/library_integration/crewai/config/agents.yaml create mode 100644 sdks/python/tests/library_integration/crewai/config/tasks.yaml create mode 100644 sdks/python/tests/library_integration/crewai/crew.py create mode 100644 sdks/python/tests/library_integration/crewai/requirements.txt create mode 100644 sdks/python/tests/library_integration/crewai/test_crewai.py diff --git a/sdks/python/src/opik/integrations/crewai/crewai_decorator.py b/sdks/python/src/opik/integrations/crewai/crewai_decorator.py index 7f22e92cf9..deb6411cbb 100644 --- a/sdks/python/src/opik/integrations/crewai/crewai_decorator.py +++ b/sdks/python/src/opik/integrations/crewai/crewai_decorator.py @@ -1,11 +1,10 @@ import logging from typing import Any, AsyncGenerator, Callable, Dict, Generator, List, Optional, Tuple, Union -from opik import dict_utils, opik_context -from opik.decorator import arguments_helpers, base_track_decorator - from crewai.tasks import TaskOutput +from opik import opik_context +from opik.decorator import arguments_helpers, base_track_decorator LOGGER = logging.getLogger(__name__) @@ -20,7 +19,7 @@ "goal", # "i18n", # "id", - "llm", + # "llm", # "max_iter", # "max_rpm", # "max_tokens", @@ -40,7 +39,7 @@ # 'delegations', 'description', 'expected_output', - 'human_input', + # 'human_input', # 'i18n', # 'id', 'name', @@ -48,7 +47,7 @@ # 'output_file', # 'output_json', # 'output_pydantic', - 'processed_by_agents', + # 'processed_by_agents', 'prompt_context', 'tools', # 'tools_errors', @@ -58,47 +57,24 @@ from litellm.integrations.custom_logger import CustomLogger -# from litellm.types.utils import Usage -# from crewai.agents.agent_builder.utilities.base_token_process import TokenProcess -import pprint class OpikTokenCalcHandler(CustomLogger): - # def __init__(self, token_cost_process: 
TokenProcess): - # self.token_cost_process = token_cost_process def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._event_kwargs = None self._response_obj = None - self._start_time = None - self._end_time = None self.model = None self.provider = None self.usage = None def log_success_event(self, kwargs, response_obj, start_time, end_time): - try: - print("*** CALLBACK: OpikTokenCalcHandler") - self._event_kwargs = kwargs - self._response_obj = response_obj - self._start_time = start_time - self._end_time = end_time - self.model = response_obj.model - self.provider = "openai" if response_obj.object == 'chat.completion' else None - self.usage = response_obj.model_dump().get("usage") - except Exception as e: - print(e) - - - pprint.pprint(self.usage) - - if self.usage is None: - print("*** USAGE IS NONE!") - - -# def opik_callback(*args, **kwargs): -# print("*** CALLBACK: opik_callback") + self._event_kwargs = kwargs + self._response_obj = response_obj + self.model = response_obj.model + self.provider = "openai" if response_obj.object == 'chat.completion' else None + self.usage = response_obj.model_dump().get("usage") class CrewAITrackDecorator(base_track_decorator.BaseTrackDecorator): @@ -113,9 +89,6 @@ def _start_span_inputs_preprocessor( name = track_options.name if track_options.name is not None else func.__name__ metadata = track_options.metadata if track_options.metadata is not None else {} - if "args" in metadata: - print() - metadata.update({ "created_from": "crewai", "args": args, @@ -134,18 +107,6 @@ def _start_span_inputs_preprocessor( "object_type": "crew", }) - # # todo set callbacks for - # for agent in args[0].agents: - # agent.step_callback = opik_callback - # token_usage_callback = OpikTokenCalcHandler() - # agent.agent_executor.callbacks = [token_usage_callback] + agent.agent_executor.callbacks - # # todo put this handler to metadata and use it in end_span() - # metadata["token_usage_callback"] = token_usage_callback - # # move this handler to AGENT section - - # for task in args[0].tasks: - # task.callback = opik_callback - # Agent elif name == "execute_task": assert kwargs['task'].agent == args[0] @@ -155,15 +116,9 @@ def _start_span_inputs_preprocessor( input = {} input["context"] = kwargs.get("context") - - # task_dict = kwargs["task"].model_dump(include=TASK_KWARGS_KEYS_TO_LOG_AS_INPUTS) - # input["task"] = task_dict - - agent_dict = kwargs["task"].agent.model_dump(include=AGENT_KWARGS_KEYS_TO_LOG_AS_INPUTS) - agent_dict["llm"] = str(agent_dict["llm"]) + agent_dict = agent.model_dump(include=AGENT_KWARGS_KEYS_TO_LOG_AS_INPUTS) input["agent"] = agent_dict - # name = f"{name}: {input['task']['name']}" name = agent.role.strip() metadata.update({ "object_type": "agent", @@ -172,7 +127,10 @@ def _start_span_inputs_preprocessor( # Task elif name == "execute_sync": input = {} - name = f"Task.{name}: {args[0].name}" + task_dict = args[0].model_dump(include=TASK_KWARGS_KEYS_TO_LOG_AS_INPUTS) + input["task"] = task_dict + + name = f"Task: {args[0].name}" metadata.update({ "object_type": "task", }) @@ -180,10 +138,6 @@ def _start_span_inputs_preprocessor( else: raise NotImplementedError - # usage = None - # model = None - # provider = None - result = arguments_helpers.StartSpanParameters( name=name, input=input, @@ -191,8 +145,6 @@ def _start_span_inputs_preprocessor( tags=tags, metadata=metadata, project_name=track_options.project_name, - # model=model, - # provider=provider, ) return result @@ -216,14 +168,10 @@ def _end_span_inputs_preprocessor( else: # 
output = output.model_dump() # usage = output.pop("token_usage", None) - output = {"output": None} + output = {} if current_span.metadata.get("object_type") == "agent": - assert current_span.metadata['kwargs']['task'].agent == current_span.metadata['args'][0] - opik_callback_handler = current_span.metadata['kwargs']['task'].agent.agent_executor.callbacks[0] - # opik_callback_handler = current_span.metadata['args'][0].agent_executor.callbacks[0] - # if opik_callback_handler._response_obj: - # usage = opik_callback_handler._response_obj.model_dump()["usage"] + opik_callback_handler = current_span.metadata['args'][0].agent_executor.callbacks[0] if opik_callback_handler: model = opik_callback_handler.model provider = opik_callback_handler.provider diff --git a/sdks/python/src/opik/integrations/crewai/opik_tracker.py b/sdks/python/src/opik/integrations/crewai/opik_tracker.py index 474f4cef2f..a31db34555 100644 --- a/sdks/python/src/opik/integrations/crewai/opik_tracker.py +++ b/sdks/python/src/opik/integrations/crewai/opik_tracker.py @@ -4,7 +4,6 @@ import crewai from . import crewai_decorator -from ...decorator import inspect_helpers def track_crewai( @@ -31,32 +30,21 @@ def track_crewai( def create_agent_executor_wrapper(method): - if inspect_helpers.is_async(method): - print("*** create_agent_executor_wrapper ASYNC ***") - @functools.wraps(method) def wrapped_method(*args, **kwargs): - print("*** create_agent_executor_wrapper BEGIN ***") - print(args[0].role.strip()) - # print(args) - # print(kwargs) - opik_obj = None if args[0].agent_executor and len(args[0].agent_executor.callbacks) > 1: for callback in args[0].agent_executor.callbacks: if isinstance(callback, crewai_decorator.OpikTokenCalcHandler): opik_obj = callback - # todo reset token usage info? break result = method(*args, **kwargs) if opik_obj is not None: args[0].agent_executor.callbacks = [opik_obj] + args[0].agent_executor.callbacks - print("*** create_agent_executor_wrapper SET WRAPPER ***") - print("*** create_agent_executor_wrapper END ***") return result return wrapped_method diff --git a/sdks/python/tests/library_integration/crewai/__init__.py b/sdks/python/tests/library_integration/crewai/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdks/python/tests/library_integration/crewai/config/agents.yaml b/sdks/python/tests/library_integration/crewai/config/agents.yaml new file mode 100644 index 0000000000..7a167288bf --- /dev/null +++ b/sdks/python/tests/library_integration/crewai/config/agents.yaml @@ -0,0 +1,20 @@ +# src/latest_ai_development/config/agents.yaml +researcher: + role: > + {topic} Senior Data Researcher + goal: > + Uncover cutting-edge developments in {topic} + backstory: > + You're a seasoned researcher with a knack for uncovering the latest + developments in {topic}. Known for your ability to find the most relevant + information and present it in a clear and concise manner. + +reporting_analyst: + role: > + {topic} Reporting Analyst + goal: > + Create detailed reports based on {topic} data analysis and research findings + backstory: > + You're a meticulous analyst with a keen eye for detail. You're known for + your ability to turn complex data into clear and concise reports, making + it easy for others to understand and act on the information you provide. 
diff --git a/sdks/python/tests/library_integration/crewai/config/tasks.yaml b/sdks/python/tests/library_integration/crewai/config/tasks.yaml new file mode 100644 index 0000000000..628880855f --- /dev/null +++ b/sdks/python/tests/library_integration/crewai/config/tasks.yaml @@ -0,0 +1,19 @@ +# src/latest_ai_development/config/tasks.yaml +research_task: + description: > + Conduct a thorough research about {topic} + Make sure you find any interesting and relevant information given + the current year is 2024. + expected_output: > + A list with 2 bullet points of the most relevant information about {topic} + agent: researcher + +reporting_task: + description: > + Review the context you got and expand each topic into a small section for a report. +# Make sure the report is detailed and contains any and all relevant information. + expected_output: > + A fully fledge reports with the mains topics, each with a small section of information. + Formatted as markdown without '```' + agent: reporting_analyst + output_file: report.md diff --git a/sdks/python/tests/library_integration/crewai/crew.py b/sdks/python/tests/library_integration/crewai/crew.py new file mode 100644 index 0000000000..4a620771d8 --- /dev/null +++ b/sdks/python/tests/library_integration/crewai/crew.py @@ -0,0 +1,47 @@ +from crewai import Agent, Crew, Process, Task +from crewai.project import CrewBase, agent, crew, task + + +@CrewBase +class LatestAiDevelopmentCrew: + """LatestAiDevelopment crew""" + + @agent + def researcher(self) -> Agent: + return Agent( + config=self.agents_config['researcher'], + verbose=True, + tools=[ + # SerperDevTool() + ] + ) + + @agent + def reporting_analyst(self) -> Agent: + return Agent( + config=self.agents_config['reporting_analyst'], + verbose=True + ) + + @task + def research_task(self) -> Task: + return Task( + config=self.tasks_config['research_task'], + ) + + @task + def reporting_task(self) -> Task: + return Task( + config=self.tasks_config['reporting_task'], + output_file='output/report.md' # This is the file that will be contain the final report. 
+ ) + + @crew + def crew(self) -> Crew: + """Creates the LatestAiDevelopment crew""" + return Crew( + agents=self.agents, # Automatically created by the @agent decorator + tasks=self.tasks, # Automatically created by the @task decorator + process=Process.sequential, + verbose=True, + ) diff --git a/sdks/python/tests/library_integration/crewai/requirements.txt b/sdks/python/tests/library_integration/crewai/requirements.txt new file mode 100644 index 0000000000..e1de2bf464 --- /dev/null +++ b/sdks/python/tests/library_integration/crewai/requirements.txt @@ -0,0 +1,2 @@ +crewai +crewai-tools diff --git a/sdks/python/tests/library_integration/crewai/test_crewai.py b/sdks/python/tests/library_integration/crewai/test_crewai.py new file mode 100644 index 0000000000..ae83cae49a --- /dev/null +++ b/sdks/python/tests/library_integration/crewai/test_crewai.py @@ -0,0 +1,95 @@ +from typing import Union + +import pytest + +import opik +from opik import context_storage +from opik.api_objects import opik_client, span, trace +from opik.api_objects.opik_client import get_client_cached +from opik.config import OPIK_PROJECT_DEFAULT_NAME + +from .crew import LatestAiDevelopmentCrew +from opik.integrations.crewai import track_crewai +from ...testlib import ( + ANY_BUT_NONE, + ANY_DICT, + ANY_STRING, + SpanModel, + TraceModel, + assert_equal, +) + + + +import litellm +litellm.set_verbose=True + + +def test_crewai__happyflow( + fake_backend, +): + project_name = "crewai-integration-test" + + track_crewai(project_name=project_name) + + inputs = { + 'topic': 'AI Agents' + } + c = LatestAiDevelopmentCrew() + c = c.crew() + + c = c.kickoff(inputs=inputs) + + print(c) + + opik_client = get_client_cached() + opik_client.flush() + + EXPECTED_TRACE_TREE = TraceModel( + id=ANY_STRING(), + name="ChainOfThought", + input={"args": (), "kwargs": {"question": "What is the meaning of life?"}}, + output=None, + metadata={"created_from": "dspy"}, + start_time=ANY_BUT_NONE, + end_time=ANY_BUT_NONE, + project_name=project_name, + spans=[ + SpanModel( + id=ANY_STRING(), + type="llm", + name="LM", + provider="openai", + model="gpt-4o-mini", + input=ANY_DICT, + output=ANY_DICT, + metadata=None, + start_time=ANY_BUT_NONE, + end_time=ANY_BUT_NONE, + project_name=project_name, + spans=[], + ), + SpanModel( + id=ANY_STRING(), + type="llm", + name="Predict", + provider=None, + model=None, + input=ANY_DICT, + output=ANY_DICT, + metadata=None, + start_time=ANY_BUT_NONE, + end_time=ANY_BUT_NONE, + project_name=project_name, + spans=[], + ), + ], + ) + + # assert len(fake_backend.trace_trees) == 1 + # assert len(fake_backend.span_trees) == 2 + # + # assert_equal(EXPECTED_TRACE_TREE, fake_backend.trace_trees[0]) + + + From 94411a752cfa63fda16727aac41e1ee9a81b2d7b Mon Sep 17 00:00:00 2001 From: Alexander Barannikov Date: Thu, 9 Jan 2025 12:31:12 +0000 Subject: [PATCH 07/13] decorators revised --- .../integrations/aisuite/aisuite_decorator.py | 12 +- .../integrations/crewai/crewai_decorator.py | 154 ++++++++---------- .../opik/integrations/crewai/opik_tracker.py | 38 +---- 3 files changed, 86 insertions(+), 118 deletions(-) diff --git a/sdks/python/src/opik/integrations/aisuite/aisuite_decorator.py b/sdks/python/src/opik/integrations/aisuite/aisuite_decorator.py index 57146d6d7b..560b9b8625 100644 --- a/sdks/python/src/opik/integrations/aisuite/aisuite_decorator.py +++ b/sdks/python/src/opik/integrations/aisuite/aisuite_decorator.py @@ -1,5 +1,15 @@ import logging -from typing import Any, AsyncGenerator, Callable, Dict, Generator, List, 
Optional, Tuple, Union +from typing import ( + Any, + AsyncGenerator, + Callable, + Dict, + Generator, + List, + Optional, + Tuple, + Union, +) import aisuite.framework as aisuite_chat_completion from openai.types.chat import chat_completion as openai_chat_completion diff --git a/sdks/python/src/opik/integrations/crewai/crewai_decorator.py b/sdks/python/src/opik/integrations/crewai/crewai_decorator.py index deb6411cbb..798766e8a8 100644 --- a/sdks/python/src/opik/integrations/crewai/crewai_decorator.py +++ b/sdks/python/src/opik/integrations/crewai/crewai_decorator.py @@ -1,10 +1,19 @@ import logging -from typing import Any, AsyncGenerator, Callable, Dict, Generator, List, Optional, Tuple, Union - -from crewai.tasks import TaskOutput +from typing import ( + Any, + AsyncGenerator, + Callable, + Dict, + Generator, + List, + Optional, + Tuple, + Union, +) from opik import opik_context from opik.decorator import arguments_helpers, base_track_decorator +from opik.types import SpanType LOGGER = logging.getLogger(__name__) @@ -33,48 +42,39 @@ # 'agent', # 'async_execution', # 'callback', - 'config', - 'context', + "config", + "context", # 'converter_cls', # 'delegations', - 'description', - 'expected_output', + "description", + "expected_output", # 'human_input', # 'i18n', # 'id', - 'name', + "name", # 'output', # 'output_file', # 'output_json', # 'output_pydantic', # 'processed_by_agents', - 'prompt_context', - 'tools', + "prompt_context", + "tools", # 'tools_errors', # 'used_tools', ] - -from litellm.integrations.custom_logger import CustomLogger - - -class OpikTokenCalcHandler(CustomLogger): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self._event_kwargs = None - self._response_obj = None - self.model = None - self.provider = None - self.usage = None - - - def log_success_event(self, kwargs, response_obj, start_time, end_time): - self._event_kwargs = kwargs - self._response_obj = response_obj - self.model = response_obj.model - self.provider = "openai" if response_obj.object == 'chat.completion' else None - self.usage = response_obj.model_dump().get("usage") +TASK_KWARGS_KEYS_TO_LOG_AS_OUTPUT = [ + # 'agent', + # 'description', + # 'expected_output', + # 'json_dict', + "name", + # 'output_format', + # 'pydantic', + "raw", + "summary", +] class CrewAITrackDecorator(base_track_decorator.BaseTrackDecorator): @@ -82,66 +82,48 @@ def _start_span_inputs_preprocessor( self, func: Callable, track_options: arguments_helpers.TrackOptions, - args: Optional[Tuple], - kwargs: Optional[Dict[str, Any]], + args: Tuple, + kwargs: Dict[str, Any], ) -> arguments_helpers.StartSpanParameters: - name = track_options.name if track_options.name is not None else func.__name__ metadata = track_options.metadata if track_options.metadata is not None else {} + span_type: SpanType = "general" - metadata.update({ - "created_from": "crewai", - "args": args, - "kwargs": kwargs, - }) - + metadata["created_from"] = "crewai" tags = ["crewai"] - ###################### - # PARSE INPUT - ###################### # Crew if name == "kickoff": - input = kwargs.get("inputs") - metadata.update({ - "object_type": "crew", - }) + metadata["object_type"] = "crew" + input_dict = kwargs.get("inputs") # Agent elif name == "execute_task": - assert kwargs['task'].agent == args[0] + metadata["object_type"] = "agent" agent = args[0] - token_usage_callback = OpikTokenCalcHandler() - agent.agent_executor.callbacks = [token_usage_callback] + agent.agent_executor.callbacks - - input = {} - input["context"] = 
kwargs.get("context") + input_dict = {"context": kwargs.get("context")} agent_dict = agent.model_dump(include=AGENT_KWARGS_KEYS_TO_LOG_AS_INPUTS) - input["agent"] = agent_dict - + input_dict["agent"] = agent_dict name = agent.role.strip() - metadata.update({ - "object_type": "agent", - }) # Task elif name == "execute_sync": - input = {} + metadata["object_type"] = "task" + input_dict = {} task_dict = args[0].model_dump(include=TASK_KWARGS_KEYS_TO_LOG_AS_INPUTS) - input["task"] = task_dict - + input_dict["task"] = task_dict name = f"Task: {args[0].name}" - metadata.update({ - "object_type": "task", - }) - else: - raise NotImplementedError + elif name == "completion": + metadata["object_type"] = "completion" + input_dict = {"messages": kwargs.get("messages")} + span_type = "llm" + name = "llm call" result = arguments_helpers.StartSpanParameters( name=name, - input=input, - type=track_options.type, + input=input_dict, + type=span_type, tags=tags, metadata=metadata, project_name=track_options.project_name, @@ -154,35 +136,31 @@ def _end_span_inputs_preprocessor( output: Any, capture_output: bool, ) -> arguments_helpers.EndSpanParameters: - usage = None model = None provider = None + object_type = None + metadata = {} current_span = opik_context.get_current_span_data() - - if isinstance(output, TaskOutput): - output = {"output": output.raw} - elif isinstance(output, str): + if current_span and current_span.metadata: + metadata = current_span.metadata + object_type = metadata.pop("object_type") + + if object_type == "crew": + output_dict = output.model_dump() + _ = output_dict.pop("token_usage") + output = output_dict + elif object_type == "agent": output = {"output": output} - else: - # output = output.model_dump() - # usage = output.pop("token_usage", None) + elif object_type == "task": + output_dict = output.model_dump(include=TASK_KWARGS_KEYS_TO_LOG_AS_OUTPUT) + output = output_dict + elif object_type == "completion": + output_dict = output.model_dump() + usage = output_dict.pop("usage", None) output = {} - if current_span.metadata.get("object_type") == "agent": - opik_callback_handler = current_span.metadata['args'][0].agent_executor.callbacks[0] - if opik_callback_handler: - model = opik_callback_handler.model - provider = opik_callback_handler.provider - usage = opik_callback_handler.usage - - metadata = current_span.metadata - - metadata.pop('args') - metadata.pop('kwargs') - metadata.pop('token_usage_callback', None) - result = arguments_helpers.EndSpanParameters( output=output, usage=usage, diff --git a/sdks/python/src/opik/integrations/crewai/opik_tracker.py b/sdks/python/src/opik/integrations/crewai/opik_tracker.py index a31db34555..6295896d52 100644 --- a/sdks/python/src/opik/integrations/crewai/opik_tracker.py +++ b/sdks/python/src/opik/integrations/crewai/opik_tracker.py @@ -1,14 +1,21 @@ -import functools from typing import Optional import crewai +import litellm from . 
import crewai_decorator +__IS_TRACKING_ENABLED = False + def track_crewai( project_name: Optional[str] = None, ) -> None: + global __IS_TRACKING_ENABLED + if __IS_TRACKING_ENABLED: + return + __IS_TRACKING_ENABLED = True + decorator_factory = crewai_decorator.CrewAITrackDecorator() kickoff_decorator = decorator_factory.track( @@ -17,35 +24,8 @@ def track_crewai( crewai.Crew.kickoff = kickoff_decorator(crewai.Crew.kickoff) crewai.Crew.kickoff_for_each = kickoff_decorator(crewai.Crew.kickoff_for_each) - crewai.Crew.kickoff_async = kickoff_decorator(crewai.Crew.kickoff_async) - crewai.Crew.kickoff_for_each_async = kickoff_decorator(crewai.Crew.kickoff_for_each_async) - crewai.Agent.execute_task = kickoff_decorator(crewai.Agent.execute_task) - crewai.Agent.create_agent_executor = create_agent_executor_wrapper(crewai.Agent.create_agent_executor) - crewai.Task.execute_sync = kickoff_decorator(crewai.Task.execute_sync) - crewai.Task.execute_async = kickoff_decorator(crewai.Task.execute_async) + litellm.completion = kickoff_decorator(litellm.completion) return None - - -def create_agent_executor_wrapper(method): - @functools.wraps(method) - def wrapped_method(*args, **kwargs): - opik_obj = None - - if args[0].agent_executor and len(args[0].agent_executor.callbacks) > 1: - for callback in args[0].agent_executor.callbacks: - if isinstance(callback, crewai_decorator.OpikTokenCalcHandler): - opik_obj = callback - break - - result = method(*args, **kwargs) - - if opik_obj is not None: - args[0].agent_executor.callbacks = [opik_obj] + args[0].agent_executor.callbacks - - return result - - return wrapped_method - From afd1efea436680449b283e1fb88b47fda49e8a54 Mon Sep 17 00:00:00 2001 From: Alexander Barannikov Date: Thu, 9 Jan 2025 16:47:52 +0000 Subject: [PATCH 08/13] rename decorator --- .../src/opik/integrations/crewai/opik_tracker.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/sdks/python/src/opik/integrations/crewai/opik_tracker.py b/sdks/python/src/opik/integrations/crewai/opik_tracker.py index 6295896d52..6a6348f162 100644 --- a/sdks/python/src/opik/integrations/crewai/opik_tracker.py +++ b/sdks/python/src/opik/integrations/crewai/opik_tracker.py @@ -18,14 +18,14 @@ def track_crewai( decorator_factory = crewai_decorator.CrewAITrackDecorator() - kickoff_decorator = decorator_factory.track( + crewai_wrapper = decorator_factory.track( project_name=project_name, ) - crewai.Crew.kickoff = kickoff_decorator(crewai.Crew.kickoff) - crewai.Crew.kickoff_for_each = kickoff_decorator(crewai.Crew.kickoff_for_each) - crewai.Agent.execute_task = kickoff_decorator(crewai.Agent.execute_task) - crewai.Task.execute_sync = kickoff_decorator(crewai.Task.execute_sync) - litellm.completion = kickoff_decorator(litellm.completion) + crewai.Crew.kickoff = crewai_wrapper(crewai.Crew.kickoff) + crewai.Crew.kickoff_for_each = crewai_wrapper(crewai.Crew.kickoff_for_each) + crewai.Agent.execute_task = crewai_wrapper(crewai.Agent.execute_task) + crewai.Task.execute_sync = crewai_wrapper(crewai.Task.execute_sync) + litellm.completion = crewai_wrapper(litellm.completion) return None From 61db57bb6a5d69061b701fb5514633a8fed1064a Mon Sep 17 00:00:00 2001 From: Alexander Barannikov Date: Thu, 9 Jan 2025 18:02:28 +0000 Subject: [PATCH 09/13] separate parsing of input/output --- .../integrations/crewai/crewai_decorator.py | 62 ++++++++++++------- 1 file changed, 38 insertions(+), 24 deletions(-) diff --git a/sdks/python/src/opik/integrations/crewai/crewai_decorator.py 
b/sdks/python/src/opik/integrations/crewai/crewai_decorator.py index 798766e8a8..355378d444 100644 --- a/sdks/python/src/opik/integrations/crewai/crewai_decorator.py +++ b/sdks/python/src/opik/integrations/crewai/crewai_decorator.py @@ -87,11 +87,26 @@ def _start_span_inputs_preprocessor( ) -> arguments_helpers.StartSpanParameters: name = track_options.name if track_options.name is not None else func.__name__ metadata = track_options.metadata if track_options.metadata is not None else {} - span_type: SpanType = "general" - metadata["created_from"] = "crewai" tags = ["crewai"] + input_dict, name, span_type = self._parse_inputs(args, kwargs, metadata, name) + + result = arguments_helpers.StartSpanParameters( + name=name, + input=input_dict, + type=span_type, + tags=tags, + metadata=metadata, + project_name=track_options.project_name, + ) + + return result + + def _parse_inputs(self, args: Tuple, kwargs: Dict, metadata: Dict, name: str) -> Tuple[Dict, str, SpanType]: + span_type: SpanType = "general" + input_dict = {} + # Crew if name == "kickoff": metadata["object_type"] = "crew" @@ -120,25 +135,13 @@ def _start_span_inputs_preprocessor( span_type = "llm" name = "llm call" - result = arguments_helpers.StartSpanParameters( - name=name, - input=input_dict, - type=span_type, - tags=tags, - metadata=metadata, - project_name=track_options.project_name, - ) - - return result + return input_dict, name, span_type def _end_span_inputs_preprocessor( self, output: Any, capture_output: bool, ) -> arguments_helpers.EndSpanParameters: - usage = None - model = None - provider = None object_type = None metadata = {} @@ -147,6 +150,23 @@ def _end_span_inputs_preprocessor( metadata = current_span.metadata object_type = metadata.pop("object_type") + model, output_dict, provider, usage = self._parse_outputs(object_type, output) + + result = arguments_helpers.EndSpanParameters( + output=output_dict, + usage=usage, + metadata=metadata, + model=model, + provider=provider, + ) + + return result + + def _parse_outputs(self, object_type, output): + model = None + provider = None + usage = None + if object_type == "crew": output_dict = output.model_dump() _ = output_dict.pop("token_usage") @@ -159,17 +179,11 @@ def _end_span_inputs_preprocessor( elif object_type == "completion": output_dict = output.model_dump() usage = output_dict.pop("usage", None) + model = output_dict.pop("model", None) + provider = "openai" if output_dict.get('object') == 'chat.completion' else None output = {} - result = arguments_helpers.EndSpanParameters( - output=output, - usage=usage, - metadata=metadata, - model=model, - provider=provider, - ) - - return result + return model, output, provider, usage def _generators_handler( self, From 38e2dffbbaa14e2f2a7ad171bca96205bb4b6624 Mon Sep 17 00:00:00 2001 From: Alexander Barannikov Date: Thu, 9 Jan 2025 19:48:35 +0000 Subject: [PATCH 10/13] add happyflow integration test --- .../integrations/crewai/crewai_decorator.py | 1 - .../tests/library_integration/crewai/crew.py | 15 +- .../library_integration/crewai/test_crewai.py | 175 ++++++++++++------ 3 files changed, 128 insertions(+), 63 deletions(-) diff --git a/sdks/python/src/opik/integrations/crewai/crewai_decorator.py b/sdks/python/src/opik/integrations/crewai/crewai_decorator.py index 355378d444..d1d65d82f5 100644 --- a/sdks/python/src/opik/integrations/crewai/crewai_decorator.py +++ b/sdks/python/src/opik/integrations/crewai/crewai_decorator.py @@ -63,7 +63,6 @@ # 'used_tools', ] - TASK_KWARGS_KEYS_TO_LOG_AS_OUTPUT = [ # 'agent', # 
'description', diff --git a/sdks/python/tests/library_integration/crewai/crew.py b/sdks/python/tests/library_integration/crewai/crew.py index 4a620771d8..ac53fd78f1 100644 --- a/sdks/python/tests/library_integration/crewai/crew.py +++ b/sdks/python/tests/library_integration/crewai/crew.py @@ -9,31 +9,28 @@ class LatestAiDevelopmentCrew: @agent def researcher(self) -> Agent: return Agent( - config=self.agents_config['researcher'], + config=self.agents_config["researcher"], verbose=True, tools=[ # SerperDevTool() - ] + ], ) @agent def reporting_analyst(self) -> Agent: - return Agent( - config=self.agents_config['reporting_analyst'], - verbose=True - ) + return Agent(config=self.agents_config["reporting_analyst"], verbose=True) @task def research_task(self) -> Task: return Task( - config=self.tasks_config['research_task'], + config=self.tasks_config["research_task"], ) @task def reporting_task(self) -> Task: return Task( - config=self.tasks_config['reporting_task'], - output_file='output/report.md' # This is the file that will be contain the final report. + config=self.tasks_config["reporting_task"], + output_file="output/report.md", # This is the file that will be contain the final report. ) @crew diff --git a/sdks/python/tests/library_integration/crewai/test_crewai.py b/sdks/python/tests/library_integration/crewai/test_crewai.py index ae83cae49a..977225fafa 100644 --- a/sdks/python/tests/library_integration/crewai/test_crewai.py +++ b/sdks/python/tests/library_integration/crewai/test_crewai.py @@ -1,15 +1,8 @@ -from typing import Union - -import pytest +import litellm -import opik -from opik import context_storage -from opik.api_objects import opik_client, span, trace from opik.api_objects.opik_client import get_client_cached -from opik.config import OPIK_PROJECT_DEFAULT_NAME - -from .crew import LatestAiDevelopmentCrew from opik.integrations.crewai import track_crewai +from .crew import LatestAiDevelopmentCrew from ...testlib import ( ANY_BUT_NONE, ANY_DICT, @@ -19,10 +12,7 @@ assert_equal, ) - - -import litellm -litellm.set_verbose=True +litellm.set_verbose = True def test_crewai__happyflow( @@ -32,64 +22,143 @@ def test_crewai__happyflow( track_crewai(project_name=project_name) - inputs = { - 'topic': 'AI Agents' - } + inputs = {"topic": "AI Agents"} c = LatestAiDevelopmentCrew() c = c.crew() - - c = c.kickoff(inputs=inputs) - - print(c) + _ = c.kickoff(inputs=inputs) opik_client = get_client_cached() opik_client.flush() EXPECTED_TRACE_TREE = TraceModel( - id=ANY_STRING(), - name="ChainOfThought", - input={"args": (), "kwargs": {"question": "What is the meaning of life?"}}, - output=None, - metadata={"created_from": "dspy"}, - start_time=ANY_BUT_NONE, end_time=ANY_BUT_NONE, + id=ANY_STRING(), + input={'topic': 'AI Agents'}, + metadata={"created_from": "crewai"}, + name="kickoff", + output=ANY_DICT, project_name=project_name, + start_time=ANY_BUT_NONE, + tags=["crewai"], spans=[ SpanModel( - id=ANY_STRING(), - type="llm", - name="LM", - provider="openai", - model="gpt-4o-mini", - input=ANY_DICT, - output=ANY_DICT, - metadata=None, - start_time=ANY_BUT_NONE, end_time=ANY_BUT_NONE, - project_name=project_name, - spans=[], - ), - SpanModel( id=ANY_STRING(), - type="llm", - name="Predict", - provider=None, - model=None, input=ANY_DICT, + metadata={"created_from": "crewai"}, + name="kickoff", output=ANY_DICT, - metadata=None, - start_time=ANY_BUT_NONE, - end_time=ANY_BUT_NONE, project_name=project_name, - spans=[], + start_time=ANY_BUT_NONE, + tags=["crewai"], + type="general", + spans=[ + 
SpanModel( + end_time=ANY_BUT_NONE, + id=ANY_STRING(), + input=ANY_DICT, + metadata={"created_from": "crewai"}, + name='Task: research_task', + output=ANY_DICT, + project_name=project_name, + start_time=ANY_BUT_NONE, + tags=["crewai"], + spans=[ + SpanModel( + end_time=ANY_BUT_NONE, + id=ANY_STRING(), + input=ANY_DICT, + metadata={"created_from": "crewai"}, + name='AI Agents Senior Data Researcher', + output=ANY_DICT, + project_name=project_name, + start_time=ANY_BUT_NONE, + tags=["crewai"], + type="general", + spans=[ + SpanModel( + end_time=ANY_BUT_NONE, + id=ANY_STRING(), + input=ANY_DICT, + metadata={ + "created_from": "crewai", + "usage": ANY_DICT, + }, + model=ANY_STRING(startswith="gpt-4o-mini"), + name='llm call', + output=ANY_DICT, + project_name=project_name, + provider="openai", + start_time=ANY_BUT_NONE, + tags=["crewai"], + type="llm", + usage={ + "prompt_tokens": ANY_BUT_NONE, + "completion_tokens": ANY_BUT_NONE, + "total_tokens": ANY_BUT_NONE, + }, + spans=[], + ) + ] + ) + ], + ), + SpanModel( + end_time=ANY_BUT_NONE, + id=ANY_STRING(), + input=ANY_DICT, + metadata={"created_from": "crewai"}, + name='Task: reporting_task', + output=ANY_DICT, + project_name=project_name, + start_time=ANY_BUT_NONE, + tags=["crewai"], + spans=[ + SpanModel( + end_time=ANY_BUT_NONE, + id=ANY_STRING(), + input=ANY_DICT, + metadata={"created_from": "crewai"}, + name='AI Agents Reporting Analyst', + output=ANY_DICT, + project_name=project_name, + start_time=ANY_BUT_NONE, + tags=["crewai"], + type="general", + spans=[ + SpanModel( + end_time=ANY_BUT_NONE, + id=ANY_STRING(), + input=ANY_DICT, + metadata={ + "created_from": "crewai", + "usage": ANY_DICT, + }, + model=ANY_STRING(startswith="gpt-4o-mini"), + name='llm call', + output=ANY_DICT, + project_name=project_name, + provider="openai", + start_time=ANY_BUT_NONE, + tags=["crewai"], + type="llm", + usage={ + "prompt_tokens": ANY_BUT_NONE, + "completion_tokens": ANY_BUT_NONE, + "total_tokens": ANY_BUT_NONE, + }, + spans=[], + ) + ] + ) + ], + ), + ], ), ], ) - # assert len(fake_backend.trace_trees) == 1 - # assert len(fake_backend.span_trees) == 2 - # - # assert_equal(EXPECTED_TRACE_TREE, fake_backend.trace_trees[0]) - - + assert len(fake_backend.trace_trees) == 1 + assert len(fake_backend.span_trees) == 1 + assert_equal(EXPECTED_TRACE_TREE, fake_backend.trace_trees[0]) From 6b4e77c60770706b77ea0258c8bf24c54639b7bb Mon Sep 17 00:00:00 2001 From: Alexander Barannikov Date: Thu, 9 Jan 2025 19:58:19 +0000 Subject: [PATCH 11/13] linter fix --- .../integrations/crewai/crewai_decorator.py | 38 +++++++++++++------ .../library_integration/crewai/test_crewai.py | 22 +++++------ 2 files changed, 36 insertions(+), 24 deletions(-) diff --git a/sdks/python/src/opik/integrations/crewai/crewai_decorator.py b/sdks/python/src/opik/integrations/crewai/crewai_decorator.py index d1d65d82f5..5295d19b6c 100644 --- a/sdks/python/src/opik/integrations/crewai/crewai_decorator.py +++ b/sdks/python/src/opik/integrations/crewai/crewai_decorator.py @@ -102,14 +102,20 @@ def _start_span_inputs_preprocessor( return result - def _parse_inputs(self, args: Tuple, kwargs: Dict, metadata: Dict, name: str) -> Tuple[Dict, str, SpanType]: + def _parse_inputs( + self, + args: Tuple, + kwargs: Dict, + metadata: Dict, + name: str, + ) -> Tuple[Dict, str, SpanType]: span_type: SpanType = "general" - input_dict = {} + input_dict: Dict[str, Any] = {} # Crew if name == "kickoff": metadata["object_type"] = "crew" - input_dict = kwargs.get("inputs") + input_dict = kwargs.get("inputs", {}) # 
Agent elif name == "execute_task": @@ -149,7 +155,7 @@ def _end_span_inputs_preprocessor( metadata = current_span.metadata object_type = metadata.pop("object_type") - model, output_dict, provider, usage = self._parse_outputs(object_type, output) + model, provider, output_dict, usage = self._parse_outputs(object_type, output) result = arguments_helpers.EndSpanParameters( output=output_dict, @@ -161,28 +167,38 @@ def _end_span_inputs_preprocessor( return result - def _parse_outputs(self, object_type, output): + def _parse_outputs( + self, + object_type: Optional[str], + output: Any, + ) -> Tuple[ + Optional[str], + Optional[str], + Dict[str, Any], + Optional[Dict[str, Any]], + ]: model = None provider = None usage = None + output_dict = {} if object_type == "crew": output_dict = output.model_dump() _ = output_dict.pop("token_usage") - output = output_dict elif object_type == "agent": - output = {"output": output} + output_dict = {"output": output} elif object_type == "task": output_dict = output.model_dump(include=TASK_KWARGS_KEYS_TO_LOG_AS_OUTPUT) - output = output_dict elif object_type == "completion": output_dict = output.model_dump() usage = output_dict.pop("usage", None) model = output_dict.pop("model", None) - provider = "openai" if output_dict.get('object') == 'chat.completion' else None - output = {} + provider = ( + "openai" if output_dict.get("object") == "chat.completion" else None + ) + output_dict = {} - return model, output, provider, usage + return model, provider, output_dict, usage def _generators_handler( self, diff --git a/sdks/python/tests/library_integration/crewai/test_crewai.py b/sdks/python/tests/library_integration/crewai/test_crewai.py index 977225fafa..e6a98449b7 100644 --- a/sdks/python/tests/library_integration/crewai/test_crewai.py +++ b/sdks/python/tests/library_integration/crewai/test_crewai.py @@ -1,5 +1,3 @@ -import litellm - from opik.api_objects.opik_client import get_client_cached from opik.integrations.crewai import track_crewai from .crew import LatestAiDevelopmentCrew @@ -12,8 +10,6 @@ assert_equal, ) -litellm.set_verbose = True - def test_crewai__happyflow( fake_backend, @@ -33,7 +29,7 @@ def test_crewai__happyflow( EXPECTED_TRACE_TREE = TraceModel( end_time=ANY_BUT_NONE, id=ANY_STRING(), - input={'topic': 'AI Agents'}, + input={"topic": "AI Agents"}, metadata={"created_from": "crewai"}, name="kickoff", output=ANY_DICT, @@ -58,7 +54,7 @@ def test_crewai__happyflow( id=ANY_STRING(), input=ANY_DICT, metadata={"created_from": "crewai"}, - name='Task: research_task', + name="Task: research_task", output=ANY_DICT, project_name=project_name, start_time=ANY_BUT_NONE, @@ -69,7 +65,7 @@ def test_crewai__happyflow( id=ANY_STRING(), input=ANY_DICT, metadata={"created_from": "crewai"}, - name='AI Agents Senior Data Researcher', + name="AI Agents Senior Data Researcher", output=ANY_DICT, project_name=project_name, start_time=ANY_BUT_NONE, @@ -85,7 +81,7 @@ def test_crewai__happyflow( "usage": ANY_DICT, }, model=ANY_STRING(startswith="gpt-4o-mini"), - name='llm call', + name="llm call", output=ANY_DICT, project_name=project_name, provider="openai", @@ -99,7 +95,7 @@ def test_crewai__happyflow( }, spans=[], ) - ] + ], ) ], ), @@ -108,7 +104,7 @@ def test_crewai__happyflow( id=ANY_STRING(), input=ANY_DICT, metadata={"created_from": "crewai"}, - name='Task: reporting_task', + name="Task: reporting_task", output=ANY_DICT, project_name=project_name, start_time=ANY_BUT_NONE, @@ -119,7 +115,7 @@ def test_crewai__happyflow( id=ANY_STRING(), input=ANY_DICT, 
metadata={"created_from": "crewai"}, - name='AI Agents Reporting Analyst', + name="AI Agents Reporting Analyst", output=ANY_DICT, project_name=project_name, start_time=ANY_BUT_NONE, @@ -135,7 +131,7 @@ def test_crewai__happyflow( "usage": ANY_DICT, }, model=ANY_STRING(startswith="gpt-4o-mini"), - name='llm call', + name="llm call", output=ANY_DICT, project_name=project_name, provider="openai", @@ -149,7 +145,7 @@ def test_crewai__happyflow( }, spans=[], ) - ] + ], ) ], ), From 4e3dbdbb1001795774c00c00c1a93560f3b83dea Mon Sep 17 00:00:00 2001 From: Alexander Barannikov Date: Thu, 9 Jan 2025 20:23:33 +0000 Subject: [PATCH 12/13] add git actions/workflow --- .github/workflows/lib-crewai-tests.yml | 51 +++++++++++++++++++ .../lib-integration-tests-runner.yml | 7 +++ 2 files changed, 58 insertions(+) create mode 100644 .github/workflows/lib-crewai-tests.yml diff --git a/.github/workflows/lib-crewai-tests.yml b/.github/workflows/lib-crewai-tests.yml new file mode 100644 index 0000000000..18556980b5 --- /dev/null +++ b/.github/workflows/lib-crewai-tests.yml @@ -0,0 +1,51 @@ +# Workflow to run CrewAI tests +# +# Please read inputs to provide correct values. +# +name: SDK Lib CrewAI Tests +run-name: "SDK Lib CrewAI Tests ${{ github.ref_name }} by @${{ github.actor }}" +env: + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + OPENAI_ORG_ID: ${{ secrets.OPENAI_ORG_ID }} +on: + workflow_call: + +jobs: + tests: + name: CrewAI Python ${{matrix.python_version}} + runs-on: ubuntu-latest + defaults: + run: + working-directory: sdks/python + + strategy: + fail-fast: true + matrix: + python_version: ["3.9", "3.10", "3.11", "3.12"] + + steps: + - name: Check out code + uses: actions/checkout@v4 + + - name: Setup Python ${{matrix.python_version}} + uses: actions/setup-python@v5 + with: + python-version: ${{matrix.python_version}} + + - name: Install opik + run: pip install . + + - name: Install test tools + run: | + cd ./tests + pip install --no-cache-dir --disable-pip-version-check -r test_requirements.txt + + - name: Install lib + run: | + cd ./tests + pip install --no-cache-dir --disable-pip-version-check -r library_integration/crewai/requirements.txt + + - name: Run tests + run: | + cd ./tests/library_integration/crewai/ + python -m pytest -vv . 
\ No newline at end of file diff --git a/.github/workflows/lib-integration-tests-runner.yml b/.github/workflows/lib-integration-tests-runner.yml index 4bf3d12b7e..75ee7a7a45 100644 --- a/.github/workflows/lib-integration-tests-runner.yml +++ b/.github/workflows/lib-integration-tests-runner.yml @@ -19,6 +19,7 @@ on: - haystack - guardrails - dspy + - crewai schedule: - cron: "0 0 */1 * *" pull_request: @@ -94,3 +95,9 @@ jobs: if: contains(fromJSON('["dspy", "all"]'), needs.init_environment.outputs.LIBS) uses: ./.github/workflows/lib-dspy-tests.yml secrets: inherit + + crewai_tests: + needs: [init_environment] + if: contains(fromJSON('["crewai", "all"]'), needs.init_environment.outputs.LIBS) + uses: ./.github/workflows/lib-crewai-tests.yml + secrets: inherit From b6d60281cbd96bbae3786da240895bc36ffda7d8 Mon Sep 17 00:00:00 2001 From: Alexander Barannikov Date: Thu, 9 Jan 2025 20:33:03 +0000 Subject: [PATCH 13/13] remove unsupported version --- .github/workflows/lib-crewai-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lib-crewai-tests.yml b/.github/workflows/lib-crewai-tests.yml index 18556980b5..5915f8ecef 100644 --- a/.github/workflows/lib-crewai-tests.yml +++ b/.github/workflows/lib-crewai-tests.yml @@ -21,7 +21,7 @@ jobs: strategy: fail-fast: true matrix: - python_version: ["3.9", "3.10", "3.11", "3.12"] + python_version: ["3.10", "3.11", "3.12"] steps: - name: Check out code
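
For reference, a minimal usage sketch that mirrors the happyflow test introduced in these patches. The local crew module import, the project name, and the inputs dict are illustrative assumptions; track_crewai, get_client_cached, the CrewBase-style LatestAiDevelopmentCrew fixture, and the kickoff/flush calls are taken directly from the test code above. This is a sketch under those assumptions, not a definitive integration guide.

    # Minimal sketch of enabling the CrewAI integration, mirroring test_crewai__happyflow.
    from opik.api_objects.opik_client import get_client_cached
    from opik.integrations.crewai import track_crewai

    from crew import LatestAiDevelopmentCrew  # hypothetical local module, as in the test fixtures

    # Patch CrewAI so crew kickoff, task execution, agent runs and LLM calls are logged as spans.
    track_crewai(project_name="crewai-integration-demo")

    crew = LatestAiDevelopmentCrew().crew()
    result = crew.kickoff(inputs={"topic": "AI Agents"})
    print(result)

    # Flush buffered traces before the process exits, as the test does explicitly.
    get_client_cached().flush()

With the decorator refactor in these patches, the expected result (per EXPECTED_TRACE_TREE in the test) is a "kickoff" trace containing a "kickoff" span, one span per task ("Task: research_task", "Task: reporting_task"), an agent span beneath each task, and an "llm call" span per completion carrying the model, the "openai" provider, and prompt/completion/total token usage.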