refactor: merge llm client with llm #653

GitHub Actions / JUnit Test Report failed Jan 9, 2025 in 0s

239 tests run, 225 passed, 9 skipped, 5 failed.
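
All five failures below share a single root cause: generate_raw in packages/ragbits-core/src/ragbits/core/llms/base.py still dispatches through self.client.call(...), but this refactor merged the client into the LLM class, so LiteLLM instances no longer carry a client attribute.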

Annotations

Check failure on line 76 in packages/ragbits-core/tests/unit/llms/test_litellm.py

test_litellm.test_generation

AttributeError: 'LiteLLM' object has no attribute 'client'
Raw output
async def test_generation():
        """Test generation of a response."""
        llm = LiteLLM(api_key="test_key")
        prompt = MockPrompt("Hello, how are you?")
        options = LiteLLMOptions(mock_response="I'm fine, thank you.")
>       output = await llm.generate(prompt, options=options)

packages/ragbits-core/tests/unit/llms/test_litellm.py:76: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
packages/ragbits-core/src/ragbits/core/llms/base.py:125: in generate
    response = await self.generate_raw(prompt, options=options)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ragbits.core.llms.litellm.LiteLLM object at 0x7f15b3dfc3a0>
prompt = <unit.llms.test_litellm.MockPrompt object at 0x7f15b3dfc220>

    async def generate_raw(
        self,
        prompt: BasePrompt,
        *,
        options: LLMClientOptionsT | None = None,
    ) -> str:
        """
        Prepares and sends a prompt to the LLM and returns the raw response (without parsing).
    
        Args:
            prompt: Formatted prompt template with conversation.
            options: Options to use for the LLM client.
    
        Returns:
            Raw text response from LLM.
        """
        merged_options = (self.default_options | options) if options else self.default_options
>       response = await self.client.call(
            conversation=self._format_chat_for_llm(prompt),
            options=merged_options,
            json_mode=prompt.json_mode,
            output_schema=prompt.output_schema(),
        )
E       AttributeError: 'LiteLLM' object has no attribute 'client'

packages/ragbits-core/src/ragbits/core/llms/base.py:83: AttributeError

Check failure on line 85 in packages/ragbits-core/tests/unit/llms/test_litellm.py

test_litellm.test_generation_with_parser

AttributeError: 'LiteLLM' object has no attribute 'client'
Raw output
async def test_generation_with_parser():
        """Test generation of a response with a parser."""
        llm = LiteLLM(api_key="test_key")
        prompt = MockPromptWithParser("Hello, how are you?")
        options = LiteLLMOptions(mock_response="I'm fine, thank you.")
>       output = await llm.generate(prompt, options=options)

packages/ragbits-core/tests/unit/llms/test_litellm.py:85: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
packages/ragbits-core/src/ragbits/core/llms/base.py:125: in generate
    response = await self.generate_raw(prompt, options=options)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ragbits.core.llms.litellm.LiteLLM object at 0x7f15b3e33790>
prompt = <unit.llms.test_litellm.MockPromptWithParser object at 0x7f15b3e33a90>

    async def generate_raw(
        self,
        prompt: BasePrompt,
        *,
        options: LLMClientOptionsT | None = None,
    ) -> str:
        """
        Prepares and sends a prompt to the LLM and returns the raw response (without parsing).
    
        Args:
            prompt: Formatted prompt template with conversation.
            options: Options to use for the LLM client.
    
        Returns:
            Raw text response from LLM.
        """
        merged_options = (self.default_options | options) if options else self.default_options
>       response = await self.client.call(
            conversation=self._format_chat_for_llm(prompt),
            options=merged_options,
            json_mode=prompt.json_mode,
            output_schema=prompt.output_schema(),
        )
E       AttributeError: 'LiteLLM' object has no attribute 'client'

packages/ragbits-core/src/ragbits/core/llms/base.py:83: AttributeError

Check failure on line 102 in packages/ragbits-core/tests/unit/llms/test_litellm.py

test_litellm.test_generation_with_static_prompt

AttributeError: 'LiteLLM' object has no attribute 'client'
Raw output
async def test_generation_with_static_prompt():
        """Test generation of a response with a static prompt."""
    
        class StaticPrompt(Prompt):
            """A static prompt."""
    
            user_prompt = "Hello, how are you?"
    
        llm = LiteLLM(api_key="test_key")
        prompt = StaticPrompt()
        options = LiteLLMOptions(mock_response="I'm fine, thank you.")
>       output = await llm.generate(prompt, options=options)

packages/ragbits-core/tests/unit/llms/test_litellm.py:102: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
packages/ragbits-core/src/ragbits/core/llms/base.py:125: in generate
    response = await self.generate_raw(prompt, options=options)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ragbits.core.llms.litellm.LiteLLM object at 0x7f15b3e18af0>
prompt = <unit.llms.test_litellm.test_generation_with_static_prompt.<locals>.StaticPrompt object at 0x7f15b3e187f0>

    async def generate_raw(
        self,
        prompt: BasePrompt,
        *,
        options: LLMClientOptionsT | None = None,
    ) -> str:
        """
        Prepares and sends a prompt to the LLM and returns the raw response (without parsing).
    
        Args:
            prompt: Formatted prompt template with conversation.
            options: Options to use for the LLM client.
    
        Returns:
            Raw text response from LLM.
        """
        merged_options = (self.default_options | options) if options else self.default_options
>       response = await self.client.call(
            conversation=self._format_chat_for_llm(prompt),
            options=merged_options,
            json_mode=prompt.json_mode,
            output_schema=prompt.output_schema(),
        )
E       AttributeError: 'LiteLLM' object has no attribute 'client'

packages/ragbits-core/src/ragbits/core/llms/base.py:83: AttributeError

Check failure on line 117 in packages/ragbits-core/tests/unit/llms/test_litellm.py

test_litellm.test_generation_with_static_prompt_with_parser

AttributeError: 'LiteLLM' object has no attribute 'client'
Raw output
async def test_generation_with_static_prompt_with_parser():
        """Test generation of a response with a static prompt with a parser."""
    
        class StaticPromptWithParser(Prompt[None, int]):
            """A static prompt."""
    
            user_prompt = "Hello, how are you?"
    
        llm = LiteLLM(api_key="test_key")
        prompt = StaticPromptWithParser()
        options = LiteLLMOptions(mock_response="42")
>       output = await llm.generate(prompt, options=options)

packages/ragbits-core/tests/unit/llms/test_litellm.py:117: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
packages/ragbits-core/src/ragbits/core/llms/base.py:125: in generate
    response = await self.generate_raw(prompt, options=options)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ragbits.core.llms.litellm.LiteLLM object at 0x7f15b3dfda20>
prompt = <unit.llms.test_litellm.test_generation_with_static_prompt_with_parser.<locals>.StaticPromptWithParser object at 0x7f15b3dfd180>

    async def generate_raw(
        self,
        prompt: BasePrompt,
        *,
        options: LLMClientOptionsT | None = None,
    ) -> str:
        """
        Prepares and sends a prompt to the LLM and returns the raw response (without parsing).
    
        Args:
            prompt: Formatted prompt template with conversation.
            options: Options to use for the LLM client.
    
        Returns:
            Raw text response from LLM.
        """
        merged_options = (self.default_options | options) if options else self.default_options
>       response = await self.client.call(
            conversation=self._format_chat_for_llm(prompt),
            options=merged_options,
            json_mode=prompt.json_mode,
            output_schema=prompt.output_schema(),
        )
E       AttributeError: 'LiteLLM' object has no attribute 'client'

packages/ragbits-core/src/ragbits/core/llms/base.py:83: AttributeError

Check failure on line 140 in packages/ragbits-core/tests/unit/llms/test_litellm.py

test_litellm.test_generation_with_pydantic_output

AttributeError: 'LiteLLM' object has no attribute 'client'
Raw output
async def test_generation_with_pydantic_output():
        """Test generation of a response with a Pydantic output."""
    
        class OutputModel(BaseModel):
            """Output model for the prompt."""
    
            response: str
            happiness: int
    
        class PydanticPrompt(Prompt[None, OutputModel]):
            """A Pydantic prompt."""
    
            user_prompt = "Hello, how are you?"
    
        llm = LiteLLM(api_key="test_key")
        prompt = PydanticPrompt()
        options = LiteLLMOptions(mock_response='{"response": "I\'m fine, thank you.", "happiness": 100}')
>       output = await llm.generate(prompt, options=options)

packages/ragbits-core/tests/unit/llms/test_litellm.py:140: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
packages/ragbits-core/src/ragbits/core/llms/base.py:125: in generate
    response = await self.generate_raw(prompt, options=options)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ragbits.core.llms.litellm.LiteLLM object at 0x7f15b3e2cf40>
prompt = <unit.llms.test_litellm.test_generation_with_pydantic_output.<locals>.PydanticPrompt object at 0x7f15b3e2c9d0>

    async def generate_raw(
        self,
        prompt: BasePrompt,
        *,
        options: LLMClientOptionsT | None = None,
    ) -> str:
        """
        Prepares and sends a prompt to the LLM and returns the raw response (without parsing).
    
        Args:
            prompt: Formatted prompt template with conversation.
            options: Options to use for the LLM client.
    
        Returns:
            Raw text response from LLM.
        """
        merged_options = (self.default_options | options) if options else self.default_options
>       response = await self.client.call(
            conversation=self._format_chat_for_llm(prompt),
            options=merged_options,
            json_mode=prompt.json_mode,
            output_schema=prompt.output_schema(),
        )
E       AttributeError: 'LiteLLM' object has no attribute 'client'

packages/ragbits-core/src/ragbits/core/llms/base.py:83: AttributeError
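
Every traceback above points at the same call site in generate_raw, so a single change should clear all five failures. A minimal sketch of what that change could look like, assuming the merged LLM class now exposes the former client's call logic as a method on itself (the _call name below is a hypothetical placeholder, not confirmed by this report):

    async def generate_raw(
        self,
        prompt: BasePrompt,
        *,
        options: LLMClientOptionsT | None = None,
    ) -> str:
        merged_options = (self.default_options | options) if options else self.default_options
        # The separate client object was removed in this refactor, so the
        # request goes through the LLM instance itself instead of self.client.
        response = await self._call(  # hypothetical name for the merged method
            conversation=self._format_chat_for_llm(prompt),
            options=merged_options,
            json_mode=prompt.json_mode,
            output_schema=prompt.output_schema(),
        )
        return response

Whatever the merged method is actually named, updating this one call site in base.py should be enough, since the five failing tests differ only in the prompt they pass to generate.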