refactor(prompts): Merge the dev-kit package into core (#66)

ludwiktrammer authored Oct 7, 2024
1 parent a186751 commit ec46bec
Showing 19 changed files with 185 additions and 291 deletions.
6 changes: 5 additions & 1 deletion packages/ragbits-core/pyproject.toml
@@ -33,7 +33,8 @@ classifiers = [
]
dependencies = [
"jinja2>=3.1.4",
"pydantic>=2.9.1"
"pydantic>=2.9.1",
"typer~=0.12.5",
]

[project.optional-dependencies]
@@ -48,6 +49,9 @@ local = [
"transformers~=4.44.2",
"numpy~=1.24.0"
]
+lab = [
+    "gradio~=4.44.0",
+]

[tool.uv]
dev-dependencies = [
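For context, the new `lab` extra keeps Gradio out of the default install of ragbits-core; a minimal sketch of the guarded-import pattern this commit pairs with it (the names match the prompt lab hunk below):

# gradio is only importable when the package was installed with the extra,
# e.g. `pip install "ragbits-core[lab]"` (the install hint the app prints below)
try:
    import gradio as gr

    HAS_GRADIO = True
except ImportError:
    HAS_GRADIO = False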
@@ -1,7 +1,7 @@
import typer

-from .prompt_lab.app import lab_app
-from .promptfoo import generate_configs
+from .prompt.lab.app import lab_app
+from .prompt.promptfoo import generate_configs

prompts_app = typer.Typer(no_args_is_help=True)

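A hedged sketch of how these imports are typically registered on the Typer sub-app declared above (the command names here are assumptions; the truncated hunk does not show them):

import typer

from ragbits.core.prompt.lab.app import lab_app
from ragbits.core.prompt.promptfoo import generate_configs

prompts_app = typer.Typer(no_args_is_help=True)

# hypothetical registration, not visible in this diff
prompts_app.command(name="lab")(lab_app)
prompts_app.command(name="generate-promptfoo-configs")(generate_configs)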
@@ -2,15 +2,21 @@
from dataclasses import dataclass, field, replace
from typing import Any

-import gradio as gr
+try:
+    import gradio as gr
+
+    HAS_GRADIO = True
+except ImportError:
+    HAS_GRADIO = False

import jinja2
from pydantic import BaseModel
from rich.console import Console

from ragbits.core.llms import LiteLLM
from ragbits.core.llms.clients import LiteLLMOptions
from ragbits.core.prompt import Prompt
-from ragbits.dev_kit.prompt_lab.discovery.prompt_discovery import DEFAULT_FILE_PATTERN, PromptDiscovery
+from ragbits.core.prompt.discovery.prompt_discovery import DEFAULT_FILE_PATTERN, PromptDiscovery


@dataclass(frozen=True)
@@ -33,7 +39,7 @@ class PromptState:
llm_api_key: str | None = None


-def render_prompt(index: int, system_prompt: str, user_prompt: str, state: gr.State, *args: Any) -> gr.State:
+def render_prompt(index: int, system_prompt: str, user_prompt: str, state: PromptState, *args: Any) -> PromptState:
"""
Renders a prompt based on the provided key, system prompt, user prompt, and input variables.
@@ -48,7 +54,7 @@ def render_prompt(index: int, system_prompt: str, user_prompt: str, state: gr.St
args (tuple): A tuple of input values for the prompt.
Returns:
-        gr.State: The updated application state object.
+        PromptState: The updated application state object.
"""
prompt_class = state.prompts[index]
prompt_class.system_prompt_template = jinja2.Template(system_prompt)
@@ -64,37 +70,39 @@ def render_prompt(index: int, system_prompt: str, user_prompt: str, state: gr.St
return state


-def list_prompt_choices(state: gr.State) -> list[tuple[str, int]]:
+def list_prompt_choices(state: PromptState) -> list[tuple[str, int]]:
"""
Returns a list of prompt choices based on the discovered prompts.
This function generates a list of tuples containing the names of discovered prompts and their
corresponding indices.
Args:
-        state (gr.State): The application state object.
+        state (PromptState): The application state object.
Returns:
list[tuple[str, int]]: A list of tuples containing prompt names and their indices.
"""
-    return [(prompt.__name__, idx) for idx, prompt in enumerate(state.value.prompts)]
+    return [(prompt.__name__, idx) for idx, prompt in enumerate(state.prompts)]


-def send_prompt_to_llm(state: gr.State) -> str:
+def send_prompt_to_llm(state: PromptState) -> str:
"""
Sends the current prompt to the LLM and returns the response.
This function creates a LiteLLM client using the LLM model name and API key stored in the
application state. It then calls the LLM client to generate a response based on the current prompt.
Args:
-        state (gr.State): The application state object.
+        state (PromptState): The application state object.
Returns:
str: The response generated by the LLM.
"""
assert state.llm_model_name is not None, "LLM model name is not set."
llm_client = LiteLLM(model_name=state.llm_model_name, api_key=state.llm_api_key)

assert state.rendered_prompt is not None, "Prompt has not been rendered yet."
try:
response = asyncio.run(
llm_client.client.call(conversation=state.rendered_prompt.chat, options=LiteLLMOptions())
@@ -133,6 +141,13 @@ def lab_app(  # pylint: disable=missing-param-doc
Launches the interactive application for listing, rendering, and testing prompts
defined within the current project.
"""
+    if not HAS_GRADIO:
+        Console(stderr=True).print(
+            "To use Prompt Lab, you need the Gradio library. Please install it using the following command:\n"
+            r"[b]pip install ragbits-core\[lab][/b]"
+        )
+        return
+
prompts = PromptDiscovery(file_pattern=file_pattern).discover()

if not prompts:
@@ -154,7 +169,7 @@
)

prompt_selection_dropdown = gr.Dropdown(
-        choices=list_prompt_choices(prompts_state), value=0, label="Select Prompt"
+        choices=list_prompt_choices(prompts_state.value), value=0, label="Select Prompt"
)
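# A hedged sketch of the assumed gr.State semantics behind the .value unwrap above
# (MyPrompt stands in for any discovered Prompt subclass; the name is hypothetical):
#   prompts_state = gr.State(PromptState(prompts=[MyPrompt]))  # gr.State wraps the dataclass
#   list_prompt_choices(prompts_state.value)                   # helpers receive the unwrapped state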

@gr.render(inputs=[prompt_selection_dropdown, prompts_state])
@@ -185,8 +200,12 @@ def show_split(index: int, state: gr.State) -> None:
)

with gr.Column():
-                system_message = state.rendered_prompt.system_message if state.rendered_prompt else ""
-                gr.Textbox(label="Rendered System Prompt", value=system_message, interactive=False)
+                rendered_system_prompt = (
+                    state.rendered_prompt.rendered_system_prompt if state.rendered_prompt else ""
+                )
+                gr.Textbox(
+                    label="Rendered System Prompt", value=rendered_system_prompt, interactive=False
+                )

with gr.Row():
with gr.Column():
Expand All @@ -195,8 +214,10 @@ def show_split(index: int, state: gr.State) -> None:
)

with gr.Column():
-                user_message = state.rendered_prompt.user_message if state.rendered_prompt else ""
-                gr.Textbox(label="Rendered User Prompt", value=user_message, interactive=False)
+                rendered_user_prompt = (
+                    state.rendered_prompt.rendered_user_prompt if state.rendered_prompt else ""
+                )
+                gr.Textbox(label="Rendered User Prompt", value=rendered_user_prompt, interactive=False)

llm_enabled = state.llm_model_name is not None
prompt_ready = state.rendered_prompt is not None
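The two renames above suggest the rendered strings live on the object produced by rendering the prompt; a hedged sketch of reading them (the attribute names come from the diff, everything else is assumed):

def show_rendered(state: PromptState) -> None:
    rendered = state.rendered_prompt  # None until render_prompt() has run
    if rendered is not None:
        print(rendered.rendered_system_prompt)
        print(rendered.rendered_user_prompt)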
@@ -4,8 +4,8 @@
import yaml
from rich.console import Console

-from ragbits.dev_kit.prompt_lab.discovery import PromptDiscovery
-from ragbits.dev_kit.prompt_lab.discovery.prompt_discovery import DEFAULT_FILE_PATTERN
+from ragbits.core.prompt.discovery import PromptDiscovery
+from ragbits.core.prompt.discovery.prompt_discovery import DEFAULT_FILE_PATTERN


def generate_configs(
@@ -1,6 +1,6 @@
from pathlib import Path

-from ragbits.dev_kit.prompt_lab.discovery.prompt_discovery import PromptDiscovery
+from ragbits.core.prompt.discovery.prompt_discovery import PromptDiscovery

current_dir = Path(__file__).parent

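With this move, prompt discovery ships in ragbits-core; a minimal usage sketch based on the calls visible elsewhere in this diff (the constructor keyword and the discover() return shape follow their uses in lab_app() above):

from ragbits.core.prompt.discovery.prompt_discovery import DEFAULT_FILE_PATTERN, PromptDiscovery

# Scan the project for Prompt subclasses matching the file pattern.
prompts = PromptDiscovery(file_pattern=DEFAULT_FILE_PATTERN).discover()
for prompt_cls in prompts:
    print(prompt_cls.__name__)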
26 changes: 0 additions & 26 deletions packages/ragbits-dev-kit/README.md

This file was deleted.

64 changes: 0 additions & 64 deletions packages/ragbits-dev-kit/pyproject.toml

This file was deleted.

Empty file.
6 changes: 1 addition & 5 deletions pyproject.toml
@@ -5,8 +5,7 @@ description = "Ragbits development workspace"
readme = "README.md"
requires-python = ">=3.10"
dependencies = [
"ragbits[litellm,local,chromadb]",
"ragbits-dev-kit",
"ragbits[litellm,local,lab,chromadb]",
"ragbits-document-search[gcs]",
"ragbits-cli"
]
@@ -23,14 +22,12 @@ dev-dependencies = [

[tool.uv.sources]
ragbits = { workspace = true }
-ragbits-dev-kit = { workspace = true }
ragbits-document-search = { workspace = true }
ragbits-cli = { workspace = true }

[tool.uv.workspace]
members = [
"packages/ragbits-core",
"packages/ragbits-dev-kit",
"packages/ragbits-document-search",
"packages/ragbits-cli"
]
@@ -121,7 +118,6 @@ no_implicit_optional = true
explicit_package_bases = true
mypy_path = [
'packages/ragbits-core/src',
-    'packages/ragbits-dev-kit/src',
'packages/ragbits-document-search/src',
'packages/ragbits-cli/src',
]