func

ASSISTANT_ROLE module-attribute

ASSISTANT_ROLE = MessageRole(ASSISTANT)

The assistant role, with no name specified.

MaybeOneOrMany module-attribute

MaybeOneOrMany = Union[_T, Sequence[_T], None]

A type that can be either a single item, a sequence of items, or None.

OneOrMany module-attribute

OneOrMany = Union[_T, Sequence[_T]]

A type that can be either a single item or a sequence of items.
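
For illustration, a minimal, self-contained sketch of how these two aliases read in annotations (_T here is a local TypeVar standing in for the library's own):

from typing import Sequence, TypeVar, Union

_T = TypeVar("_T")
OneOrMany = Union[_T, Sequence[_T]]
MaybeOneOrMany = Union[_T, Sequence[_T], None]

def normalize_stop(stop: MaybeOneOrMany[str]) -> Sequence[str]:
    # Accept a single stop string, a sequence of them, or None.
    if stop is None:
        return []
    if isinstance(stop, str):
        return [stop]
    return list(stop)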

SYSTEM_ROLE module-attribute

SYSTEM_ROLE = MessageRole(SYSTEM)

The system role, with no name specified.

StrOrImg module-attribute

StrOrImg = Union[String, Image]

A type that can be either a string or an image.

String module-attribute

String = Union[StringFuture, str]

String is a type alias for StringFuture or str.

TOOL_ROLE module-attribute

TOOL_ROLE = MessageRole(TOOL)

The tool role, with no name specified.

USER_ROLE module-attribute

USER_ROLE = MessageRole(USER)

The user role, with no name specified.

AIMessage

AIMessage(
    content: Any = None,
    *,
    role: Optional[MessageRole] = None,
    tool_calls: Optional[List[ToolCall]] = None,
    **kwargs: Any
)

Bases: BaseMessage

An assistant message in the conversation.

Source code in src/appl/core/message.py
def __init__(
    self,
    content: Any = None,
    *,
    role: Optional[MessageRole] = None,
    tool_calls: Optional[List[ToolCall]] = None,
    **kwargs: Any,
) -> None:
    """Create an assistant message with content and extra arguments."""
    if tool_calls is None:
        tool_calls = []
    super().__init__(content=content, role=role, tool_calls=tool_calls, **kwargs)
    self.validate_role(ASSISTANT_ROLE)
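
A hedged usage sketch; the import paths below follow the source locations shown on this page (src/appl/core/...) and may differ in the installed package:

from appl.core.message import AIMessage
from appl.core.tool import ToolCall

msg = AIMessage(
    content="Let me check the weather.",
    tool_calls=[ToolCall(id="call_1", name="get_weather", args='{"city": "Paris"}')],
)
print(msg.is_ai)       # True
print(msg.get_dict())  # includes "tool_calls" in the OpenAI format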

is_ai property

is_ai: bool

Whether the message is an assistant message.

is_system property

is_system: bool

Whether the message is a system message.

is_tool property

is_tool: bool

Whether the message is a tool message.

is_user property

is_user: bool

Whether the message is a user message.

get_content

get_content(as_str: bool = False) -> Any

Get the content of the message.

Materialize the content if it is a FutureValue.

Source code in src/appl/core/message.py
def get_content(self, as_str: bool = False) -> Any:
    """Get the content of the message.

    Materialize the content if it is a FutureValue.
    """
    content = self.content
    if content is not None:
        if isinstance(content, ContentList):
            return content.get_contents()  # return a list of dict
        if isinstance(content, FutureValue):
            # materialize the content
            content = content.val
        if as_str:  # not apply to ContentList
            content = str(content)
    return content

get_dict

get_dict(
    default_role: Optional[MessageRole] = None,
) -> Dict[str, Any]

Return a dict representation of the message.

Source code in src/appl/core/message.py
def get_dict(self, default_role: Optional[MessageRole] = None) -> Dict[str, Any]:
    """Return a dict representation of the message."""
    data = super().get_dict(default_role)
    if len(self.tool_calls):
        data["tool_calls"] = [call.get_dict() for call in self.tool_calls]
    return data

merge

merge(other: 'BaseMessage') -> Optional['Message']

Merge the message with another message.

Source code in src/appl/core/message.py
def merge(self: "Message", other: "BaseMessage") -> Optional["Message"]:
    """Merge the message with another message."""
    if self.should_merge(other):
        # merge the content
        res = self.model_copy()
        if isinstance(other.content, ContentList) and not isinstance(
            res.content, ContentList
        ):
            res.content = ContentList(contents=[res.content])
        res.content += other.content
        return res
    return None

should_merge

should_merge(other: 'BaseMessage') -> bool

Whether the message should be merged with the other message.

Source code in src/appl/core/message.py
def should_merge(self, other: "BaseMessage") -> bool:
    """Whether the message should be merged with the other message."""
    if self.is_tool or other.is_tool:
        # not merge tool messages
        return False
    if self.content is None or other.content is None:
        return False
    return self.role == other.role
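
A small sketch of these merge semantics (imports assumed from the source locations above):

from appl.core.message import UserMessage

a = UserMessage("Hello, ")
b = UserMessage("world!")
merged = a.merge(b)  # same role and both have content, so they merge
print(merged.get_content(as_str=True))  # "Hello, world!"
print(a.merge(UserMessage()))           # None: a missing content blocks merging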

str_with_default_role

str_with_default_role(
    default_role: Optional[MessageRole] = None,
) -> str

Return the string representation of the message with default role.

Source code in src/appl/core/message.py
def str_with_default_role(self, default_role: Optional[MessageRole] = None) -> str:
    """Return the string representation of the message with default role."""
    return self._get_colored_content(self.role or default_role)

validate_role

validate_role(target_role: MessageRole) -> None

Validate the role of the message, fill the role if not provided.

Source code in src/appl/core/message.py
def validate_role(self, target_role: MessageRole) -> None:
    """Validate the role of the message, fill the role if not provided."""
    target_type = target_role.type
    if target_type is None:
        raise ValueError("Target role type must be provided.")
    if self.role is None:
        self.role = target_role
    elif self.role.type is None:
        # fill the role type as the target type
        self.role = MessageRole(type=target_type, name=self.role.name)
    elif self.role.type != target_type:
        raise ValueError(f"Invalid role for {target_type} message: {self.role}")

BaseMessage

BaseMessage(content: Any = None, *args: Any, **kwargs: Any)

Bases: BaseModel, ABC

The base class for messages.

Provides a more flexible way to create a message.

Source code in src/appl/core/message.py
def __init__(self, content: Any = None, *args: Any, **kwargs: Any) -> None:
    """Create a message with content and extra arguments.

    Provides a more flexible way to create a message.
    """
    super().__init__(content=content, *args, **kwargs)

is_ai property

is_ai: bool

Whether the message is an assistant message.

is_system property

is_system: bool

Whether the message is a system message.

is_tool property

is_tool: bool

Whether the message is a tool message.

is_user property

is_user: bool

Whether the message is a user message.

get_content

get_content(as_str: bool = False) -> Any

Get the content of the message.

Materialize the content if it is a FutureValue.

Source code in src/appl/core/message.py
def get_content(self, as_str: bool = False) -> Any:
    """Get the content of the message.

    Materialize the content if it is a FutureValue.
    """
    content = self.content
    if content is not None:
        if isinstance(content, ContentList):
            return content.get_contents()  # return a list of dict
        if isinstance(content, FutureValue):
            # materialize the content
            content = content.val
        if as_str:  # not apply to ContentList
            content = str(content)
    return content

get_dict

get_dict(
    default_role: Optional[MessageRole] = None,
) -> Dict[str, Any]

Return a dict representation of the message.

Source code in src/appl/core/message.py
def get_dict(self, default_role: Optional[MessageRole] = None) -> Dict[str, Any]:
    """Return a dict representation of the message."""
    # materialize the content using str()
    role = self.role or default_role
    if role is None:
        raise ValueError("Role or default role must be provided.")
    if role.type is None:
        if default_role and default_role.type:
            role = MessageRole(type=default_role.type, name=role.name)
        else:
            raise ValueError("Role type must be provided.")
    data = {"content": self.get_content(as_str=True), **role.get_dict()}
    return data

merge

merge(other: 'BaseMessage') -> Optional['Message']

Merge the message with another message.

Source code in src/appl/core/message.py
def merge(self: "Message", other: "BaseMessage") -> Optional["Message"]:
    """Merge the message with another message."""
    if self.should_merge(other):
        # merge the content
        res = self.model_copy()
        if isinstance(other.content, ContentList) and not isinstance(
            res.content, ContentList
        ):
            res.content = ContentList(contents=[res.content])
        res.content += other.content
        return res
    return None

should_merge

should_merge(other: 'BaseMessage') -> bool

Whether the message should be merged with the other message.

Source code in src/appl/core/message.py
def should_merge(self, other: "BaseMessage") -> bool:
    """Whether the message should be merged with the other message."""
    if self.is_tool or other.is_tool:
        # not merge tool messages
        return False
    if self.content is None or other.content is None:
        return False
    return self.role == other.role

str_with_default_role

str_with_default_role(
    default_role: Optional[MessageRole] = None,
) -> str

Return the string representation of the message with default role.

Source code in src/appl/core/message.py
def str_with_default_role(self, default_role: Optional[MessageRole] = None) -> str:
    """Return the string representation of the message with default role."""
    return self._get_colored_content(self.role or default_role)

validate_role

validate_role(target_role: MessageRole) -> None

Validate the role of the message, fill the role if not provided.

Source code in src/appl/core/message.py
def validate_role(self, target_role: MessageRole) -> None:
    """Validate the role of the message, fill the role if not provided."""
    target_type = target_role.type
    if target_type is None:
        raise ValueError("Target role type must be provided.")
    if self.role is None:
        self.role = target_role
    elif self.role.type is None:
        # fill the role type as the target type
        self.role = MessageRole(type=target_type, name=self.role.name)
    elif self.role.type != target_type:
        raise ValueError(f"Invalid role for {target_type} message: {self.role}")

BaseServer

Bases: ABC

The base class for all servers.

Servers are responsible for communicating with the underlying model.

model_name abstractmethod property

model_name: str

The name of the model used by the server.

close abstractmethod

close()

Close the server.

Source code in src/appl/core/server.py
@abstractmethod
def close(self):
    """Close the server."""
    raise NotImplementedError

create

create(
    args: GenArgs, gen_id: str, **kwargs: Any
) -> CompletionResponse

Create a CompletionResponse from the model with given arguments.

Parameters:

  • args (GenArgs) –

    The arguments for generating the response

  • gen_id (str) –

    The ID of the generation

  • **kwargs (Any, default: {} ) –

    Additional keyword arguments

Returns:

  • CompletionResponse –

    The response from the model.

Source code in src/appl/core/server.py
def create(self, args: GenArgs, gen_id: str, **kwargs: Any) -> CompletionResponse:
    """Create a CompletionResponse from the model with given arguments.

    Args:
        args: The arguments for generating the response
        gen_id: The ID of the generation
        **kwargs: Additional keyword arguments
    Returns:
        The response from the model.
    """
    log_llm_call_args = configs.getattrs("settings.logging.display.llm_call_args")
    log_llm_usage = configs.getattrs("settings.logging.display.llm_usage")
    log_llm_response = configs.getattrs("settings.logging.display.llm_response")

    create_args = self._get_create_args(args, **kwargs)
    if log_llm_call_args:
        logger.info(f"Call generation [{gen_id}] with args: {create_args}")

    results = self._create(gen_id=gen_id, **create_args)
    if log_llm_response:
        logger.info(f"Generation [{gen_id}] results: {results}")
    if results.usage and log_llm_usage:
        logger.info(f"Generation [{gen_id}] token usage: {results.usage}")
    if results.cost:
        if "mock_response" in create_args:
            if configs.getattrs("settings.logging.display.llm_cost"):
                logger.info(
                    f"Mock response, estimated cost for real request: {results.cost:.4f}"
                )
        else:
            _update_cost(
                self.model_name,
                results.cost,
                getattr(self, "_cost_currency", "USD"),
            )

    dump_args = create_args.copy()
    for k, v in dump_args.items():
        if k in ["response_format", "response_model"]:
            if isinstance(v, type) and issubclass(v, BaseModel):
                dump_args[k] = json.dumps(v.model_json_schema(), indent=4)

    def trace_gen_response(response: CompletionResponse) -> None:
        add_to_trace(
            GenerationResponseEvent(name=gen_id, args=dump_args, ret=str(response))
        )

    results.register_post_finish_callback(trace_gen_response)
    return results

BracketedDefinition

BracketedDefinition(
    name: Optional[String] = None,
    desc: Optional[String] = None,
    *,
    sep: String = ": ",
    details: Any = None,
    fstr: Optional[str] = None,
    var_name: Optional[str] = None
)

Bases: Definition

A Definition that is formatted with square brackets.

Parameters:

  • name (Optional[String], default: None ) –

    The name of the definition.

  • desc (Optional[String], default: None ) –

    A description of the definition.

  • sep (String, default: ': ' ) –

    The separator between the name and description.

  • details (Any, default: None ) –

    Additional details about the definition.

  • fstr (Optional[str], default: None ) –

    The format string for the definition.

  • var_name (Optional[str], default: None ) –

    The name of the variable that the definition is stored in.

Source code in src/appl/core/promptable/definition.py
def __init__(
    self,
    name: Optional[String] = None,
    desc: Optional[String] = None,
    *,
    sep: String = ": ",
    details: Any = None,
    fstr: Optional[str] = None,
    var_name: Optional[str] = None,
):
    """Initialize the Definition with the given name and description.

    Args:
        name: The name of the definition.
        desc: A description of the definition.
        sep: The separator between the name and description.
        details: Additional details about the definition.
        fstr: The format string for the definition.
        var_name: The name of the variable that the definition is stored in.
    """
    self.name = name or self.name or self.__doc__
    if self.name is None:
        raise ValueError("Name must be provided for Definition.")

    if desc is not None:
        self.desc = desc
    self.sep = sep
    self.details = details
    if fstr is not None:
        self.fstr = fstr
    self.var_name = var_name or self.name

    self._forks.append(self)

CallFuture

CallFuture(
    func: Callable,
    *args: Any,
    use_process: bool = False,
    lazy_eval: bool = False,
    **kwargs: Any
)

Bases: FutureValue

Represent a function call that may not be ready yet.

Parameters:

  • func (Callable) –

    The function to call.

  • *args (Any, default: () ) –

    The arguments of the function.

  • use_process (bool, default: False ) –

    Whether to use a process pool executor.

  • lazy_eval (bool, default: False ) –

    Whether to delay the start of the call until needed.

  • **kwargs (Any, default: {} ) –

    The keyword arguments of the function.

Source code in src/appl/core/types/futures.py
def __init__(
    self,
    func: Callable,
    *args: Any,
    use_process: bool = False,
    lazy_eval: bool = False,
    **kwargs: Any,
):
    """Initialize the CallFuture.

    Args:
        func: The function to call.
        *args: The arguments of the function.
        use_process: Whether to use a process pool executor.
        lazy_eval: Whether to delay the start of the call until needed.
        **kwargs: The keyword arguments of the function.
    """
    # ? maybe use a global executor from the config, or use thread-level executor if running in multi-threading.
    self._executor = (
        ProcessPoolExecutor(max_workers=1)
        if use_process
        else ThreadPoolExecutor(
            max_workers=1, thread_name_prefix=threading.current_thread().name
        )
    )
    self._submit_fn = lambda: self._executor.submit(func, *args, **kwargs)
    self._submitted = False
    self._info = func.__name__
    # self._debug = False
    # if self._debug:
    #     # arg and kwargs might contains future objects
    #     args_list = [f"{arg}" for arg in args] + [
    #         f"{k}={v!r}" for k, v in kwargs.items()
    #     ]
    #     args_str = ", ".join(args_list)
    #     self._info += f"({args_str})"
    if not lazy_eval:
        # submit the call right away; with lazy_eval, submission is
        # delayed until the result is first needed
        self._submit()
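
A minimal usage sketch (import path assumed from the source location above; with lazy_eval, submission is assumed to happen when the future is first accessed):

import time
from appl.core.types.futures import CallFuture

def slow_add(a: int, b: int) -> int:
    time.sleep(0.5)
    return a + b

fut = CallFuture(slow_add, 1, 2)  # starts running in a background thread
print(fut.done())    # likely False right away
print(fut.result())  # blocks until the call finishes -> 3

lazy = CallFuture(slow_add, 3, 4, lazy_eval=True)  # not submitted yet
print(lazy.result())  # submitted on first use -> 7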

future property

future

The future object of the call.

val property

val

The value of the future.

cancel

cancel() -> bool

Cancel the call.

Source code in src/appl/core/types/futures.py
def cancel(self) -> bool:
    """Cancel the call."""
    # Attempt to cancel the call
    res = self.future.cancel()
    if res:
        self._executor.shutdown()  # the executor is not needed anymore
    return res

done

done() -> bool

Check if the call has completed.

Source code in src/appl/core/types/futures.py
def done(self) -> bool:
    """Check if the call has completed."""
    # Check if the future has completed
    return self.future.done()

result

result(timeout: Optional[float] = None) -> Any

Get the result of the call.

Source code in src/appl/core/types/futures.py
def result(self, timeout: Optional[float] = None) -> Any:
    """Get the result of the call."""
    # This will block until the result is available
    res = self.future.result(timeout)
    self._executor.shutdown()  # the executor is not needed anymore
    return res

CmpStringFuture

CmpStringFuture(
    a: StringFuture,
    b: StringFuture,
    op: Callable[[str, str], bool],
)

Bases: FutureValue

Represent a comparison between a StringFuture and another value.

Source code in src/appl/core/types/futures.py
def __init__(
    self, a: "StringFuture", b: "StringFuture", op: Callable[[str, str], bool]
):
    """Initialize the CmpStringFuture."""
    self._a = a
    self._b = b
    self._op = op

val property

val

The value of the future.

CompletionResponse

Bases: BaseModel

A class wrapping the response from the LLM model.

For a streaming response, it tracks the chunks of the response and builds the complete response when the streaming is finished.

chunks class-attribute instance-attribute

chunks: List[Union[ModelResponse, ChatCompletionChunk]] = (
    Field(
        [],
        description="The chunks of the response when streaming",
    )
)

The chunks of the response when streaming.

complete_response property

complete_response: Union[ModelResponse, ChatCompletion]

The complete response from the model. This will block until the response is finished.

cost class-attribute instance-attribute

cost: Optional[float] = Field(
    None, description="The cost of the completion"
)

The cost of the completion.

is_finished class-attribute instance-attribute

is_finished: bool = Field(
    False,
    description="Whether the response stream is finished",
)

Whether the response stream is finished.

is_stream class-attribute instance-attribute

is_stream: bool = Field(
    False, description="Whether the response is a stream"
)

Whether the response is a stream.

message class-attribute instance-attribute

message: Optional[str] = Field(
    None,
    description="The top-choice message from the completion",
)

The top-choice message from the completion.

post_finish_callbacks class-attribute instance-attribute

post_finish_callbacks: List[Callable] = Field(
    [], description="The post finish callbacks"
)

The post finish callbacks.

raw_response class-attribute instance-attribute

raw_response: Any = Field(
    None, description="The raw response from the model"
)

The raw response from the model.

response_model class-attribute instance-attribute

response_model: Any = Field(
    None,
    description="The BaseModel's subclass specifying the response format.",
)

The BaseModel's subclass specifying the response format.

response_obj class-attribute instance-attribute

response_obj: Any = Field(
    None,
    description="The response object of response model, could be a stream",
)

The response object of response model, could be a stream.

results property

results: Any

The results of the response.

Returns:

  • message ( str ) –

    The message if the response is a text completion.

  • tool_calls ( List[ToolCall] ) –

    The tool calls if the response is a list of tool calls.

  • response_obj ( Any ) –

    The object if the response is a response object.

tool_calls class-attribute instance-attribute

tool_calls: List[ToolCall] = Field(
    [], description="The tool calls"
)

The tool calls.

type property

type

The type of the response.

usage class-attribute instance-attribute

usage: Optional[CompletionUsage] = Field(
    None, description="The usage of the completion"
)

The usage of the completion.

format_stream

format_stream()

Format the stream response as a text generator.

Source code in src/appl/core/response.py
def format_stream(self):
    """Format the stream response as a text generator."""
    suffix = ""
    for chunk in iter(self):
        # chunk: Union[ModelResponse, ChatCompletionChunk]
        delta: Union[Delta, ChoiceDelta] = chunk.choices[0].delta  # type: ignore

        if delta is not None:
            if delta.content is not None:
                yield delta.content
            elif getattr(delta, "tool_calls", None):
                f: Union[Function, ChoiceDeltaToolCallFunction] = delta.tool_calls[
                    0
                ].function  # type: ignore
                if f.name is not None:
                    if suffix:
                        yield f"{suffix}, "
                    yield f"{f.name}("
                    suffix = ")"
                if f.arguments is not None:
                    yield f.arguments
    yield suffix

register_post_finish_callback

register_post_finish_callback(callback: Callable) -> None

Register a post finish callback.

The callback will be called after the response is finished.

Source code in src/appl/core/response.py
def register_post_finish_callback(self, callback: Callable) -> None:
    """Register a post finish callback.

    The callback will be called after the response is finished.
    """
    if self.is_finished:
        callback(self)
    else:
        self.post_finish_callbacks.append(callback)

set_response_obj

set_response_obj(response_obj: Any) -> None

Set the response object.

Source code in src/appl/core/response.py
def set_response_obj(self, response_obj: Any) -> None:
    """Set the response object."""
    self.response_obj = response_obj

streaming

streaming(
    display: bool = True, title: str = "APPL Streaming"
) -> CompletionResponse

Stream the response object and finish the response.

Source code in src/appl/core/response.py
def streaming(
    self, display: bool = True, title: str = "APPL Streaming"
) -> "CompletionResponse":
    """Stream the response object and finish the response."""
    if not self.is_stream:
        raise ValueError("Cannot iterate over non-streaming response")
    if self.is_finished:
        return self

    if self.response_obj is not None:
        target = self.response_obj
    else:
        target = self.format_stream()
    if display:
        refresh_interval = configs.getattrs(
            "settings.logging.display.stream_interval", 1.0
        )
        start_time = time.time()

        def panel(
            content: str, iter_index: Optional[int] = None, truncate: bool = False
        ) -> Panel:
            style = "magenta"
            display_title = title
            if iter_index is not None:
                time_elapsed = time.time() - start_time
                avg_iters_per_sec = (iter_index + 1) / time_elapsed
                stream_info = (
                    f"[{time_elapsed:.3f}s ({avg_iters_per_sec:.2f} it/s)]"
                )
                display_title += f" - {stream_info}"
            return make_panel(
                content, title=display_title, style=style, truncate=truncate
            )

        with Live(
            panel("Waiting for Response ..."),
            refresh_per_second=refresh_interval,
            # vertical_overflow="visible", # manually display the tail lines instead
        ) as live:
            content = ""
            for i, chunk in enumerate(iter(target)):
                if isinstance(chunk, BaseModel):
                    content = json.dumps(chunk.model_dump(), indent=2)
                else:
                    content += str(chunk)
                live.update(panel(content, i, truncate=True))
                # live.refresh() # might be too frequent
            # display untruncated content at the end
            live.update(panel(content, i))
            live.refresh()
    else:
        for chunk in iter(target):
            pass
    if self.response_obj is not None:
        self.set_response_obj(chunk)
    return self

ContentList

Bases: BaseModel

Represent a list of contents containing text and images.

append

append(content: StrOrImg) -> None

Append a content to the list.

If the last content is a string, it will be concatenated with the new content.

Source code in src/appl/core/types/content.py
def append(self, content: StrOrImg) -> None:
    """Append a content to the list.

    If the last content is a string, it will be concatenated with the new content.
    """
    if is_string(content) and len(self.contents) and is_string(self.contents[-1]):
        self.contents[-1] += content  # type: ignore
    else:
        self.contents.append(content)

extend

extend(contents: list[StrOrImg]) -> None

Extend the list with multiple contents.

Source code in src/appl/core/types/content.py
def extend(self, contents: list[StrOrImg]) -> None:
    """Extend the list with multiple contents."""
    for content in contents:
        self.append(content)

get_contents

get_contents() -> List[Dict[str, Any]]

Return the contents as a list of dictionaries.

Source code in src/appl/core/types/content.py
def get_contents(self) -> List[Dict[str, Any]]:
    """Return the contents as a list of dictionaries."""

    def get_dict(content):
        if isinstance(content, Image):
            image_args = {"url": content.url}
            if content.detail:
                image_args["detail"] = content.detail
            return {"type": "image_url", "image_url": image_args}
        return {"type": "text", "text": str(content)}

    return [get_dict(c) for c in self.contents]
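
A sketch of building multimodal content (imports assumed from the source location above):

from appl.core.types.content import ContentList, Image

cl = ContentList(contents=["Describe this image:"])
cl.append(" Focus on the foreground.")  # concatenated onto the trailing string
cl.append(Image(url="https://example.com/cat.png", detail="low"))
print(cl.get_contents())
# [{'type': 'text', 'text': 'Describe this image: Focus on the foreground.'},
#  {'type': 'image_url', 'image_url': {'url': 'https://example.com/cat.png', 'detail': 'low'}}]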

Definition

Definition(
    name: Optional[String] = None,
    desc: Optional[String] = None,
    *,
    sep: String = ": ",
    details: Any = None,
    fstr: Optional[str] = None,
    var_name: Optional[str] = None
)

Bases: Promptable, Formattable

Represent a definition of a concept.

Attributes:

  • fstr (str) –

    The format string for the definition.

  • name (Optional[String]) –

    The name of the definition.

  • desc (String) –

    A description of the definition.

  • _forks (List[Definition]) –

    A list of all instances of this class.

Parameters:

  • name (Optional[String], default: None ) –

    The name of the definition.

  • desc (Optional[String], default: None ) –

    A description of the definition.

  • sep (String, default: ': ' ) –

    The separator between the name and description.

  • details (Any, default: None ) –

    Additional details about the definition.

  • fstr (Optional[str], default: None ) –

    The format string for the definition.

  • var_name (Optional[str], default: None ) –

    The name of the variable that the definition is stored in.

Source code in src/appl/core/promptable/definition.py
def __init__(
    self,
    name: Optional[String] = None,
    desc: Optional[String] = None,
    *,
    sep: String = ": ",
    details: Any = None,
    fstr: Optional[str] = None,
    var_name: Optional[str] = None,
):
    """Initialize the Definition with the given name and description.

    Args:
        name: The name of the definition.
        desc: A description of the definition.
        sep: The separator between the name and description.
        details: Additional details about the definition.
        fstr: The format string for the definition.
        var_name: The name of the variable that the definition is stored in.
    """
    self.name = name or self.name or self.__doc__
    if self.name is None:
        raise ValueError("Name must be provided for Definition.")

    if desc is not None:
        self.desc = desc
    self.sep = sep
    self.details = details
    if fstr is not None:
        self.fstr = fstr
    self.var_name = var_name or self.name

    self._forks.append(self)
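
A hedged sketch: per the __init__ logic above, a subclass's docstring can double as the default name (import path assumed from the source location):

from appl.core.promptable.definition import Definition

class InputQuestion(Definition):
    """Input Question"""  # used as the name when none is given

d = InputQuestion(desc="The question the assistant should answer.")
print(d.name)  # "Input Question"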

FormatterMeta

Bases: ABCMeta

Metaclass for classes that can be formatted.

FutureValue

Bases: ABC

Represents a value that may not be ready yet.

val property

val

The value of the future.

Image

Image(url: str, detail: Optional[str] = None)

Bases: BaseModel

Represent an image in the message.

See the guide (https://platform.openai.com/docs/guides/vision/low-or-high-fidelity-image-understanding) for more information about the detail level.

Source code in src/appl/core/types/content.py
def __init__(self, url: str, detail: Optional[str] = None) -> None:
    """Initialize the image with the URL and detail level.

    See [the guide](https://platform.openai.com/docs/guides/vision/low-or-high-fidelity-image-understanding)
    for more information about the detail level.
    """
    super().__init__(url=url, detail=detail)

from_file classmethod

from_file(
    file: PathLike, detail: Optional[str] = None
) -> Image

Construct an image prompt from an image file.

Source code in src/appl/core/types/content.py
@classmethod
def from_file(cls, file: PathLike, detail: Optional[str] = None) -> "Image":
    """Construct an image prompt from an image file."""
    image = PIL.Image.open(file)
    return cls.from_image(image, detail)

from_image classmethod

from_image(
    image: ImageFile, detail: Optional[str] = None
) -> Image

Construct an image prompt from a PIL ImageFile.

Source code in src/appl/core/types/content.py
@classmethod
def from_image(cls, image: ImageFile, detail: Optional[str] = None) -> "Image":
    """Construct an image prompt from a PIL ImageFile."""
    buffered = BytesIO()
    # Save the image to the buffer in PNG format
    image.save(buffered, format="PNG")
    # Get the byte data from the buffer
    img_byte = buffered.getvalue()
    img_base64 = base64.b64encode(img_byte).decode("utf-8")
    return cls(url=f"data:image/png;base64,{img_base64}", detail=detail)
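
A usage sketch; the file path is illustrative:

from appl.core.types.content import Image

remote = Image(url="https://example.com/photo.png", detail="high")
local = Image.from_file("photo.png")  # re-encoded as a base64 PNG data URL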

Indexing

Indexing(
    method: Optional[str] = None,
    ind: int = 0,
    prefix: str = "",
    suffix: Optional[str] = None,
)

The indexing method for the printer.

Source code in src/appl/core/printer.py
def __init__(
    self,
    method: Optional[str] = None,
    ind: int = 0,
    prefix: str = "",
    suffix: Optional[str] = None,
):
    """Initialize the indexing method."""
    self._method = method
    self._ind = ind
    self._prefix = prefix
    self._suffix = suffix

get_index

get_index(ind: Optional[int] = None) -> str

Get the index string for the current or given index.

Source code in src/appl/core/printer.py
def get_index(self, ind: Optional[int] = None) -> str:
    """Get the index string for the current or given index."""
    if ind is None:
        ind = self._ind
        self._ind += 1
    if ind < 0:
        raise ValueError("Indexing method does not support negative indexing.")
    return self._get_index(ind)

MessageRole

MessageRole(
    type: Optional[str] = None, name: Optional[str] = None
)

Bases: BaseModel

The role of the message owner.

Parameters:

  • type (Optional[str], default: None ) –

    The type of the role.

  • name (Optional[str], default: None ) –

    An optional name for the role, used to differentiate between roles of the same type.

Source code in src/appl/core/types/role.py
def __init__(self, type: Optional[str] = None, name: Optional[str] = None):
    """Initialize the MessageRole object.

    Args:
        type: The type of the role.
        name: An optional name for the role, used to differentiate between roles of the same type.
    """
    super().__init__(type=type, name=name)

is_assistant property

is_assistant: bool

Whether the role is an assistant role.

is_system property

is_system: bool

Whether the role is a system role.

is_tool property

is_tool: bool

Whether the role is a tool role.

is_user property

is_user: bool

Whether the role is a user role.

get_dict

get_dict() -> Dict[str, Any]

Get the role as a dictionary.

Source code in src/appl/core/types/role.py
def get_dict(self) -> Dict[str, Any]:
    """Get the role as a dictionary."""
    data = {"role": self.type}
    if self.name:
        data["name"] = self.name
    return data
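
For example, roles serialize to the OpenAI "role"/"name" fields (import path assumed from the source location above):

from appl.core.types.role import MessageRole

role = MessageRole(type="user", name="alice")
print(role.get_dict())  # {'role': 'user', 'name': 'alice'}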

PromptPrinter

PromptPrinter(
    states: Optional[List[PrinterState]] = None,
    is_newline: bool = True,
)

A class to print prompt records as conversation.

The printer maintains a stack of printer states about the current role, separator, indexing, and indentation.

Source code in src/appl/core/printer.py
def __init__(
    self, states: Optional[List[PrinterState]] = None, is_newline: bool = True
) -> None:
    """Initialize the prompt printer."""
    if states is None:
        states = [PrinterState()]
    self._states = states
    self._is_newline = is_newline

states property

states: List[PrinterState]

The stack of printer states.

pop

pop() -> None

Pop the last printer state from the stack.

Source code in src/appl/core/printer.py
def pop(self) -> None:
    """Pop the last printer state from the stack."""
    if len(self._states) == 1:
        raise ValueError("Cannot pop the first state.")
    self._states.pop()

push

push(data: PrinterPush) -> None

Push a new printer state to the stack.

Source code in src/appl/core/printer.py
def push(self, data: PrinterPush) -> None:
    """Push a new printer state to the stack."""
    self._push(**data.__dict__)

Promptable

Bases: ABC

Interface for objects that can be converted to a prompt string.

ResponseType

Bases: str, Enum

The type of generation response.

IMAGE class-attribute instance-attribute

IMAGE = 'image'

An image.

OBJECT class-attribute instance-attribute

OBJECT = 'obj'

An instance of a response model.

TEXT class-attribute instance-attribute

TEXT = 'text'

A text completion.

TOOL_CALL class-attribute instance-attribute

TOOL_CALL = 'tool_calls'

A list of tool calls.

UNFINISHED class-attribute instance-attribute

UNFINISHED = 'unfinished'

The response is not finished.

StringFuture

StringFuture(content: Any = '', set_value: bool = False)

Bases: FutureValue, BaseModel

StringFuture is a string that may not be ready yet.

Source code in src/appl/core/types/futures.py
def __init__(self, content: Any = "", set_value: bool = False):
    """Initialize the StringFuture."""
    if set_value:
        if not isinstance(content, List):
            raise ValueError("Cannot set value to non-list.")
        s = content
    else:
        s = [content]
    super().__init__(s=s)

val property

val

The value of the future.

from_list classmethod

from_list(content: List[Any]) -> StringFuture

Create a StringFuture from a list of content.

Source code in src/appl/core/types/futures.py
@classmethod
def from_list(cls, content: List[Any]) -> "StringFuture":
    """Create a StringFuture from a list of content."""
    return cls(content, set_value=True)

join

join(iterable: Iterable[StringFuture]) -> StringFuture

Concatenate any number of strings.

The StringFuture whose method is called is inserted in between each given StringFuture. The result is returned as a new StringFuture.

Source code in src/appl/core/types/futures.py
def join(self, iterable: Iterable["StringFuture"]) -> "StringFuture":
    """Concatenate any number of strings.

    The StringFuture whose method is called is inserted in between each
    given StringFuture. The result is returned as a new StringFuture.
    """
    result = []
    for i, x in enumerate(iterable):
        if i != 0:
            result.append(self)
        result.append(x)
    return StringFuture.from_list(result)
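
A sketch of joining (import path assumed; str() is assumed to materialize the result):

from appl.core.types.futures import StringFuture

sep = StringFuture(", ")
parts = [StringFuture("a"), StringFuture("b"), StringFuture("c")]
print(str(sep.join(parts)))  # "a, b, c"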

materialized

materialized() -> StringFuture

Materialize the StringFuture.

Source code in src/appl/core/types/futures.py
def materialized(self) -> "StringFuture":
    """Materialize the StringFuture."""
    self.s = [self._collapse()]
    return self

serialize

serialize() -> str

Serialize the StringFuture.

Source code in src/appl/core/types/futures.py
def serialize(self) -> str:
    """Serialize the StringFuture."""
    return str(self)

SystemMessage

SystemMessage(
    content: Any = None,
    *,
    role: Optional[MessageRole] = None,
    **kwargs: Any
)

Bases: BaseMessage

A system message in the conversation.

Source code in src/appl/core/message.py
def __init__(
    self,
    content: Any = None,
    *,
    role: Optional[MessageRole] = None,
    **kwargs: Any,
) -> None:
    """Create a system message with content and extra arguments."""
    super().__init__(content=content, role=role, **kwargs)
    self.validate_role(SYSTEM_ROLE)

is_ai property

is_ai: bool

Whether the message is an assistant message.

is_system property

is_system: bool

Whether the message is a system message.

is_tool property

is_tool: bool

Whether the message is a tool message.

is_user property

is_user: bool

Whether the message is a user message.

get_content

get_content(as_str: bool = False) -> Any

Get the content of the message.

Materialize the content if it is a FutureValue.

Source code in src/appl/core/message.py
def get_content(self, as_str: bool = False) -> Any:
    """Get the content of the message.

    Materialize the content if it is a FutureValue.
    """
    content = self.content
    if content is not None:
        if isinstance(content, ContentList):
            return content.get_contents()  # return a list of dict
        if isinstance(content, FutureValue):
            # materialize the content
            content = content.val
        if as_str:  # not apply to ContentList
            content = str(content)
    return content

get_dict

get_dict(
    default_role: Optional[MessageRole] = None,
) -> Dict[str, Any]

Return a dict representation of the message.

Source code in src/appl/core/message.py
def get_dict(self, default_role: Optional[MessageRole] = None) -> Dict[str, Any]:
    """Return a dict representation of the message."""
    # materialize the content using str()
    role = self.role or default_role
    if role is None:
        raise ValueError("Role or default role must be provided.")
    if role.type is None:
        if default_role and default_role.type:
            role = MessageRole(type=default_role.type, name=role.name)
        else:
            raise ValueError("Role type must be provided.")
    data = {"content": self.get_content(as_str=True), **role.get_dict()}
    return data

merge

merge(other: 'BaseMessage') -> Optional['Message']

Merge the message with another message.

Source code in src/appl/core/message.py
def merge(self: "Message", other: "BaseMessage") -> Optional["Message"]:
    """Merge the message with another message."""
    if self.should_merge(other):
        # merge the content
        res = self.model_copy()
        if isinstance(other.content, ContentList) and not isinstance(
            res.content, ContentList
        ):
            res.content = ContentList(contents=[res.content])
        res.content += other.content
        return res
    return None

should_merge

should_merge(other: 'BaseMessage') -> bool

Whether the message should be merged with the other message.

Source code in src/appl/core/message.py
def should_merge(self, other: "BaseMessage") -> bool:
    """Whether the message should be merged with the other message."""
    if self.is_tool or other.is_tool:
        # not merge tool messages
        return False
    if self.content is None or other.content is None:
        return False
    return self.role == other.role

str_with_default_role

str_with_default_role(
    default_role: Optional[MessageRole] = None,
) -> str

Return the string representation of the message with default role.

Source code in src/appl/core/message.py
def str_with_default_role(self, default_role: Optional[MessageRole] = None) -> str:
    """Return the string representation of the message with default role."""
    return self._get_colored_content(self.role or default_role)

validate_role

validate_role(target_role: MessageRole) -> None

Validate the role of the message, fill the role if not provided.

Source code in src/appl/core/message.py
def validate_role(self, target_role: MessageRole) -> None:
    """Validate the role of the message, fill the role if not provided."""
    target_type = target_role.type
    if target_type is None:
        raise ValueError("Target role type must be provided.")
    if self.role is None:
        self.role = target_role
    elif self.role.type is None:
        # fill the role type as the target type
        self.role = MessageRole(type=target_type, name=self.role.name)
    elif self.role.type != target_type:
        raise ValueError(f"Invalid role for {target_type} message: {self.role}")

ToolCall

Bases: BaseModel

The class representing a tool call.

args class-attribute instance-attribute

args: str = Field(
    ...,
    description="The arguments to call the function with.",
)

The arguments to call the function with.

id class-attribute instance-attribute

id: str = Field(..., description="The ID of the tool call.")

The ID of the tool call.

name class-attribute instance-attribute

name: str = Field(
    ..., description="The name of the function to call."
)

The name of the function to call.

from_dict classmethod

from_dict(call: Dict) -> ToolCall

Create a ToolCall from a dictionary in the OpenAI format.

Source code in src/appl/core/tool.py
@classmethod
def from_dict(cls, call: Dict) -> "ToolCall":
    """Create a ToolCall from a dictionary in the OpenAI format."""
    # throw error if incorrect format
    return cls(
        id=call["id"],
        name=call["function"]["name"],
        args=call["function"]["arguments"],
    )

from_openai_tool_call classmethod

from_openai_tool_call(
    call: ChatCompletionMessageToolCall,
) -> ToolCall

Create a ToolCall from an OpenAI tool call.

Source code in src/appl/core/tool.py
@classmethod
def from_openai_tool_call(cls, call: ChatCompletionMessageToolCall) -> "ToolCall":
    """Create a ToolCall from an OpenAI tool call."""
    return cls(
        id=call.id,
        name=call.function.name,
        args=call.function.arguments,
    )

get_dict

get_dict()

Get the OpenAI format dictionary representation of the tool call.

Source code in src/appl/core/tool.py
def get_dict(self):
    """Get the OpenAI format dictionary representation of the tool call."""
    return {
        "id": self.id,
        "type": "function",
        "function": {
            "name": self.name,
            "arguments": self.args,
        },
    }
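
A round-trip sketch between ToolCall and the OpenAI dict format (import path assumed from the source location above):

from appl.core.tool import ToolCall

call = ToolCall(id="call_1", name="add", args='{"a": 1, "b": 2}')
d = call.get_dict()
# {'id': 'call_1', 'type': 'function',
#  'function': {'name': 'add', 'arguments': '{"a": 1, "b": 2}'}}
assert ToolCall.from_dict(d).name == "add"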

ToolMessage

ToolMessage(
    content: Any = None,
    *,
    role: Optional[MessageRole] = None,
    tool_call_id: str = "",
    **kwargs: Any
)

Bases: BaseMessage

A tool message in the conversation.

Source code in src/appl/core/message.py
def __init__(
    self,
    content: Any = None,
    *,
    role: Optional[MessageRole] = None,
    tool_call_id: str = "",
    **kwargs: Any,
) -> None:
    """Create a tool message with content and extra arguments."""
    super().__init__(
        content=content, role=role, tool_call_id=tool_call_id, **kwargs
    )
    self.validate_role(TOOL_ROLE)

is_ai property

is_ai: bool

Whether the message is an assistant message.

is_system property

is_system: bool

Whether the message is a system message.

is_tool property

is_tool: bool

Whether the message is a tool message.

is_user property

is_user: bool

Whether the message is a user message.

get_content

get_content(as_str: bool = False) -> Any

Get the content of the message.

Materialize the content if it is a FutureValue.

Source code in src/appl/core/message.py
def get_content(self, as_str: bool = False) -> Any:
    """Get the content of the message.

    Materialize the content if it is a FutureValue.
    """
    content = self.content
    if content is not None:
        if isinstance(content, ContentList):
            return content.get_contents()  # return a list of dict
        if isinstance(content, FutureValue):
            # materialize the content
            content = content.val
        if as_str:  # not apply to ContentList
            content = str(content)
    return content

get_dict

get_dict(*args: Any, **kwargs: Any) -> Dict[str, Any]

Return a dict representation of the message.

Source code in src/appl/core/message.py
def get_dict(self, *args: Any, **kwargs: Any) -> Dict[str, Any]:
    """Return a dict representation of the message."""
    data = super().get_dict(*args, **kwargs)
    data["tool_call_id"] = self.tool_call_id
    return data

merge

merge(other: 'BaseMessage') -> Optional['Message']

Merge the message with another message.

Source code in src/appl/core/message.py
def merge(self: "Message", other: "BaseMessage") -> Optional["Message"]:
    """Merge the message with another message."""
    if self.should_merge(other):
        # merge the content
        res = self.model_copy()
        if isinstance(other.content, ContentList) and not isinstance(
            res.content, ContentList
        ):
            res.content = ContentList(contents=[res.content])
        res.content += other.content
        return res
    return None

should_merge

should_merge(other: 'BaseMessage') -> bool

Whether the message should be merged with the other message.

Source code in src/appl/core/message.py
def should_merge(self, other: "BaseMessage") -> bool:
    """Whether the message should be merged with the other message."""
    if self.is_tool or other.is_tool:
        # not merge tool messages
        return False
    if self.content is None or other.content is None:
        return False
    return self.role == other.role

str_with_default_role

str_with_default_role(
    default_role: Optional[MessageRole] = None,
) -> str

Return the string representation of the message with default role.

Source code in src/appl/core/message.py
def str_with_default_role(self, default_role: Optional[MessageRole] = None) -> str:
    """Return the string representation of the message with default role."""
    return self._get_colored_content(self.role or default_role)

validate_role

validate_role(target_role: MessageRole) -> None

Validate the role of the message, fill the role if not provided.

Source code in src/appl/core/message.py
def validate_role(self, target_role: MessageRole) -> None:
    """Validate the role of the message, fill the role if not provided."""
    target_type = target_role.type
    if target_type is None:
        raise ValueError("Target role type must be provided.")
    if self.role is None:
        self.role = target_role
    elif self.role.type is None:
        # fill the role type as the target type
        self.role = MessageRole(type=target_type, name=self.role.name)
    elif self.role.type != target_type:
        raise ValueError(f"Invalid role for {target_type} message: {self.role}")

UserMessage

UserMessage(
    content: Any = None,
    *,
    role: Optional[MessageRole] = None,
    **kwargs: Any
)

Bases: BaseMessage

A user message in the conversation.

Source code in src/appl/core/message.py
def __init__(
    self,
    content: Any = None,
    *,
    role: Optional[MessageRole] = None,
    **kwargs: Any,
) -> None:
    """Create a user message with content and extra arguments."""
    super().__init__(content=content, role=role, **kwargs)
    self.validate_role(USER_ROLE)

is_ai property

is_ai: bool

Whether the message is an assistant message.

is_system property

is_system: bool

Whether the message is a system message.

is_tool property

is_tool: bool

Whether the message is a tool message.

is_user property

is_user: bool

Whether the message is a user message.

get_content

get_content(as_str: bool = False) -> Any

Get the content of the message.

Materialize the content if it is a FutureValue.

Source code in src/appl/core/message.py
def get_content(self, as_str: bool = False) -> Any:
    """Get the content of the message.

    Materialize the content if it is a FutureValue.
    """
    content = self.content
    if content is not None:
        if isinstance(content, ContentList):
            return content.get_contents()  # return a list of dict
        if isinstance(content, FutureValue):
            # materialize the content
            content = content.val
        if as_str:  # not apply to ContentList
            content = str(content)
    return content

get_dict

get_dict(
    default_role: Optional[MessageRole] = None,
) -> Dict[str, Any]

Return a dict representation of the message.

Source code in src/appl/core/message.py
def get_dict(self, default_role: Optional[MessageRole] = None) -> Dict[str, Any]:
    """Return a dict representation of the message."""
    # materialize the content using str()
    role = self.role or default_role
    if role is None:
        raise ValueError("Role or default role must be provided.")
    if role.type is None:
        if default_role and default_role.type:
            role = MessageRole(type=default_role.type, name=role.name)
        else:
            raise ValueError("Role type must be provided.")
    data = {"content": self.get_content(as_str=True), **role.get_dict()}
    return data

merge

merge(other: 'BaseMessage') -> Optional['Message']

Merge the message with another message.

Source code in src/appl/core/message.py
def merge(self: "Message", other: "BaseMessage") -> Optional["Message"]:
    """Merge the message with another message."""
    if self.should_merge(other):
        # merge the content
        res = self.model_copy()
        if isinstance(other.content, ContentList) and not isinstance(
            res.content, ContentList
        ):
            res.content = ContentList(contents=[res.content])
        res.content += other.content
        return res
    return None

should_merge

should_merge(other: 'BaseMessage') -> bool

Whether the message should be merged with the other message.

Source code in src/appl/core/message.py
def should_merge(self, other: "BaseMessage") -> bool:
    """Whether the message should be merged with the other message."""
    if self.is_tool or other.is_tool:
        # not merge tool messages
        return False
    if self.content is None or other.content is None:
        return False
    return self.role == other.role

str_with_default_role

str_with_default_role(
    default_role: Optional[MessageRole] = None,
) -> str

Return the string representation of the message with default role.

Source code in src/appl/core/message.py
def str_with_default_role(self, default_role: Optional[MessageRole] = None) -> str:
    """Return the string representation of the message with default role."""
    return self._get_colored_content(self.role or default_role)

validate_role

validate_role(target_role: MessageRole) -> None

Validate the role of the message, fill the role if not provided.

Source code in src/appl/core/message.py
def validate_role(self, target_role: MessageRole) -> None:
    """Validate the role of the message, fill the role if not provided."""
    target_type = target_role.type
    if target_type is None:
        raise ValueError("Target role type must be provided.")
    if self.role is None:
        self.role = target_role
    elif self.role.type is None:
        # fill the role type as the target type
        self.role = MessageRole(type=target_type, name=self.role.name)
    elif self.role.type != target_type:
        raise ValueError(f"Invalid role for {target_type} message: {self.role}")

as_func

as_func(
    func: Callable[P, T],
    _globals: Optional[Dict] = None,
    _locals: Optional[Dict] = None,
) -> Callable[P, T]

Fill the globals and locals for a ppl function.

When locals are not provided, the locals from the caller are used.

Source code in src/appl/func.py
def as_func(
    func: Callable[P, T],
    _globals: Optional[Dict] = None,
    _locals: Optional[Dict] = None,
) -> Callable[P, T]:
    """Fill the globals and locals for a ppl function.

    When locals are not provided, the locals from the caller are used.
    """
    frame = inspect.currentframe()
    if _locals is None and frame is not None and frame.f_back is not None:
        _locals = frame.f_back.f_locals
    return partial(func, _globals=_globals, _locals=_locals)

as_tool

as_tool(func: Callable, **kwargs: Any) -> Tool

Wrap a given function with additional predefined arguments into a Tool.

This function allows converting a standard function into a 'Tool' by specifying the function and any additional arguments that should be pre-defined for it. These additional arguments are passed as keyword arguments and will be bound to the function within the Tool object, so that these arguments are not required when using this tool.

Parameters:

  • func (Callable) –

    The function to be converted into a Tool.

  • **kwargs (Any, default: {} ) –

    Keyword arguments that will be predefined for the function in the Tool object.

Returns:

  • Tool ( Tool ) –

    An object encapsulating the given function and its predefined arguments, ready to be utilized as a Tool.

Examples:

Given a function move_disk that requires an environment and two pegs to move a disk from one peg to another in the Tower of Hanoi puzzle, one can create a tool with a predefined environment by:

def move_disk(env: HanoiEnv, from_peg: int, to_peg: int) -> str:
    pass

env = HanoiEnv()
tools = [as_tool(move_disk, env=env)]

In this example, move_disk is encapsulated into a Tool with env predefined, so only from_peg and to_peg are required.

Source code in src/appl/func.py
def as_tool(func: Callable, **kwargs: Any) -> Tool:
    """Wrap a given function with additional predefined arguments into a Tool.

    This function allows converting a standard function into a 'Tool' by
    specifying the function and any additional arguments that should be
    pre-defined for it. These additional arguments are passed as keyword
    arguments and will be bound to the function within the Tool object,
    so that these arguments are not required when using this tool.

    Args:
        func (Callable):
            The function to be converted into a Tool.
        **kwargs:
            Keyword arguments that will be predefined for the function in
            the Tool object.

    Returns:
        Tool:
            An object encapsulating the given function and its predefined
            arguments, ready to be utilized as a Tool.

    Examples:
        Given a function `move_disk` that requires an environment and two
        pegs to move a disk from one peg to another in the Tower of Hanoi
        puzzle, one can create a tool with a predefined environment by:

        ```python
        def move_disk(env: HanoiEnv, from_peg: int, to_peg: int) -> str:
            pass

        env = HanoiEnv()
        tools = [as_tool(move_disk, env=env)]
        ```

        In this example, `move_disk` is encapsulated into a Tool with `env`
        predefined, so only `from_peg` and `to_peg` are required.
    """
    return Tool(func=func, **kwargs)

as_tool_choice

as_tool_choice(obj: Union[str, Callable, BaseTool]) -> dict

Build a tool choice argument for the OpenAI API from an object.

Source code in src/appl/func.py
def as_tool_choice(obj: Union[str, Callable, BaseTool]) -> dict:
    """Build a tool choice argument for the OpenAI API from an object."""
    if isinstance(obj, BaseTool):
        name = obj.name
    else:
        name = getattr(obj, "__name__", str(obj))
    return dict(type="function", function=dict(name=name))
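
For example (import path assumed per src/appl/func.py):

from appl.func import as_tool_choice

def add(a: int, b: int) -> int:
    return a + b

print(as_tool_choice(add))
# {'type': 'function', 'function': {'name': 'add'}}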

auto_prime_gen

auto_prime_gen(gen_func)

Decorate a generator to automatically prime the generator.

Source code in src/appl/func.py
def auto_prime_gen(gen_func):
    """Decorate a generator to automatically prime the generator."""

    def wrapper(*args, **kwargs):
        gen = gen_func(*args, **kwargs)
        next(gen)  # prime the generator
        return gen

    return wrapper

build_tools

build_tools(
    tools: OneOrMany[Union[BaseTool, Callable]]
) -> Sequence[BaseTool]

Build a list of tools from the given tools or functions.

Source code in src/appl/func.py
def build_tools(tools: OneOrMany[Union[BaseTool, Callable]]) -> Sequence[BaseTool]:
    """Build a list of tools from the given tools or functions."""

    def convert_to_tool(tool: Union[BaseTool, Callable]) -> BaseTool:
        if isinstance(tool, BaseTool):
            return tool
        if callable(tool):
            return as_tool(tool)
        raise ValueError(f"Invalid tool: {tool}")

    # process tools
    if isinstance(tools, BaseTool) or callable(tools):
        return [convert_to_tool(tools)]
    if isinstance(tools, Sequence):
        return [convert_to_tool(tool) for tool in tools]
    raise ValueError(f"Invalid tools: {tools}")

call

call(
    func: Callable,
    *args: Any,
    use_process: bool = False,
    **kwargs: Any
) -> CallFuture

Create a CallFuture object from a function and its arguments.

The CallFuture object calls the function in a separate thread or process, so the function needs to be thread-safe or process-safe.
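
Example:

A minimal sketch, assuming the result can be retrieved through the future's value as with other FutureValue objects:

import time

def slow_add(a: int, b: int) -> int:
    time.sleep(1)
    return a + b

future = call(slow_add, 1, 2)  # runs in a background thread
result = future.val  # assumed accessor; blocks until the result is ready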

Source code in src/appl/func.py
def call(
    func: Callable, *args: Any, use_process: bool = False, **kwargs: Any
) -> CallFuture:
    """Create a CallFuture object from a function and its arguments.

    The CallFuture object calls the function in a separate thread or process,
    so the function needs to be thread-safe or process-safe.
    """
    return CallFuture(func, *args, use_process=use_process, **kwargs)

convo

convo(_ctx: Optional[PromptContext] = None) -> Conversation

Return the full conversation in the context.

Similar to globals() in Python in some sense.
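
Example:

A minimal sketch; convo must be called within a @ppl function, where bare string expressions are appended to the prompt:

@ppl
def example():
    "You are a helpful assistant."
    "Hello!"
    return convo()  # the full conversation visible in this context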

Source code in src/appl/func.py
@need_ctx
def convo(_ctx: Optional[PromptContext] = None) -> Conversation:
    """Return the full conversation in the context.

    Similar to globals() in Python in some sense.
    """
    # Added default value for _ctx to avoid the warning of type checker
    if _ctx is None:
        raise ValueError(
            "PromptContext is required for convo, "
            "this function should be called within @ppl function."
        )
    return _ctx.messages

empty_line

empty_line(num_lines: int = 1) -> PromptRecords

Create empty lines regardless of the surrounding compositor.
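
Example:

Inserting blank separator lines into the prompt from within a @ppl function (a minimal sketch):

@ppl
def example():
    "First paragraph."
    grow(empty_line(2))  # two empty lines, ignoring the surrounding compositor
    "Second paragraph."
    return records()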

Source code in src/appl/func.py
def empty_line(num_lines: int = 1) -> PromptRecords:
    """Create empty lines regardless of other compositor."""
    records = PromptRecords()
    records.record(PrinterPush(separator="\n", indexing=Indexing(), new_indent=""))
    for _ in range(num_lines):
        records.record("")
    records.record(PrinterPop())
    return records

gen

gen(
    server: Optional[str] = None,
    *,
    max_tokens: Optional[int] = None,
    stop: MaybeOneOrMany[str] = None,
    temperature: Optional[float] = None,
    top_p: Optional[float] = None,
    n: Optional[int] = None,
    tools: OneOrMany[Union[BaseTool, Callable]] = [],
    tool_format: str = "auto",
    stream: Optional[bool] = None,
    response_format: Optional[Union[dict, Type[M]]] = None,
    response_model: Optional[Type[M]] = None,
    mock_response: Optional[
        Union[CompletionResponse, str]
    ] = None,
    _ctx: Optional[PromptContext] = None,
    **kwargs: Any
) -> Generation[M]

Send a generation request to the LLM backend.

Parameters:

  • server (str, default: None ) –

    name of the backend server. Defaults to the default server set in the configs.

  • max_tokens (int, default: None ) –

    maximum number of tokens to generate. Defaults to None.

  • stop (str | Sequence[str], default: None ) –

    stop sequence(s). Defaults to None.

  • temperature (float, default: None ) –

    temperature for sampling. Defaults to None.

  • top_p (float, default: None ) –

    nucleus sampling parameter. Defaults to None.

  • n (int, default: None ) –

    number of choices to generate. Defaults to None (the backend's default, typically 1).

  • tools (BaseTool | Callable | Sequence[BaseTool | Callable], default: [] ) –

    tools that can be used. Defaults to an empty list.

  • tool_format (str, default: 'auto' ) –

    the format for the tools. Defaults to "auto".

  • stream (bool, default: None ) –

    whether to stream the results. Defaults to None (treated as False).

  • response_format (Union[dict, Type[M]], default: None ) –

    OpenAI's argument that specifies the response format. Defaults to None.

  • response_model (Type[M], default: None ) –

    instructor's argument that specifies the response format as a Pydantic model. Using response_format is recommended instead. Defaults to None.

  • mock_response (Union[CompletionResponse, str], default: None ) –

    mock response for testing. Defaults to None.

  • _ctx (PromptContext, default: None ) –

    prompt context, will be automatically filled.

  • kwargs (Any, default: {} ) –

    extra arguments for the generation.

Returns:

  • Generation ( Generation[M] ) –

    a future object representing the generation result
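
Example:

A minimal sketch, assuming a default backend server is configured; gen is normally called inside a @ppl function, so _ctx is filled automatically:

@ppl
def greet(name: str):
    f"Write a one-sentence greeting for {name}."
    return gen(temperature=0.0, max_tokens=64)

print(greet("Alice"))  # printing materializes the returned future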

Source code in src/appl/func.py
@need_ctx
def gen(
    server: Optional[str] = None,
    *,
    max_tokens: Optional[int] = None,
    stop: MaybeOneOrMany[str] = None,
    temperature: Optional[float] = None,
    top_p: Optional[float] = None,
    n: Optional[int] = None,
    tools: OneOrMany[Union[BaseTool, Callable]] = [],  # TODO: support dict
    tool_format: str = "auto",
    stream: Optional[bool] = None,
    response_format: Optional[Union[dict, Type[M]]] = None,
    response_model: Optional[Type[M]] = None,
    mock_response: Optional[Union[CompletionResponse, str]] = None,
    _ctx: Optional[PromptContext] = None,
    **kwargs: Any,
) -> Generation[M]:
    """Send a generation request to the LLM backend.

    Args:
        server (str, optional):
            name of the backend server. Defaults to the default server set in the configs.
        max_tokens (int, optional): maximum number of tokens to generate. Defaults to None.
        stop (str|Sequence[str], optional): stop sequence(s). Defaults to None.
        temperature (float, optional): temperature for sampling. Defaults to None.
        top_p (float, optional): nucleus sampling parameter. Defaults to None.
        n (int, optional): number of choices to generate.
            Defaults to None (the backend's default, typically 1).
        tools (BaseTool|Callable|Sequence[BaseTool|Callable], optional):
            tools that can be used. Defaults to an empty list.
        tool_format (str, optional): the format for the tools. Defaults to "auto".
        stream (bool, optional): whether to stream the results.
            Defaults to None (treated as False).
        response_format (Union[dict, Type[M]], optional):
            OpenAI's argument that specifies the response format. Defaults to None.
        response_model (Type[M], optional):
            instructor's argument that specifies the response format as a Pydantic
            model. Using `response_format` is recommended instead. Defaults to None.
        mock_response (Union[CompletionResponse, str], optional):
            mock response for testing. Defaults to None.
        _ctx (PromptContext): prompt context, will be automatically filled.
        kwargs (Any): extra arguments for the generation.

    Returns:
        Generation: a future object representing the generation result
    """
    backend_server = server_manager.get_server(server)
    if _ctx is None:
        raise ValueError(
            "PromptContext is required for generation."
            "Normally, it should be automatically filled."
        )
    messages = _ctx.messages
    messages.materialize()  # materialize the messages
    # TODO: double check the correctness
    messages = copy.deepcopy(messages)  # freeze the prompt for the generation

    if response_format is not None and response_model is not None:
        raise ValueError("response_format and response_model cannot be used together.")

    if (
        # wrap plain/generic non-BaseModel types (e.g. int, List[int], Literal[...])
        isinstance(get_origin(response_format), type)
        or get_origin(response_format) is Literal
        or (
            isinstance(response_format, type)
            and not issubclass(response_format, BaseModel)
        )
    ):

        class Response(BaseModel):
            response: response_format  # type: ignore

        response_format = Response  # type: ignore
        kwargs["_wrapped_attribute"] = "response"

    create_args = GenArgs(
        model=backend_server.model_name,
        messages=messages,
        max_tokens=max_tokens,
        stop=stop,
        temperature=temperature,
        top_p=top_p,
        n=n,
        tools=build_tools(tools),
        tool_format=tool_format,  # type: ignore
        stream=stream,
        response_format=response_format,  # type: ignore
        response_model=response_model,  # type: ignore
    )

    generation = Generation[M](
        backend_server, create_args, mock_response=mock_response, _ctx=_ctx, **kwargs
    )

    @_langsmith_traceable(name=generation.id, metadata={"appl": "gen"})  # type: ignore
    def langsmith_trace(*args: Any, **kwargs: Any) -> None:
        pass

    langsmith_trace(backend_server, create_args, _ctx=_ctx, **kwargs)
    return generation

get_var

get_var(name: str, _ctx: PromptContext) -> Any

Get a variable by name from the prompt context.

Source code in src/appl/func.py
@need_ctx
def get_var(name: str, _ctx: PromptContext) -> Any:
    """Get a variable by name from the prompt context."""
    return getattr(_ctx, name)

grow

grow(
    content: Any, *, _ctx: Optional[PromptContext] = None
) -> None

Append the content to the prompt.
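
Example:

grow appends content explicitly, which is handy when the content is computed rather than written as a bare expression (a minimal sketch):

@ppl
def example(items: list):
    "Consider the following items:"
    for item in items:
        grow(f"- {item}")
    return records()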

Source code in src/appl/func.py
@need_ctx
def grow(content: Any, *, _ctx: Optional[PromptContext] = None) -> None:
    """Append the content to the prompt."""
    if _ctx is None:
        raise ValueError(
            "PromptContext is required for appending. "
            "Normally, it should be automatically filled."
        )
    appl_execute(content, _ctx=_ctx)

is_string

is_string(s: Any) -> bool

Check if the object is a StringFuture or str.

Source code in src/appl/core/types/futures.py
def is_string(s: Any) -> bool:
    """Check if the object is a StringFuture or str."""
    return isinstance(s, StringFuture) or isinstance(s, str)

need_ctx

need_ctx(func: Callable[P, T]) -> Callable[P, T]

Decorate a function to mark it as needing a prompt context.
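
Example:

Marking a helper so that the APPL runtime fills in the caller's PromptContext as _ctx when the helper is called inside a @ppl function (current_convo is a hypothetical helper):

@need_ctx
def current_convo(_ctx: Optional[PromptContext] = None) -> Conversation:
    assert _ctx is not None, "expected to be filled by the runtime"
    return _ctx.messages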

Source code in src/appl/func.py
def need_ctx(func: Callable[P, T]) -> Callable[P, T]:
    """Decorate a function to mark it as needing a prompt context."""
    setattr(func, "__need_ctx__", True)
    return func

openai_tool_schema

openai_tool_schema(func: Callable) -> dict

Build an OpenAI tool schema from a function.
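
Example:

A minimal sketch; the exact schema contents are derived from the function's signature and docstring:

def add(a: int, b: int) -> int:
    """Add two integers."""
    return a + b

schema = openai_tool_schema(add)
# a dict in OpenAI's function-tool format: {"type": "function", "function": {...}}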

Source code in src/appl/func.py
def openai_tool_schema(func: Callable) -> dict:
    """Build openai tool schema from a function."""
    return as_tool(func).openai_schema

partial

partial(
    func: Callable[..., R], *args: Any, **kwargs: Any
) -> Callable[..., R]

Create a new function with partial application of the given arguments and keywords.
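
Example:

This behaves like functools.partial, but also carries over the __need_ctx__ marker (a minimal sketch):

def power(base: int, exp: int) -> int:
    return base**exp

square = partial(power, exp=2)
square(5)  # -> 25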

Source code in src/appl/func.py
def partial(func: Callable[..., R], *args: Any, **kwargs: Any) -> Callable[..., R]:
    """Create a new function with partial application of the given arguments and keywords."""
    new_func = functools.partial(func, *args, **kwargs)
    if getattr(func, "__need_ctx__", True):
        new_func = need_ctx(new_func)  # type: ignore
    return new_func

ppl

ppl(
    ctx: Union[str, F] = "new",
    comp: Optional[Compositor] = None,
    *,
    default_return: Optional[Literal["prompt"]] = None,
    include_docstring: bool = False,
    auto_prime: bool = False,
    num_extra_wrappers: int = 0,
    new_ctx_func: Callable = PromptContext
) -> Union[Callable[[F], F], F]

Decorate a function to mark it as an APPL function.

The function carries a prompt context, which can be the same as its caller's context, copied from it, created from scratch, or resumed from the last run.

Parameters:

  • ctx (str, default: 'new' ) –

    the method for dealing with the child context; available methods include:

    • (default) "new" or "new_ctx": create a brand new context.
    • "copy" or "copy_ctx": copy the parent's context; changes will not affect the parent's context.
    • "same" or "same_ctx": use the same context as the parent; changes will affect the parent's context.
    • "resume" or "resume_ctx": resume its own context from the last run. For the first run, it will use the parent's context.
  • comp (Compositor, default: None ) –

    the default compositor to be used. Defaults to None.

  • default_return (str, default: None ) –

    The default return value; "prompt" means returning the prompt defined within the function. Defaults to None.

  • include_docstring (bool, default: False ) –

    set to True to include the triple-quoted docstring in the prompt. Defaults to False.

  • auto_prime (bool, default: False ) –

    set to True to automatically prime the generator. Defaults to False.

  • num_extra_wrappers (int, default: 0 ) –

    the number of extra wrappers to go back to the caller frame.

  • new_ctx_func (Callable, default: PromptContext ) –

    the function to create a new context. Defaults to PromptContext.
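
Example:

A minimal sketch, assuming a configured backend; bare string expressions inside a @ppl function are appended to the prompt:

@ppl  # equivalent to @ppl(ctx="new"): a fresh context per call
def outline(topic: str):
    f"Write a short outline about {topic}."
    return gen()

@ppl(ctx="same")  # shares the caller's context, so its lines affect the caller
def add_constraint():
    "Keep the answer under 100 words."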

Source code in src/appl/func.py
def ppl(
    ctx: Union[str, F] = "new",
    comp: Optional[Compositor] = None,
    *,
    default_return: Optional[Literal["prompt"]] = None,
    include_docstring: bool = False,
    auto_prime: bool = False,
    num_extra_wrappers: int = 0,
    new_ctx_func: Callable = PromptContext,
) -> Union[Callable[[F], F], F]:
    """Decorate a function to mark it as an APPL function.

    The function carries a prompt context, which can be the same as its
    caller's context, copied from it, created from scratch, or resumed
    from the last run.

    Args:
        ctx (str):
            the method for dealing with the child context; available methods include:

            - (default) "new" or "new_ctx": create a brand new context.
            - "copy" or "copy_ctx":
                copy the parent's context; changes will not
                affect the parent's context.
            - "same" or "same_ctx":
                use the same context as the parent; changes will
                affect the parent's context.
            - "resume" or "resume_ctx":
                resume its own context from the last run.
                For the first run, it will use the parent's context.

        comp (Compositor, optional):
            the default compositor to be used. Defaults to None.
        default_return (str, optional):
            The default return value; "prompt" means returning the prompt
            defined within the function. Defaults to None.
        include_docstring (bool, optional):
            set to True to include the triple-quoted docstring in the prompt.
            Defaults to False.
        auto_prime (bool, optional):
            set to True to automatically prime the generator. Defaults to False.
        num_extra_wrappers (int, optional):
            the number of extra wrappers to go back to the caller frame.
        new_ctx_func (Callable, optional):
            the function to create a new context. Defaults to PromptContext.
    """
    # The same doc string as PromptFunc (excluding the func argument)

    ctx_method: str = "new"

    def decorator(func: F) -> F:
        """Decorate a function as prompt function."""
        _is_class_method = False
        if "." in (qualname := func.__qualname__):
            # NOTE: this is a workaround for class methods, may not cover all cases
            qualnames = qualname.split(".")
            if qualnames[-2] != "<locals>":
                _is_class_method = True

        # ? should disable such usage?
        # if not _is_class_method and "<locals>" in qualname and ctx_method == "resume":
        #     raise ValueError("Cannot use 'resume' with local functions.")
        prompt_func = PromptFunc(
            func, ctx_method, comp, default_return, include_docstring, new_ctx_func
        )

        @need_ctx
        @_langsmith_traceable(name=func.__name__, metadata={"appl": "func"})  # type: ignore
        @functools.wraps(func)
        def wrapper(
            *args: Any,
            _globals: Optional[Dict] = None,
            _locals: Optional[Dict] = None,
            **kwargs: Any,
        ) -> Any:
            # get the function qualname and count the number of runs
            func_name = prompt_func._qualname
            func_run_cnt = inc_global_var(func_name) - 1
            func_name += f"_{func_run_cnt}"
            # add to trace (function call)
            if global_vars.trace_engine:
                # NOTE: compute repr(args) and repr(kwargs) might be time-consuming
                add_to_trace(
                    FunctionCallEvent(
                        name=func_name,
                        args={"args": repr(args), "kwargs": repr(kwargs)},
                    )
                )
            # closure variables
            freevars = prompt_func.compiled_func.freevars
            if _locals is None:
                # * Workaround for closure variables
                # Default: use the locals from the caller
                frame = inspect.currentframe()
                num_wrappers = (3 if auto_prime else 2) + num_extra_wrappers
                for _ in range(num_wrappers):
                    if frame is None:
                        raise RuntimeError("No caller frame found")
                    # back to @_langsmith_traceable frame, and the caller frame
                    frame = frame.f_back
                if frame is None:
                    raise RuntimeError("No caller frame found")
                _locals = frame.f_locals

                if len(freevars):
                    vars = {var: _locals.get(var, "NotFound") for var in freevars}
                    logger.debug(
                        f"For freevars of function {func.__name__}, "
                        f"automatically using locals from the caller: {vars}"
                    )
                    for var in freevars:
                        if var not in _locals:
                            logger.warning(
                                f"could not find variable {var} automatically from the caller frame."
                            )
            results = prompt_func(
                *args,
                _globals=_globals,
                _locals=_locals,
                _is_class_method=_is_class_method,
                **kwargs,
            )

            # add to trace (function return)
            add_to_trace(FunctionReturnEvent(name=func_name))  # ret=results
            return results

        if auto_prime:
            wrapper = auto_prime_gen(wrapper)
        setattr(wrapper, "_prompt_func", prompt_func)
        return wrapper  # type: ignore

    if isinstance(ctx, str):
        ctx_method = ctx
        # used as a decorator with arguments (e.g., @ppl(ctx="copy"))
        # returns a decorator that takes a function as input
        return decorator
    else:
        # used as a single decorator (i.e., @ppl)
        return decorator(func=ctx)  # returns a wrapper

records

records(
    _ctx: Optional[PromptContext] = None,
) -> PromptRecords

Return the prompt defined in the current function.

Similar to locals() in Python in some sense.
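
Example:

Unlike convo, records returns only what was defined in the current function, not the full conversation inherited from the caller (a minimal sketch):

@ppl
def example():
    "Only the lines written in this function are recorded."
    return records()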

Source code in src/appl/func.py
@need_ctx
def records(_ctx: Optional[PromptContext] = None) -> PromptRecords:
    """Return the prompt defined in the current function.

    Similar to locals() in Python in some sense.
    """
    # add default value for _ctx to avoid the warning of type checker
    if _ctx is None:
        raise ValueError(
            "PromptContext is required for records, "
            "this function should be called within @ppl function."
        )
    return _ctx.records

reset_context

reset_context(func: Callable) -> None

Reset the context for APPL functions with the 'resume' context method.
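
Example:

A function decorated with @ppl(ctx="resume") keeps its context across calls; reset_context starts it over (a minimal sketch):

@ppl(ctx="resume")
def chat(msg: str):
    grow(msg)
    return gen()

chat("Hello")        # the context persists into the next call
reset_context(chat)  # discard it and resume from a fresh context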

Source code in src/appl/func.py
def reset_context(func: Callable) -> None:
    """Reset the context for APPL functions with the 'resume' context method."""
    if prompt_func := getattr(func, "_prompt_func", None):
        if reset_func := getattr(prompt_func, "_reset_context_func", None):
            reset_func()
            logger.info(f"Context reset for function {func.__name__}")
        else:
            logger.warning(f"Nothing to reset for function {func.__name__}")
    else:
        logger.warning(f"Not an APPL function: {func.__name__}, cannot reset context.")

str_future

str_future(obj: Any) -> StringFuture

Convert an object to a StringFuture object.

Source code in src/appl/func.py
def str_future(obj: Any) -> StringFuture:
    """Convert an object to a StringFuture object."""
    return StringFuture(obj)

wraps

wraps(func: F) -> Callable[[F], F]

A replacement for functools.wraps that preserves type hints.

Source code in src/appl/func.py
def wraps(func: F) -> Callable[[F], F]:
    """Replace the functools.wraps to take care of the type hint."""

    def decorator(wrapper: F) -> F:
        return functools.wraps(func)(wrapper)  # type: ignore

    return decorator