Skip to main content

Conditional Execution

The when parameter ensures that a step is executed only if the specified condition is met.
async def send_alert(message: str) -> None:
    """Emit a simple alert by printing *message* to stdout."""
    print(f"Alert: {message}")

# Run send_alert only once both func_1 and func_3 have produced output.
workflow = (
    Workflow(name='my_workflow')
    .step(func_1, x="a")
    .step(func_2, y="b")
    .step(func_3)
    .step(
        send_alert,
        message="Process completed",
        # Condition: both upstream step spans carry a truthy output.
        when=lambda: get_run_context().step_span("func_1").output
        and get_run_context().step_span("func_3").output,
    )
)
In this case, the send_alert step will be executed after both func_1 and func_3 have been executed and have an output.

Use case: Error handling - Fallback Logic

Conditional execution allows you to define alternative workflow paths based on the success or failure of previous steps. This is especially useful when interacting with external services or resources that may be unavailable.
from timbal import Workflow
from timbal.state import get_run_context

def api_call() -> dict:
    """Simulate an external API call.

    Returns a payload carrying a ``status`` field ("ok" on success)
    and the fetched ``data``.
    """
    # API call logic here
    response = {"status": "ok", "data": "success"}  # {"status": "error"} if failed
    return response

def success_handler(data: str) -> str:
    """Format the successful API payload for downstream consumers."""
    return "Success: {}".format(data)

def error_handler() -> str:
    """Return the fallback message used when the API call reports an error."""
    fallback_message = "Fallback: Operation failed"
    return fallback_message

# Branching workflow: exactly one of the two handlers runs,
# depending on the status reported by api_call.
workflow = (
    Workflow(name="conditional_workflow")
    .step(api_call)
    .step(
        success_handler,
        # Pull the payload out of the api_call step's output lazily.
        data=lambda: get_run_context().step_span("api_call").output["data"],
        # Success path: only runs when the API reported "ok".
        when=lambda: get_run_context().step_span("api_call").output["status"] == "ok",
    )
    .step(
        error_handler,
        # Fallback path: only runs when the API reported "error".
        when=lambda: get_run_context().step_span("api_call").output["status"] == "error",
    )
)
If a step's condition is never met, that step and all of its successors are skipped. This prevents dead-end execution paths.

Smart Workflows: Conditional Processing using Agents

Let AI decide which workflow steps to execute:
# Decision agent: labels each incoming request as 'urgent' or 'normal'.
classifier = Agent(
    name="classifier",
    model="openai/gpt-4o-mini",
    system_prompt="Respond with 'urgent' or 'normal'.",
)

# Processing agents
# Heavier model reserved for the urgent path.
urgent_handler = Agent(
    name="urgent_handler",
    model="openai/gpt-4o",
    system_prompt="Handle urgent requests quickly.",
)

# Cheaper model for routine traffic.
normal_handler = Agent(
    name="normal_handler",
    model="openai/gpt-4o-mini",
    system_prompt="Handle normal requests efficiently.",
)

# Smart workflow: the classifier's text output routes the request
# to exactly one of the two processing agents.
workflow = (
    Workflow(name="smart_processor")
    .step(classifier)
    .step(
        urgent_handler,
        # NOTE(review): assumes the classifier's first content block exposes a
        # callable text() — confirm against the Agent output type.
        when=lambda: "urgent" in get_run_context().step_span("classifier").output.content[0].text(),
    )
    .step(
        normal_handler,
        when=lambda: "normal" in get_run_context().step_span("classifier").output.content[0].text(),
    )
)