Create a run, wait until it finishes and return the final state.
wait(
self,
thread_id: str | None,
assistant_id: str,
*,
input: Input | None = None,
command: Command | None = None,
metadata: Mapping[str, Any] | None = None,
config: Config | None = None,
context: Context | None = None,
checkpoint: Checkpoint | None = None,
checkpoint_id: str | None = None,
checkpoint_during: bool | None = None,
interrupt_before: All | Sequence[str] | None = None,
interrupt_after: All | Sequence[str] | None = None,
webhook: str | None = None,
on_disconnect: DisconnectMode | None = None,
on_completion: OnCompletionBehavior | None = None,
multitask_strategy: MultitaskStrategy | None = None,
if_not_exists: IfNotExists | None = None,
after_seconds: int | None = None,
raise_error: bool = True,
headers: Mapping[str, str] | None = None,
params: QueryParamTypes | None = None,
on_run_created: Callable[[RunCreateMetadata], None] | None = None,
durability: Durability | None = None
) -> builtins.list[dict] | dict[str, Any]

Example:

client = get_client(url="http://localhost:2024")
final_state_of_run = await client.runs.wait(
thread_id=None,
assistant_id="agent",
input={"messages": [{"role": "user", "content": "how are you?"}]},
metadata={"name":"my_run"},
context={"model_name": "anthropic"},
interrupt_before=["node_to_stop_before_1","node_to_stop_before_2"],
interrupt_after=["node_to_stop_after_1","node_to_stop_after_2"],
webhook="https://my.fake.webhook.com",
multitask_strategy="interrupt"
)
print(final_state_of_run)
-------------------------------------------------------------------------------------------------------------------------------------------
{
'messages': [
{
'content': 'how are you?',
'additional_kwargs': {},
'response_metadata': {},
'type': 'human',
'name': None,
'id': 'f51a862c-62fe-4866-863b-b0863e8ad78a',
'example': False
},
{
'content': "I'm doing well, thanks for asking! I'm an AI assistant created by Anthropic to be helpful, honest, and harmless.",
'additional_kwargs': {},
'response_metadata': {},
'type': 'ai',
'name': None,
'id': 'run-bf1cd3c6-768f-4c16-b62d-ba6f17ad8b36',
'example': False,
'tool_calls': [],
'invalid_tool_calls': [],
'usage_metadata': None
}
]
}

| Name | Type | Description |
|---|---|---|
thread_id* | str | None | The thread ID to create the run on. If None, a stateless run will be created. |
assistant_id* | str | The assistant ID or graph name to run. If using graph name, will default to first assistant created from that graph. |
input | Input | None | Default: None. The input to the graph. |
command | Command | None | Default: None. A command to execute. Cannot be combined with input. |
metadata | Mapping[str, Any] | None | Default: None. Metadata to assign to the run. |
config | Config | None | Default: None. The configuration for the assistant. |
context | Context | None | Default: None. Static context to add to the assistant. |
checkpoint | Checkpoint | None | Default: None. The checkpoint to resume from. |
checkpoint_during | bool | None | Default: None. (Deprecated) Whether to checkpoint during the run (or only at the end/interruption). |
interrupt_before | All | Sequence[str] | None | Default: None. Nodes to interrupt immediately before they get executed. |
interrupt_after | All | Sequence[str] | None | Default: None. Nodes to interrupt immediately after they get executed. |
webhook | str | None | Default: None. Webhook to call after LangGraph API call is done. |
on_disconnect | DisconnectMode | None | Default: None. The disconnect mode to use. Must be one of 'cancel' or 'continue'. |
on_completion | OnCompletionBehavior | None | Default: None. Whether to delete or keep the thread created for a stateless run. Must be one of 'delete' or 'keep'. |
multitask_strategy | MultitaskStrategy | None | Default: None. Multitask strategy to use. Must be one of 'reject', 'interrupt', 'rollback', or 'enqueue'. |
if_not_exists | IfNotExists | None | Default: None. How to handle missing thread. Defaults to 'reject'. Must be either 'reject' (raise error if missing), or 'create' (create new thread). |
after_seconds | int | None | Default: None. The number of seconds to wait before starting the run. Use to schedule future runs. |
headers | Mapping[str, str] | None | Default: None. Optional custom headers to include with the request. |
on_run_created | Callable[[RunCreateMetadata], None] | None | Default: None. Optional callback to call when a run is created. |
durability | Durability | None | Default: None. The durability to use for the run. Values are "sync", "async", or "exit". "async" means checkpoints are persisted asynchronously while the next graph step executes (replaces checkpoint_during=True). "sync" means checkpoints are persisted synchronously after each graph step executes (replaces checkpoint_during=False). "exit" means checkpoints are only persisted when the run exits, and intermediate steps are not saved. |