Class: OpenAI::Resources::Beta::Threads::Runs

Inherits: Object
Defined in:
lib/openai/resources/beta/threads/runs.rb,
lib/openai/resources/beta/threads/runs/steps.rb

Defined Under Namespace

Classes: Steps

Instance Attribute Summary

Instance Method Summary

Constructor Details

#initialize(client:) ⇒ Runs

This method is part of a private API. You should avoid using this method if possible, as it may be removed or changed in the future.

Returns a new instance of Runs.

Parameters:



# File 'lib/openai/resources/beta/threads/runs.rb', line 366

def initialize(client:)
  @client = client
  @steps = OpenAI::Resources::Beta::Threads::Runs::Steps.new(client: client)
end

Instance Attribute Details

#steps ⇒ OpenAI::Resources::Beta::Threads::Runs::Steps (readonly)



# File 'lib/openai/resources/beta/threads/runs.rb', line 9

def steps
  @steps
end

Instance Method Details

#cancel(run_id, thread_id: , request_options: {}) ⇒ OpenAI::Models::Beta::Threads::Run

Cancels a run that is `in_progress`.

Parameters:

  • run_id (String)

    The ID of the run to cancel.

  • thread_id (String)

    The ID of the thread to which this run belongs.

  • request_options (OpenAI::RequestOptions, Hash{Symbol=>Object}, nil)

Returns:

See Also:



# File 'lib/openai/resources/beta/threads/runs.rb', line 260

def cancel(run_id, params)
  parsed, options = OpenAI::Beta::Threads::RunCancelParams.dump_request(params)
  thread_id =
    parsed.delete(:thread_id) do
      raise ArgumentError.new("missing required path argument #{_1}")
    end
  @client.request(
    method: :post,
    path: ["threads/%1$s/runs/%2$s/cancel", thread_id, run_id],
    model: OpenAI::Beta::Threads::Run,
    options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
  )
end
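
A minimal usage sketch, assuming the resource is reached through client.beta.threads.runs on a configured client; the run and thread IDs are placeholders.

require "openai"

# Hypothetical client setup; reads the API key from the environment.
client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

# Cancel an in-progress run; the updated Run object is returned.
run = client.beta.threads.runs.cancel(
  "run_abc123",               # hypothetical run ID
  thread_id: "thread_abc123"  # hypothetical thread ID
)
puts run.status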

#create(thread_id, assistant_id: , include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) ⇒ OpenAI::Models::Beta::Threads::Run

See #create_stream_raw for streaming counterpart.

Some parameter documentation has been truncated; see Models::Beta::Threads::RunCreateParams for more details.

Create a run.

Parameters:

Returns:

See Also:



# File 'lib/openai/resources/beta/threads/runs.rb', line 62

def create(thread_id, params)
  parsed, options = OpenAI::Beta::Threads::RunCreateParams.dump_request(params)
  if parsed[:stream]
    message = "Please use `#create_stream_raw` for the streaming use case."
    raise ArgumentError.new(message)
  end
  query_params = [:include]
  @client.request(
    method: :post,
    path: ["threads/%1$s/runs", thread_id],
    query: parsed.slice(*query_params),
    body: parsed.except(*query_params),
    model: OpenAI::Beta::Threads::Run,
    options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
  )
end
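
A sketch of creating a run, reusing the client from the example above; the thread and assistant IDs are placeholders, and `instructions` is one of the optional body parameters.

run = client.beta.threads.runs.create(
  "thread_abc123",                   # hypothetical thread ID
  assistant_id: "asst_abc123",       # hypothetical assistant ID
  instructions: "Answer concisely."  # optional per-run instructions
)
puts run.id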

#create_stream_raw(thread_id, assistant_id: , include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) ⇒ OpenAI::Internal::Stream<OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent>

See #create for non-streaming counterpart.

Some parameter documentation has been truncated; see Models::Beta::Threads::RunCreateParams for more details.

Create a run.

Parameters:

Returns:

See Also:



# File 'lib/openai/resources/beta/threads/runs.rb', line 130

def create_stream_raw(thread_id, params)
  parsed, options = OpenAI::Beta::Threads::RunCreateParams.dump_request(params)
  unless parsed.fetch(:stream, true)
    message = "Please use `#create` for the non-streaming use case."
    raise ArgumentError.new(message)
  end
  parsed.store(:stream, true)
  query_params = [:include]
  @client.request(
    method: :post,
    path: ["threads/%1$s/runs", thread_id],
    query: parsed.slice(*query_params),
    headers: {"accept" => "text/event-stream"},
    body: parsed.except(*query_params),
    stream: OpenAI::Internal::Stream,
    model: OpenAI::Beta::AssistantStreamEvent,
    options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
  )
end
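
A streaming sketch, under the assumption that the returned OpenAI::Internal::Stream can be iterated with #each; the IDs are placeholders and the client is the one set up earlier.

stream = client.beta.threads.runs.create_stream_raw(
  "thread_abc123",             # hypothetical thread ID
  assistant_id: "asst_abc123"  # hypothetical assistant ID
)
stream.each do |event|
  # Each yielded item is one of the AssistantStreamEvent variants listed in the return type.
  puts event.class
end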

#list(thread_id, after: nil, before: nil, limit: nil, order: nil, request_options: {}) ⇒ OpenAI::Internal::CursorPage<OpenAI::Models::Beta::Threads::Run>

Some parameter documentation has been truncated; see Models::Beta::Threads::RunListParams for more details.

Returns a list of runs belonging to a thread.

Parameters:

  • thread_id (String)

    The ID of the thread the run belongs to.

  • after (String)

A cursor for use in pagination. `after` is an object ID that defines your place

  • before (String)

A cursor for use in pagination. `before` is an object ID that defines your place

  • limit (Integer)

    A limit on the number of objects to be returned. Limit can range between 1 and 1

  • order (Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order)

Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord

  • request_options (OpenAI::RequestOptions, Hash{Symbol=>Object}, nil)

Returns:

See Also:



# File 'lib/openai/resources/beta/threads/runs.rb', line 235

def list(thread_id, params = {})
  parsed, options = OpenAI::Beta::Threads::RunListParams.dump_request(params)
  @client.request(
    method: :get,
    path: ["threads/%1$s/runs", thread_id],
    query: parsed,
    page: OpenAI::Internal::CursorPage,
    model: OpenAI::Beta::Threads::Run,
    options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
  )
end
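
A listing sketch, assuming the returned CursorPage can be enumerated with #each over Run items; the thread ID is a placeholder.

page = client.beta.threads.runs.list(
  "thread_abc123",  # hypothetical thread ID
  limit: 20,
  order: :desc
)
page.each do |run|
  puts "#{run.id} #{run.status}"
end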

#retrieve(run_id, thread_id: , request_options: {}) ⇒ OpenAI::Models::Beta::Threads::Run

Some parameter documentation has been truncated; see Models::Beta::Threads::RunRetrieveParams for more details.

Retrieves a run.

Parameters:

Returns:

See Also:



# File 'lib/openai/resources/beta/threads/runs.rb', line 166

def retrieve(run_id, params)
  parsed, options = OpenAI::Beta::Threads::RunRetrieveParams.dump_request(params)
  thread_id =
    parsed.delete(:thread_id) do
      raise ArgumentError.new("missing required path argument #{_1}")
    end
  @client.request(
    method: :get,
    path: ["threads/%1$s/runs/%2$s", thread_id, run_id],
    model: OpenAI::Beta::Threads::Run,
    options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
  )
end
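
A retrieval sketch with placeholder IDs, using the client from the first example.

run = client.beta.threads.runs.retrieve(
  "run_abc123",               # hypothetical run ID
  thread_id: "thread_abc123"  # hypothetical thread ID
)
puts run.status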

#submit_tool_outputs(run_id, thread_id: , tool_outputs: , request_options: {}) ⇒ OpenAI::Models::Beta::Threads::Run

See #submit_tool_outputs_stream_raw for streaming counterpart.

Some parameter documentation has been truncated; see Models::Beta::Threads::RunSubmitToolOutputsParams for more details.

When a run has the `status: "requires_action"` and `required_action.type` is `submit_tool_outputs`, this endpoint can be used to submit the outputs from the tool calls once they're all completed. All outputs must be submitted in a single request.

Parameters:

Returns:

See Also:



# File 'lib/openai/resources/beta/threads/runs.rb', line 298

def submit_tool_outputs(run_id, params)
  parsed, options = OpenAI::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params)
  if parsed[:stream]
    message = "Please use `#submit_tool_outputs_stream_raw` for the streaming use case."
    raise ArgumentError.new(message)
  end
  thread_id =
    parsed.delete(:thread_id) do
      raise ArgumentError.new("missing required path argument #{_1}")
    end
  @client.request(
    method: :post,
    path: ["threads/%1$s/runs/%2$s/submit_tool_outputs", thread_id, run_id],
    body: parsed,
    model: OpenAI::Beta::Threads::Run,
    options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
  )
end
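
A sketch of submitting tool outputs once a run is in `requires_action`; the IDs and the output payload are placeholders, and the tool_call_id would normally be taken from required_action.submit_tool_outputs.tool_calls on the run.

run = client.beta.threads.runs.submit_tool_outputs(
  "run_abc123",                # hypothetical run ID
  thread_id: "thread_abc123",  # hypothetical thread ID
  tool_outputs: [
    {
      tool_call_id: "call_abc123",     # ID of the tool call being answered
      output: "{\"temperature\": 22}"  # stringified result of the tool call
    }
  ]
)
puts run.status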

#submit_tool_outputs_stream_raw(run_id, thread_id: , tool_outputs: , request_options: {}) ⇒ OpenAI::Internal::Stream<OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent>

See #submit_tool_outputs for non-streaming counterpart.

Some parameter documentation has been truncated; see Models::Beta::Threads::RunSubmitToolOutputsParams for more details.

When a run has the `status: "requires_action"` and `required_action.type` is `submit_tool_outputs`, this endpoint can be used to submit the outputs from the tool calls once they're all completed. All outputs must be submitted in a single request.

Parameters:

Returns:

See Also:



# File 'lib/openai/resources/beta/threads/runs.rb', line 341

def submit_tool_outputs_stream_raw(run_id, params)
  parsed, options = OpenAI::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params)
  unless parsed.fetch(:stream, true)
    message = "Please use `#submit_tool_outputs` for the non-streaming use case."
    raise ArgumentError.new(message)
  end
  parsed.store(:stream, true)
  thread_id =
    parsed.delete(:thread_id) do
      raise ArgumentError.new("missing required path argument #{_1}")
    end
  @client.request(
    method: :post,
    path: ["threads/%1$s/runs/%2$s/submit_tool_outputs", thread_id, run_id],
    headers: {"accept" => "text/event-stream"},
    body: parsed,
    stream: OpenAI::Internal::Stream,
    model: OpenAI::Beta::AssistantStreamEvent,
    options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
  )
end
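
The streaming counterpart, under the same assumptions as the earlier streaming sketch (enumerable stream, placeholder IDs and payload).

stream = client.beta.threads.runs.submit_tool_outputs_stream_raw(
  "run_abc123",                # hypothetical run ID
  thread_id: "thread_abc123",  # hypothetical thread ID
  tool_outputs: [{tool_call_id: "call_abc123", output: "done"}]
)
stream.each { |event| puts event.class }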

#update(run_id, thread_id: , metadata: nil, request_options: {}) ⇒ OpenAI::Models::Beta::Threads::Run

Some parameter documentation has been truncated; see Models::Beta::Threads::RunUpdateParams for more details.

Modifies a run.

Parameters:

  • run_id (String)

    Path param: The ID of the run to modify.

  • thread_id (String)

    Path param: The ID of the [thread](platform.openai.com/docs/api-referenc

  • metadata (Hash{Symbol=>String}, nil)

    Body param: Set of 16 key-value pairs that can be attached to an object. This ca

  • request_options (OpenAI::RequestOptions, Hash{Symbol=>Object}, nil)

Returns:

See Also:



# File 'lib/openai/resources/beta/threads/runs.rb', line 198

def update(run_id, params)
  parsed, options = OpenAI::Beta::Threads::RunUpdateParams.dump_request(params)
  thread_id =
    parsed.delete(:thread_id) do
      raise ArgumentError.new("missing required path argument #{_1}")
    end
  @client.request(
    method: :post,
    path: ["threads/%1$s/runs/%2$s", thread_id, run_id],
    body: parsed,
    model: OpenAI::Beta::Threads::Run,
    options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
  )
end
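
An update sketch with placeholder IDs; the metadata keys are illustrative and limited to 16 key-value pairs per the parameter docs.

run = client.beta.threads.runs.update(
  "run_abc123",                # hypothetical run ID
  thread_id: "thread_abc123",  # hypothetical thread ID
  metadata: {user_ref: "42"}   # illustrative metadata
)
puts run.metadata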