Class: Kreator::Providers::OpenAI

Inherits:
Base
  • Object
show all
Defined in:
lib/kreator/providers/openai.rb

Direct Known Subclasses

OpenRouter

Constant Summary collapse

DEFAULT_BASE_URL =
"https://api.openai.com/v1"

Constants inherited from Base

Base::DEFAULT_MAX_RETRIES, Base::TRANSIENT_HTTP_STATUSES

Instance Attribute Summary

Attributes inherited from Base

#api_key, #base_url, #max_retries, #name

Instance Method Summary collapse

Constructor Details

#initialize(api_key: ENV.fetch("OPENAI_API_KEY", nil), base_url: ENV.fetch("OPENAI_BASE_URL", DEFAULT_BASE_URL), name: "openai", max_retries: DEFAULT_MAX_RETRIES) ⇒ OpenAI

Returns a new instance of OpenAI.

Raises:

  • (Error) — if the resolved api_key is blank (neither passed in nor set via OPENAI_API_KEY)

8
9
10
11
12
# File 'lib/kreator/providers/openai.rb', line 8

# Builds an OpenAI provider. Credentials and endpoint default to the
# OPENAI_API_KEY / OPENAI_BASE_URL environment variables; all keyword
# arguments are forwarded to the Base provider via +super+.
#
# @raise [Error] when no API key is available (argument blank and env unset)
def initialize(api_key: ENV.fetch("OPENAI_API_KEY", nil), base_url: ENV.fetch("OPENAI_BASE_URL", DEFAULT_BASE_URL), name: "openai", max_retries: DEFAULT_MAX_RETRIES)
  # Treat nil and "" the same: both mean "no key configured".
  if api_key.to_s.empty?
    raise Error, "OPENAI_API_KEY is required for the openai provider"
  end

  super
end

Instance Method Details

#capabilities(model) ⇒ Object



35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
# File 'lib/kreator/providers/openai.rb', line 35

# Returns the Base capability hash augmented with OpenAI-specific flags
# inferred from the model identifier.
#
# @param model [#to_s] model identifier (e.g. "gpt-5", "gpt-4o", "gpt-4.1")
# @return [Hash] merged capabilities with "vision", "reasoning",
#   and "context_window" keys ("context_window" is nil for unrecognized models)
def capabilities(model)
  id = model.to_s

  # Known context-window sizes; unknown families fall through to nil.
  context_window =
    if id.match?(/gpt-5(?:\.|-|\z)/)
      400_000
    elsif id.match?(/4\.1/) || id.match?(/4o/)
      128_000
    end

  super.merge(
    "vision" => id.match?(/gpt-5|4o|4\.1/),
    "reasoning" => id.match?(/gpt-5|o[134]|reasoning/),
    "context_window" => context_window
  )
end

#stream(messages:, tools:, system_prompt:, model:, signal:) {|type: "message_start", role: "assistant"| ... } ⇒ Object

Yields:

  • (type: "message_start", role: "assistant")


14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
# File 'lib/kreator/providers/openai.rb', line 14

# Streams a chat completion from the OpenAI API, yielding lifecycle events
# to the caller's block: a "message_start" event up front, then whatever
# stream_openai_events emits, and finally a "message_end" event carrying any
# accumulated tool calls.
#
# If a Providers::Error occurs before any model output has been emitted,
# the request is retried once as a non-streaming call via complete_once;
# if output was already emitted, the error is re-raised (a mid-stream
# retry would duplicate visible output).
def stream(messages:, tools:, system_prompt:, model:, signal:, &)
  yield type: "message_start", role: "assistant"

  body = openai_stream_body(messages, tools, system_prompt, model)
  # Mutable state shared with stream_openai_events:
  #   tool_call_builders  - partial tool calls keyed by id/index (presumably;
  #                         verify against stream_openai_events)
  #   emitted_model_output - whether any content reached the caller yet
  stream_state = {
    tool_call_builders: {},
    emitted_model_output: false
  }

  begin
    stream_openai_events(body, signal, stream_state, &)

    # Finalize each partially-built tool call before signaling completion.
    tool_calls = stream_state.fetch(:tool_call_builders).values.map { |builder| build_tool_call(builder) }
    yield type: "message_end", tool_calls: tool_calls
  rescue Providers::Error
    # Too late to retry transparently once output has been surfaced.
    raise if stream_state.fetch(:emitted_model_output)

    # Fall back to a single non-streaming request; stream/stream_options
    # are stripped since they only apply to streamed requests.
    complete_once(body.merge(stream: false).except(:stream_options), &)
  end
end