Class: LlmGateway::Clients::OpenAI

Inherits:
BaseClient show all
Defined in:
lib/llm_gateway/clients/openai.rb,
lib/llm_gateway/clients/openai_codex/oauth_flow.rb,
lib/llm_gateway/clients/openai_codex/token_manager.rb

Defined Under Namespace

Classes: OAuthFlow, TokenManager

Constant Summary collapse

CODEX_BASE_ENDPOINT =
"https://chatgpt.com/backend-api/codex"

Instance Attribute Summary collapse

Attributes inherited from BaseClient

#api_key, #base_endpoint, #model_key

Instance Method Summary collapse

Methods inherited from BaseClient

#get, #post, #post_file, #post_stream

Constructor Details

#initialize(model_key: "gpt-4o", api_key: ENV["OPENAI_API_KEY"], account_id: nil) ⇒ OpenAI

Returns a new instance of OpenAI.



12
13
14
15
16
# File 'lib/llm_gateway/clients/openai.rb', line 12

# Returns a new instance of OpenAI.
#
# @param model_key [String] OpenAI model identifier (default "gpt-4o")
# @param api_key [String, nil] API key; defaults to ENV["OPENAI_API_KEY"]
# @param account_id [String, nil] ChatGPT account id used by the Codex endpoints
def initialize(model_key: "gpt-4o", api_key: ENV["OPENAI_API_KEY"], account_id: nil)
  @base_endpoint = "https://api.openai.com/v1"
  # Extraction had dropped the RHS here; the keyword argument is stored
  # so the Codex helpers can fall back to it.
  @account_id = account_id
  super(model_key: model_key, api_key: api_key)
end

Instance Attribute Details

#account_id ⇒ Object (readonly)

Returns the value of attribute account_id.



10
11
12
# File 'lib/llm_gateway/clients/openai.rb', line 10

# Returns the value of attribute account_id.
#
# @return [String, nil] the ChatGPT account id, if one was given
def account_id
  @account_id
end

Instance Method Details

#chat(messages, tools: nil, system: [], **options) ⇒ Object



18
19
20
21
22
23
24
25
26
27
# File 'lib/llm_gateway/clients/openai.rb', line 18

# Performs a blocking chat-completion request.
#
# @param messages [Array<Hash>] conversation messages
# @param tools [Array<Hash>, nil] tool definitions, omitted when nil
# @param system [Array<Hash>] system messages, prepended to +messages+
# @param options [Hash] extra body fields merged in last (may override)
def chat(messages, tools: nil, system: [], **options)
  payload = { model: model_key, messages: system + messages }
  payload[:tools] = tools if tools

  post("chat/completions", payload.merge(options))
end

#chat_codex(messages, tools: nil, system: [], account_id: nil, **options) ⇒ Object



77
78
79
80
81
82
83
84
85
86
87
88
# File 'lib/llm_gateway/clients/openai.rb', line 77

# Sends a Codex chat request and blocks until the stream finishes,
# returning only the terminal payload.
#
# @param account_id [String, nil] overrides the instance account id — TODO
#   confirm fallback behavior inside post_codex_stream
# @return [Hash, nil] the response from the "response.completed" SSE event,
#   or nil if that event never arrived
def chat_codex(messages, tools: nil, system: [], account_id: nil, **options)
  body = build_codex_body(messages, system, tools, **options)

  completed_response = nil
  # Extraction had truncated the keyword value: pass the account_id argument through.
  post_codex_stream("responses", body, account_id: account_id) do |raw_sse|
    if raw_sse[:event] == "response.completed"
      completed_response = raw_sse.dig(:data, :response)
    end
  end

  completed_response
end

#download_file(file_id) ⇒ Object



95
96
97
# File 'lib/llm_gateway/clients/openai.rb', line 95

# Retrieves the content of a previously uploaded file.
#
# @param file_id [String] the OpenAI file identifier
def download_file(file_id) = get("files/#{file_id}/content")

#generate_embeddings(input) ⇒ Object



99
100
101
102
103
104
105
# File 'lib/llm_gateway/clients/openai.rb', line 99

# Requests embeddings for +input+ using the configured model.
#
# @param input [String, Array] text (or batch of texts) to embed
def generate_embeddings(input)
  post("embeddings", { input: input, model: model_key })
end

#get_oauth_access_token(access_token:, refresh_token:, expires_at:, account_id: nil, &block) ⇒ Object



65
66
67
68
69
70
71
72
73
74
75
# File 'lib/llm_gateway/clients/openai.rb', line 65

# Returns a valid OAuth access token, refreshing it first if expired.
#
# @param access_token [String] current access token
# @param refresh_token [String] token used to obtain a fresh access token
# @param expires_at [Object] expiry of the current token — presumably a
#   Time or epoch value; verify against TokenManager
# @param account_id [String, nil] ChatGPT account id forwarded to the manager
# @yield invoked by the manager when the token is refreshed, if a block is given
# @return [String] a valid access token
def get_oauth_access_token(access_token:, refresh_token:, expires_at:, account_id: nil, &block)
  token_manager = LlmGateway::Clients::OpenAI::TokenManager.new(
    access_token: access_token,
    refresh_token: refresh_token,
    expires_at: expires_at,
    # Extraction had truncated the keyword value: forward the argument.
    account_id: account_id
  )
  token_manager.on_token_refresh = block if block_given?
  token_manager.ensure_valid_token
  token_manager.access_token
end

#responses(messages, tools: nil, system: [], **options) ⇒ Object



41
42
43
44
45
46
47
48
49
50
51
# File 'lib/llm_gateway/clients/openai.rb', line 41

# Performs a blocking request against the Responses API.
#
# @param messages [Array] input items; nested arrays are flattened
# @param tools [Array<Hash>, nil] tool definitions, omitted when nil
# @param system [Array<Hash>] first entry's :content becomes instructions
# @param options [Hash] extra body fields merged in last (may override)
def responses(messages, tools: nil, system: [], **options)
  payload = { model: model_key, input: messages.flatten }
  payload[:instructions] = system[0][:content] if system.any?
  payload[:tools] = tools if tools

  post("responses", payload.merge(options))
end

#stream(messages, tools: nil, system: [], **options, &block) ⇒ Object



29
30
31
32
33
34
35
36
37
38
39
# File 'lib/llm_gateway/clients/openai.rb', line 29

# Streams a chat completion, yielding chunks to +block+.
# Always requests usage reporting via stream_options.
#
# @param messages [Array<Hash>] conversation messages
# @param tools [Array<Hash>, nil] tool definitions, omitted when nil
# @param system [Array<Hash>] system messages, prepended to +messages+
# @param options [Hash] extra body fields merged before stream_options
def stream(messages, tools: nil, system: [], **options, &block)
  payload = { model: model_key, messages: system + messages }
  payload[:tools] = tools if tools
  payload.merge!(options)
  # include_usage is forced on, preserving any caller-supplied stream_options.
  existing = payload[:stream_options] || {}
  payload[:stream_options] = existing.merge(include_usage: true)

  post_stream("chat/completions", payload, &block)
end

#stream_codex(messages, tools: nil, system: [], account_id: nil, **options, &block) ⇒ Object



90
91
92
93
# File 'lib/llm_gateway/clients/openai.rb', line 90

# Streams a Codex request, yielding raw SSE events to +block+.
#
# @param account_id [String, nil] overrides the instance account id — TODO
#   confirm fallback behavior inside post_codex_stream
def stream_codex(messages, tools: nil, system: [], account_id: nil, **options, &block)
  body = build_codex_body(messages, system, tools, **options)
  # Extraction had truncated the keyword value: pass the account_id argument through.
  post_codex_stream("responses", body, account_id: account_id, &block)
end

#stream_responses(messages, tools: nil, system: [], **options, &block) ⇒ Object



53
54
55
56
57
58
59
60
61
62
63
# File 'lib/llm_gateway/clients/openai.rb', line 53

# Streams from the Responses API, yielding chunks to +block+.
#
# @param messages [Array] input items; nested arrays are flattened
# @param tools [Array<Hash>, nil] tool definitions, omitted when nil
# @param system [Array<Hash>] first entry's :content becomes instructions
# @param options [Hash] extra body fields merged in last (may override)
def stream_responses(messages, tools: nil, system: [], **options, &block)
  payload = { model: model_key, input: messages.flatten }
  payload[:instructions] = system[0][:content] if system.any?
  payload[:tools] = tools if tools

  post_stream("responses", payload.merge(options), &block)
end

#upload_file(filename, content, mime_type = "application/octet-stream", purpose: "user_data") ⇒ Object



107
108
109
# File 'lib/llm_gateway/clients/openai.rb', line 107

# Uploads a file to OpenAI.
#
# @param filename [String] name reported to the API
# @param content [String] raw file bytes
# @param mime_type [String] content type (defaults to octet-stream)
# @param purpose [String] OpenAI file purpose (defaults to "user_data")
def upload_file(filename, content, mime_type = "application/octet-stream", purpose: "user_data")
  post_file("files", content, filename, mime_type: mime_type, purpose: purpose)
end