Class: RubyLLM::Providers::OpenAI

Inherits:
Base
  • Object
show all
Defined in:
lib/ruby_llm/providers/openai.rb

Instance Attribute Summary

Attributes inherited from Base

#connection

Instance Method Summary collapse

Methods inherited from Base

#initialize

Constructor Details

This class inherits a constructor from RubyLLM::Providers::Base

Instance Method Details

#chat(messages, model: nil, temperature: 0.7, stream: false, tools: nil, &block) ⇒ Object



6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
# File 'lib/ruby_llm/providers/openai.rb', line 6

# Sends a chat completion request to the OpenAI API.
#
# @param messages [Array<#to_h>] conversation messages; each element must respond to #to_h
# @param model [String, nil] model id; falls back to RubyLLM.configuration.default_model
# @param temperature [Float] sampling temperature forwarded to the API
# @param stream [Boolean] whether to request a streamed response
# @param tools [Array, nil] tool definitions exposed to the model as functions
# @yield streamed response chunks when +stream+ is true and a block is given
# @return [Object] result of the (streaming or non-streaming) completion call
# @raise [RubyLLM::Error] on timeout, connection failure, or an API client error
def chat(messages, model: nil, temperature: 0.7, stream: false, tools: nil, &block)
  payload = {
    model: model || RubyLLM.configuration.default_model,
    messages: messages.map(&:to_h),
    temperature: temperature,
    stream: stream
  }

  if tools&.any?
    payload[:functions] = tools.map { |tool| tool_to_function(tool) }
    payload[:function_call] = 'auto'
  end

  # Single debug guard instead of re-reading ENV for each line.
  if ENV['RUBY_LLM_DEBUG']
    puts 'Sending payload to OpenAI:'
    puts JSON.pretty_generate(payload)
  end

  # Streaming only happens when the caller actually supplied a block;
  # otherwise fall through to the blocking completion call.
  if stream && block_given?
    stream_chat_completion(payload, tools, &block)
  else
    create_chat_completion(payload, tools, &block)
  end
rescue Faraday::TimeoutError
  raise RubyLLM::Error, 'Request timed out'
rescue Faraday::ConnectionFailed
  raise RubyLLM::Error, 'Connection failed'
rescue Faraday::ClientError => e
  raise RubyLLM::Error, 'Client error' unless e.response

  # NOTE(review): assumes e.response[:body] is already parsed into a Hash by
  # middleware — a raw String body would raise NoMethodError here. TODO confirm.
  error_msg = e.response[:body]['error']&.fetch('message', nil) || "HTTP #{e.response[:status]}"
  raise RubyLLM::Error, "API error: #{error_msg}"
end

#list_models ⇒ Object



38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
# File 'lib/ruby_llm/providers/openai.rb', line 38

# Fetches the models available from the OpenAI API and wraps each entry
# in a ModelInfo enriched with provider capability metadata.
#
# @return [Array<ModelInfo>] one entry per model returned by the API
# @raise [RubyLLM::Error] when the API responds with an error status
def list_models
  response = @connection.get('/v1/models') do |req|
    req.headers['Authorization'] = "Bearer #{RubyLLM.configuration.openai_api_key}"
  end

  raise RubyLLM::Error, "API error: #{parse_error_message(response)}" if response.status >= 400

  caps = RubyLLM::ModelCapabilities::OpenAI.new
  entries = response.body['data'] || []
  entries.map do |entry|
    id = entry['id']
    ModelInfo.new(
      id: id,
      created_at: Time.at(entry['created']),
      display_name: caps.format_display_name(id),
      provider: 'openai',
      metadata: {
        object: entry['object'],
        owned_by: entry['owned_by']
      },
      context_window: caps.determine_context_window(id),
      max_tokens: caps.determine_max_tokens(id),
      supports_vision: caps.supports_vision?(id),
      supports_functions: caps.supports_functions?(id),
      supports_json_mode: caps.supports_json_mode?(id),
      input_price_per_million: caps.get_input_price(id),
      output_price_per_million: caps.get_output_price(id)
    )
  end
rescue Faraday::Error => e
  handle_error(e)
end