Class: LlmGateway::Client

Inherits:
Object
Defined in:
lib/llm_gateway/client.rb

Class Method Summary collapse

Class Method Details

.build_client(provider, api_key:, model: "none") ⇒ Object



15
16
17
18
19
20
21
22
# File 'lib/llm_gateway/client.rb', line 15

# Builds a provider adapter and returns its underlying raw client object.
#
# @param provider [String, Symbol] provider identifier (e.g. "anthropic")
# @param api_key [String] credential passed through to the provider
# @param model [String] model key forwarded as +model_key+; defaults to "none"
# @return [Object] the provider-specific client exposed by the adapter
def self.build_client(provider, api_key:, model: "none")
  LlmGateway
    .build_provider(provider: provider, api_key: api_key, model_key: model)
    .client
end

.chat(model, message, tools: nil, system: nil, api_key: nil, refresh_token: nil, expires_at: nil, **options) ⇒ Object



5
6
7
8
# File 'lib/llm_gateway/client.rb', line 5

# Sends a chat request, routing to the provider inferred from +model+.
#
# @param model [String] model name used to pick the provider adapter
# @param message [Object] message payload handed to the adapter
# @param tools [Object, nil] optional tool definitions
# @param system [Object, nil] optional system prompt
# @param api_key [String, nil] optional credential override
# @param refresh_token [String, nil] optional OAuth refresh token
# @param expires_at [Object, nil] optional credential expiry
# @return [Object] whatever the adapter's +chat+ returns
def self.chat(model, message, tools: nil, system: nil, api_key: nil, refresh_token: nil, expires_at: nil, **options)
  build_adapter_from_model(
    model,
    api_key: api_key,
    refresh_token: refresh_token,
    expires_at: expires_at
  ).chat(message, tools: tools, system: system, **options)
end

.download_file(provider, **kwargs) ⇒ Object



33
34
35
36
37
38
39
40
# File 'lib/llm_gateway/client.rb', line 33

# Downloads a file via the given provider's adapter.
#
# The :api_key entry is removed from +kwargs+ and used to construct the
# adapter; all remaining keyword arguments are forwarded to +download_file+.
#
# @param provider [String, Symbol] provider identifier
# @return [Object] whatever the adapter's +download_file+ returns
def self.download_file(provider, **kwargs)
  LlmGateway
    .build_provider(provider: provider, api_key: kwargs.delete(:api_key))
    .download_file(**kwargs)
end

.provider_from_model(model) ⇒ Object



42
43
44
45
46
47
48
49
50
# File 'lib/llm_gateway/client.rb', line 42

# Maps a model name to its provider identifier by prefix.
#
# @param model [String] model name, e.g. "claude-3-opus", "llama-3-70b", "gpt-4o"
# @return [String] "anthropic", "groq", or "openai"
# @raise [LlmGateway::Errors::UnsupportedModel] when no known prefix matches
def self.provider_from_model(model)
  return "anthropic" if model.start_with?("claude")
  return "groq" if model.start_with?("llama")
  # String#start_with? accepts multiple prefixes and returns true on any match.
  return "openai" if model.start_with?("gpt", "o4-", "openai")

  raise LlmGateway::Errors::UnsupportedModel, model
end

.provider_id_from_client(client) ⇒ Object



52
53
54
55
56
57
58
59
60
61
62
63
# File 'lib/llm_gateway/client.rb', line 52

# Returns the provider identifier string for a client instance.
#
# Falls back to the lowercased class name for clients outside the known set.
#
# @param client [Object] a provider client instance
# @return [String] "anthropic", "openai", "groq", or the downcased class name
def self.provider_id_from_client(client)
  if client.is_a?(LlmGateway::Clients::Anthropic)
    "anthropic"
  elsif client.is_a?(LlmGateway::Clients::OpenAI)
    "openai"
  elsif client.is_a?(LlmGateway::Clients::Groq)
    "groq"
  else
    client.class.name.downcase
  end
end

.responses(model, message, tools: nil, system: nil, api_key: nil, **options) ⇒ Object



10
11
12
13
# File 'lib/llm_gateway/client.rb', line 10

# Sends a request through the provider's "responses" API, routing by model.
#
# @param model [String] model name used to pick the provider adapter
# @param message [Object] message payload handed to the adapter
# @param tools [Object, nil] optional tool definitions
# @param system [Object, nil] optional system prompt
# @param api_key [String, nil] optional credential override
# @return [Object] whatever the adapter's +chat+ returns
def self.responses(model, message, tools: nil, system: nil, api_key: nil, **options)
  build_adapter_from_model(model, api_key: api_key, api: "responses")
    .chat(message, tools: tools, system: system, **options)
end

.upload_file(provider, **kwargs) ⇒ Object



24
25
26
27
28
29
30
31
# File 'lib/llm_gateway/client.rb', line 24

# Uploads a file via the given provider's adapter.
#
# The :api_key entry is removed from +kwargs+ and used to construct the
# adapter; all remaining keyword arguments are forwarded to +upload_file+.
#
# @param provider [String, Symbol] provider identifier
# @return [Object] whatever the adapter's +upload_file+ returns
def self.upload_file(provider, **kwargs)
  LlmGateway
    .build_provider(provider: provider, api_key: kwargs.delete(:api_key))
    .upload_file(**kwargs)
end