Class: Legion::Extensions::Llm::Openai::Provider
- Inherits: Provider
- Ancestor chain: Object → Provider → Legion::Extensions::Llm::Openai::Provider
- Includes:
- Provider::OpenAICompatible, Logging::Helper
- Defined in:
- lib/legion/extensions/llm/openai/provider.rb
Overview
OpenAI provider implementation for the Legion::Extensions::Llm base provider contract.
Defined Under Namespace
Modules: Capabilities
Constant Summary collapse
- CAPABILITY_MAP =
── Static capability map for known OpenAI model families ──────Maps model-id prefixes to a set of capabilities and modality vectors. Used by list_models to build Model::Info structs from the raw /v1/models response.
{ 'gpt-4o' => { capabilities: %i[completion streaming function_calling vision structured_output], modalities_input: %w[text image audio], modalities_output: %w[text] }, 'gpt-4.1' => { capabilities: %i[completion streaming function_calling vision structured_output], modalities_input: %w[text image], modalities_output: %w[text] }, 'gpt-4' => { capabilities: %i[completion streaming function_calling vision], modalities_input: %w[text image], modalities_output: %w[text] }, 'gpt-5' => { capabilities: %i[completion streaming function_calling vision structured_output reasoning], modalities_input: %w[text image], modalities_output: %w[text] }, 'o4' => { capabilities: %i[completion streaming function_calling vision reasoning], modalities_input: %w[text image], modalities_output: %w[text] }, 'o3' => { capabilities: %i[completion streaming function_calling vision reasoning], modalities_input: %w[text image], modalities_output: %w[text] }, 'o1' => { capabilities: %i[completion streaming function_calling vision reasoning], modalities_input: %w[text image], modalities_output: %w[text] }, 'text-embedding-' => { capabilities: %i[embedding], modalities_input: %w[text], modalities_output: %w[embeddings] }, 'omni-moderation' => { capabilities: %i[moderation], modalities_input: %w[text image], modalities_output: %w[moderation] }, 'text-moderation' => { capabilities: %i[moderation], modalities_input: %w[text], modalities_output: %w[moderation] }, 'gpt-image' => { capabilities: %i[image_generation], modalities_input: %w[text image], modalities_output: %w[image] }, 'dall-e' => { capabilities: %i[image_generation], modalities_input: %w[text], modalities_output: %w[image] }, 'whisper' => { capabilities: %i[audio_transcription], modalities_input: %w[audio], modalities_output: %w[text] }, 'tts' => { capabilities: %i[audio_generation], modalities_input: %w[text], modalities_output: %w[audio] } }.freeze
Class Attribute Summary collapse
- .registry_publisher ⇒ Object
Class Method Summary collapse
- .capabilities ⇒ Object
- .configuration_options ⇒ Object
- .configuration_requirements ⇒ Object
- .slug ⇒ Object
Instance Method Summary collapse
- #api_base ⇒ Object
- #chat_url ⇒ Object
- #headers ⇒ Object
- #image_edit_url ⇒ Object
- #image_generation_url ⇒ Object
- #image_variation_url ⇒ Object
- #images_url(with: nil, mask: nil) ⇒ Object
- #list_models ⇒ Object
- #retrieve_model(model) ⇒ Object
- #stream_usage_supported? ⇒ Boolean
Class Attribute Details
.registry_publisher ⇒ Object
109 110 111 |
# File 'lib/legion/extensions/llm/openai/provider.rb', line 109 def registry_publisher @registry_publisher ||= Legion::Extensions::Llm::RegistryPublisher.new(provider_family: :openai) end |
Class Method Details
.capabilities ⇒ Object
107 |
# File 'lib/legion/extensions/llm/openai/provider.rb', line 107 def capabilities = Capabilities |
.configuration_options ⇒ Object
97 98 99 100 101 102 103 104 105 |
# File 'lib/legion/extensions/llm/openai/provider.rb', line 97 def %i[ openai_api_key openai_api_base openai_organization_id openai_project_id openai_use_system_role ] end |
.configuration_requirements ⇒ Object
95 |
# File 'lib/legion/extensions/llm/openai/provider.rb', line 95 def configuration_requirements = %i[openai_api_key] |
.slug ⇒ Object
94 |
# File 'lib/legion/extensions/llm/openai/provider.rb', line 94 def slug = 'openai' |
Instance Method Details
#api_base ⇒ Object
156 157 158 |
# File 'lib/legion/extensions/llm/openai/provider.rb', line 156 def api_base config.openai_api_base || 'https://api.openai.com' end |
#chat_url ⇒ Object
168 |
# File 'lib/legion/extensions/llm/openai/provider.rb', line 168 def chat_url = completion_url |
#headers ⇒ Object
160 161 162 163 164 165 166 |
# File 'lib/legion/extensions/llm/openai/provider.rb', line 160 def headers { 'Authorization' => "Bearer #{config.openai_api_key}", 'OpenAI-Organization' => config.openai_organization_id, 'OpenAI-Project' => config.openai_project_id }.compact end |
#image_edit_url ⇒ Object
170 |
# File 'lib/legion/extensions/llm/openai/provider.rb', line 170 def image_edit_url = '/v1/images/edits' |
#image_generation_url ⇒ Object
169 |
# File 'lib/legion/extensions/llm/openai/provider.rb', line 169 def image_generation_url = '/v1/images/generations' |
#image_variation_url ⇒ Object
171 |
# File 'lib/legion/extensions/llm/openai/provider.rb', line 171 def image_variation_url = '/v1/images/variations' |
#images_url(with: nil, mask: nil) ⇒ Object
172 |
# File 'lib/legion/extensions/llm/openai/provider.rb', line 172 def images_url(with: nil, mask: nil) = super |
#list_models ⇒ Object
183 184 185 186 187 188 189 190 191 192 193 194 |
# File 'lib/legion/extensions/llm/openai/provider.rb', line 183 def list_models log.info('Listing OpenAI models') raw = connection.get(models_url) models = build_model_infos(raw.body) log.info("Discovered #{models.size} OpenAI models") self.class.registry_publisher.publish_models_async(models, readiness: readiness(live: false)) models rescue StandardError => e handle_exception(e, level: :error, handled: true, operation: 'list_models') raise end |
#retrieve_model(model) ⇒ Object
174 175 176 177 178 179 180 181 |
# File 'lib/legion/extensions/llm/openai/provider.rb', line 174 def retrieve_model(model) log.info("Retrieving model: #{model}") connection.get("#{models_url}/#{model}").body rescue StandardError => e handle_exception(e, level: :error, handled: true, operation: 'retrieve_model') raise end |
#stream_usage_supported? ⇒ Boolean
154 |
# File 'lib/legion/extensions/llm/openai/provider.rb', line 154 def stream_usage_supported? = true |