Module: Legion::Extensions::Llm::Openai::Provider::Capabilities

Defined in:
lib/legion/extensions/llm/openai/provider.rb

Overview

Provider-level capability checks based on current OpenAI model families.

Constant Summary collapse

# Maps externally-visible capability names (String keys used by callers)
# to the predicate method on this module that reports whether a given
# model supports that capability. Consumed by .critical_capabilities_for,
# which invokes each predicate via public_send.
CAPABILITY_CHECKS =
{
  'streaming' => :streaming?,
  'function_calling' => :functions?,
  'vision' => :vision?,
  'embeddings' => :embeddings?,
  'moderation' => :moderation?,
  'image' => :images?,
  'audio_transcription' => :audio_transcription?
}.freeze

Class Method Summary collapse

Class Method Details

.audio_transcription?(model) ⇒ Boolean

Returns:

  • (Boolean)


135
# File 'lib/legion/extensions/llm/openai/provider.rb', line 135

def audio_transcription?(model) = model_id(model).match?(/^(gpt-4o.*transcribe|whisper)/)

.chat?(model) ⇒ Boolean

Returns:

  • (Boolean)


128
# File 'lib/legion/extensions/llm/openai/provider.rb', line 128

def chat?(model) = !non_chat_model?(model_id(model))

.critical_capabilities_for(model) ⇒ Object



137
138
139
140
# File 'lib/legion/extensions/llm/openai/provider.rb', line 137

# Returns the subset of capability names from CAPABILITY_CHECKS whose
# predicate holds for +model+.
#
# @param model [Hash, #id, #to_s] model record or identifier
# @return [Array<String>] capability names supported by the model
def critical_capabilities_for(model)
  id = model_id(model)
  CAPABILITY_CHECKS.each_with_object([]) do |(capability, predicate), supported|
    supported << capability if public_send(predicate, id)
  end
end

.embeddings?(model) ⇒ Boolean

Returns:

  • (Boolean)


132
# File 'lib/legion/extensions/llm/openai/provider.rb', line 132

def embeddings?(model) = model_id(model).start_with?('text-embedding-')

.functions?(model) ⇒ Boolean

Returns:

  • (Boolean)


130
# File 'lib/legion/extensions/llm/openai/provider.rb', line 130

def functions?(model) = model_id(model).match?(/^(gpt|o\d)/)

.images?(model) ⇒ Boolean

Returns:

  • (Boolean)


134
# File 'lib/legion/extensions/llm/openai/provider.rb', line 134

def images?(model) = model_id(model).match?(/^(gpt-image|dall-e)/)

.model_id(model) ⇒ Object



142
143
144
145
146
# File 'lib/legion/extensions/llm/openai/provider.rb', line 142

# Normalizes a model reference to its String identifier.
# Accepts an API response Hash (String 'id' key, '' when absent), any
# object exposing #id, or anything convertible with #to_s.
#
# @param model [Hash, #id, #to_s] model record or identifier
# @return [String] the model id, or '' when it cannot be determined
def model_id(model)
  case model
  when Hash
    model.fetch('id', '')
  else
    model.respond_to?(:id) ? model.id.to_s : model.to_s
  end
end

.moderation?(model) ⇒ Boolean

Returns:

  • (Boolean)


133
# File 'lib/legion/extensions/llm/openai/provider.rb', line 133

def moderation?(model) = model_id(model).include?('moderation')

.non_chat_model?(id) ⇒ Boolean

Returns:

  • (Boolean)


148
149
150
151
# File 'lib/legion/extensions/llm/openai/provider.rb', line 148

# Whether the String id belongs to a family that cannot do chat
# completions: embeddings, moderation, image, transcription, TTS,
# realtime, or video (Sora) models.
#
# @param id [String] normalized model identifier (see .model_id)
# @return [Boolean]
def non_chat_model?(id)
  # Anchored with \A instead of ^: ^ matches at any line start, so an id
  # containing a newline could fake a prefix match; \A anchors the whole string.
  embeddings?(id) || moderation?(id) || images?(id) || audio_transcription?(id) ||
    id.match?(/\A(tts|gpt-realtime|sora)/)
end

.streaming?(model) ⇒ Boolean

Returns:

  • (Boolean)


129
# File 'lib/legion/extensions/llm/openai/provider.rb', line 129

def streaming?(model) = chat?(model)

.vision?(model) ⇒ Boolean

Returns:

  • (Boolean)


131
# File 'lib/legion/extensions/llm/openai/provider.rb', line 131

def vision?(model) = model_id(model).match?(/^(gpt|o\d|omni-moderation)/)