Module: Legion::Extensions::Llm::Openai::Provider::Capabilities

Defined in:
lib/legion/extensions/llm/openai/provider.rb

Overview

Provider-level capability checks based on current OpenAI model families.

Constant Summary collapse

# Maps each externally-visible capability name to the predicate method
# that checks it against a model id.
# NOTE: insertion order matters — callers that build capability lists
# from this hash (see critical_capabilities_for) preserve this order.
CAPABILITY_CHECKS =
{
  'streaming' => :streaming?,
  'function_calling' => :functions?,
  'vision' => :vision?,
  'embeddings' => :embeddings?,
  'moderation' => :moderation?,
  'image' => :images?,
  'audio_transcription' => :audio_transcription?
}.freeze

Class Method Summary collapse

Class Method Details

.audio_transcription?(model) ⇒ Boolean

Returns:

  • (Boolean)


57
# File 'lib/legion/extensions/llm/openai/provider.rb', line 57

# True for speech-to-text models (gpt-4o *transcribe* variants and whisper).
#
# Fix: anchor with \A (start of string) instead of ^ (start of line) —
# ^ also matches after any embedded newline, so an id such as
# "x\nwhisper" would incorrectly pass.
#
# @param model [Hash, #id, #to_s] model record or id
# @return [Boolean]
def audio_transcription?(model) = model_id(model).match?(/\A(gpt-4o.*transcribe|whisper)/)

.chat?(model) ⇒ Boolean

Returns:

  • (Boolean)


50
# File 'lib/legion/extensions/llm/openai/provider.rb', line 50

# A model can serve chat completions unless it belongs to one of the
# known non-chat families (see non_chat_model?).
#
# @param model [Hash, #id, #to_s] model record or id
# @return [Boolean]
def chat?(model)
  id = model_id(model)
  !non_chat_model?(id)
end

.critical_capabilities_for(model) ⇒ Object



59
60
61
62
# File 'lib/legion/extensions/llm/openai/provider.rb', line 59

# Lists the capability names that apply to the given model, in the
# order they are declared in CAPABILITY_CHECKS.
#
# @param model [Hash, #id, #to_s] model record or id
# @return [Array<String>] matching capability names
def critical_capabilities_for(model)
  id = model_id(model)
  CAPABILITY_CHECKS.each_with_object([]) do |(name, predicate), found|
    found << name if public_send(predicate, id)
  end
end

.embeddings?(model) ⇒ Boolean

Returns:

  • (Boolean)


54
# File 'lib/legion/extensions/llm/openai/provider.rb', line 54

# Embedding models all share the 'text-embedding-' id prefix.
#
# @param model [Hash, #id, #to_s] model record or id
# @return [Boolean]
def embeddings?(model)
  id = model_id(model)
  id.start_with?('text-embedding-')
end

.functions?(model) ⇒ Boolean

Returns:

  • (Boolean)


52
# File 'lib/legion/extensions/llm/openai/provider.rb', line 52

# Function/tool calling is assumed for GPT-family and o-series (o1, o3, …)
# model ids.
#
# Fix: anchor with \A (start of string) instead of ^ (start of line) —
# ^ also matches after any embedded newline, so an id such as
# "foo\ngpt-x" would incorrectly pass.
#
# @param model [Hash, #id, #to_s] model record or id
# @return [Boolean]
def functions?(model) = model_id(model).match?(/\A(gpt|o\d)/)

.images?(model) ⇒ Boolean

Returns:

  • (Boolean)


56
# File 'lib/legion/extensions/llm/openai/provider.rb', line 56

# True for image-generation models (gpt-image and dall-e families).
#
# Fix: anchor with \A (start of string) instead of ^ (start of line) —
# ^ also matches after any embedded newline, so an id such as
# "x\ndall-e" would incorrectly pass.
#
# @param model [Hash, #id, #to_s] model record or id
# @return [Boolean]
def images?(model) = model_id(model).match?(/\A(gpt-image|dall-e)/)

.model_id(model) ⇒ Object



64
65
66
67
68
# File 'lib/legion/extensions/llm/openai/provider.rb', line 64

# Normalises the many shapes a "model" argument may take — a Hash from
# the API (string 'id' key), an object responding to #id, or a plain
# string/symbol — down to a String id ('' when a Hash has no 'id' key).
#
# @param model [Hash, #id, #to_s]
# @return [String]
def model_id(model)
  case model
  when Hash
    model.fetch('id', '')
  else
    model.respond_to?(:id) ? model.id.to_s : model.to_s
  end
end

.moderation?(model) ⇒ Boolean

Returns:

  • (Boolean)


55
# File 'lib/legion/extensions/llm/openai/provider.rb', line 55

# Moderation models carry 'moderation' somewhere in their id
# (e.g. the omni-moderation family).
#
# @param model [Hash, #id, #to_s] model record or id
# @return [Boolean]
def moderation?(model)
  id = model_id(model)
  id.include?('moderation')
end

.non_chat_model?(id) ⇒ Boolean

Returns:

  • (Boolean)


70
71
72
73
# File 'lib/legion/extensions/llm/openai/provider.rb', line 70

# True when the id belongs to a family that cannot serve chat
# completions: embeddings, moderation, image generation, audio
# transcription, TTS, realtime, or video (sora).
#
# Fix: anchor with \A (start of string) instead of ^ (start of line) —
# ^ also matches after any embedded newline, so an id such as
# "x\ntts" would incorrectly pass.
#
# @param id [String] already-normalised model id (see model_id)
# @return [Boolean]
def non_chat_model?(id)
  embeddings?(id) || moderation?(id) || images?(id) || audio_transcription?(id) ||
    id.match?(/\A(tts|gpt-realtime|sora)/)
end

.streaming?(model) ⇒ Boolean

Returns:

  • (Boolean)


51
# File 'lib/legion/extensions/llm/openai/provider.rb', line 51

# Streaming is available for exactly the models that support chat.
#
# @param model [Hash, #id, #to_s] model record or id
# @return [Boolean]
def streaming?(model)
  chat?(model)
end

.vision?(model) ⇒ Boolean

Returns:

  • (Boolean)


53
# File 'lib/legion/extensions/llm/openai/provider.rb', line 53

# Vision (image input) is assumed for GPT-family, o-series, and
# omni-moderation model ids.
#
# Fix: anchor with \A (start of string) instead of ^ (start of line) —
# ^ also matches after any embedded newline, so an id such as
# "x\ngpt-4o" would incorrectly pass.
#
# @param model [Hash, #id, #to_s] model record or id
# @return [Boolean]
def vision?(model) = model_id(model).match?(/\A(gpt|o\d|omni-moderation)/)