Module: Legion::Extensions::Llm::Openai::Provider::Capabilities

Defined in:
lib/legion/extensions/llm/openai/provider.rb

Overview

Provider-level capability checks based on current OpenAI model families.

Constant Summary collapse

# Maps each externally-exposed capability name (String key) to the predicate
# method (Symbol) that tests it. Consumed by .critical_capabilities_for, which
# invokes each predicate via public_send. Frozen so the mapping cannot be
# mutated at runtime.
CAPABILITY_CHECKS =
{
  'streaming' => :streaming?,
  'function_calling' => :functions?,
  'vision' => :vision?,
  'embeddings' => :embeddings?,
  'moderation' => :moderation?,
  'image' => :images?,
  'audio_transcription' => :audio_transcription?
}.freeze

Class Method Summary collapse

Class Method Details

.audio_transcription?(model) ⇒ Boolean

Returns:

  • (Boolean)


52
# File 'lib/legion/extensions/llm/openai/provider.rb', line 52

def audio_transcription?(model) = model_id(model).match?(/^(gpt-4o.*transcribe|whisper)/)

.chat?(model) ⇒ Boolean

Returns:

  • (Boolean)


45
# File 'lib/legion/extensions/llm/openai/provider.rb', line 45

def chat?(model) = !non_chat_model?(model_id(model))

.critical_capabilities_for(model) ⇒ Object



54
55
56
57
# File 'lib/legion/extensions/llm/openai/provider.rb', line 54

# Collects the capability names whose predicate holds for the given model,
# preserving CAPABILITY_CHECKS declaration order.
#
# Note: the resolved id String is passed to each predicate; the predicates
# accept either a model object or an id string (via model_id).
#
# @param model [Object, String] model object (responding to #id) or id string
# @return [Array<String>] capability names supported by the model
def critical_capabilities_for(model)
  id = model_id(model)
  CAPABILITY_CHECKS.each_with_object([]) do |(capability, predicate), found|
    found << capability if public_send(predicate, id)
  end
end

.embeddings?(model) ⇒ Boolean

Returns:

  • (Boolean)


49
# File 'lib/legion/extensions/llm/openai/provider.rb', line 49

def embeddings?(model) = model_id(model).start_with?('text-embedding-')

.functions?(model) ⇒ Boolean

Returns:

  • (Boolean)


47
# File 'lib/legion/extensions/llm/openai/provider.rb', line 47

def functions?(model) = model_id(model).match?(/^(gpt|o\d)/)

.images?(model) ⇒ Boolean

Returns:

  • (Boolean)


51
# File 'lib/legion/extensions/llm/openai/provider.rb', line 51

def images?(model) = model_id(model).match?(/^(gpt-image|dall-e)/)

.model_id(model) ⇒ Object



59
60
61
# File 'lib/legion/extensions/llm/openai/provider.rb', line 59

# Normalizes a model reference to its id String: objects exposing #id are
# unwrapped, anything else is simply stringified.
#
# @param model [Object, String, Symbol] model object (responding to #id) or plain value
# @return [String] the model identifier
def model_id(model)
  return model.id.to_s if model.respond_to?(:id)

  model.to_s
end

.moderation?(model) ⇒ Boolean

Returns:

  • (Boolean)


50
# File 'lib/legion/extensions/llm/openai/provider.rb', line 50

def moderation?(model) = model_id(model).include?('moderation')

.non_chat_model?(id) ⇒ Boolean

Returns:

  • (Boolean)


63
64
65
66
# File 'lib/legion/extensions/llm/openai/provider.rb', line 63

# True when the id belongs to a family that cannot be used on the chat
# endpoint: embeddings, moderation, image generation, audio transcription,
# text-to-speech (tts), realtime, or video (sora).
#
# Fix: use \A instead of ^ in the regex — ^ matches at the start of ANY
# line, so an id with an embedded newline would wrongly match. \A anchors
# at the start of the whole string.
#
# Note: receives an id String (the sibling predicates accept it because
# model_id stringifies its argument).
#
# @param id [String] normalized model id
# @return [Boolean]
def non_chat_model?(id)
  embeddings?(id) || moderation?(id) || images?(id) || audio_transcription?(id) ||
    id.match?(/\A(tts|gpt-realtime|sora)/)
end

.streaming?(model) ⇒ Boolean

Returns:

  • (Boolean)


46
# File 'lib/legion/extensions/llm/openai/provider.rb', line 46

def streaming?(model) = chat?(model)

.vision?(model) ⇒ Boolean

Returns:

  • (Boolean)


48
# File 'lib/legion/extensions/llm/openai/provider.rb', line 48

def vision?(model) = model_id(model).match?(/^(gpt|o\d|omni-moderation)/)