Module: Legion::Extensions::Llm::Openai::Provider::Capabilities
- Defined in:
- lib/legion/extensions/llm/openai/provider.rb
Overview
Provider-level capability checks based on current OpenAI model families.
Constant Summary collapse
# Maps externally-visible capability names to the predicate method used to
# probe a model for that capability (see .critical_capabilities_for).
CAPABILITY_CHECKS = {
  'streaming'           => :streaming?,
  'function_calling'    => :functions?,
  'vision'              => :vision?,
  'embeddings'          => :embeddings?,
  'moderation'          => :moderation?,
  'image'               => :images?,
  'audio_transcription' => :audio_transcription?
}.freeze
Class Method Summary collapse
- .audio_transcription?(model) ⇒ Boolean
- .chat?(model) ⇒ Boolean
- .critical_capabilities_for(model) ⇒ Object
- .embeddings?(model) ⇒ Boolean
- .functions?(model) ⇒ Boolean
- .images?(model) ⇒ Boolean
- .model_id(model) ⇒ Object
- .moderation?(model) ⇒ Boolean
- .non_chat_model?(id) ⇒ Boolean
- .streaming?(model) ⇒ Boolean
- .vision?(model) ⇒ Boolean
Class Method Details
.audio_transcription?(model) ⇒ Boolean
# File 'lib/legion/extensions/llm/openai/provider.rb', line 52

# True for speech-to-text models (gpt-4o transcribe variants and the whisper family).
# @param model [Object] model object (responding to #id) or raw identifier
# @return [Boolean]
def audio_transcription?(model)
  model_id(model).match?(/^(gpt-4o.*transcribe|whisper)/)
end
.chat?(model) ⇒ Boolean
# File 'lib/legion/extensions/llm/openai/provider.rb', line 45

# True when the model can serve chat completions — i.e. it is not one of the
# special-purpose families rejected by .non_chat_model?.
# @param model [Object] model object (responding to #id) or raw identifier
# @return [Boolean]
def chat?(model)
  id = model_id(model)
  !non_chat_model?(id)
end
.critical_capabilities_for(model) ⇒ Object
# File 'lib/legion/extensions/llm/openai/provider.rb', line 54

# Returns the capability names (keys of CAPABILITY_CHECKS, in declaration
# order) whose predicate holds for the given model.
# @param model [Object] model object (responding to #id) or raw identifier
# @return [Array<String>] capability names supported by the model
def critical_capabilities_for(model)
  id = model_id(model)
  CAPABILITY_CHECKS.select { |_capability, predicate| public_send(predicate, id) }.keys
end
.embeddings?(model) ⇒ Boolean
# File 'lib/legion/extensions/llm/openai/provider.rb', line 49

# True for embedding models (the text-embedding-* family).
# NOTE(review): the rendered source read `def (model) = ...` — the method name
# was lost in extraction; restored as `embeddings?` per the method summary and
# the 'embeddings' => :embeddings? entry in CAPABILITY_CHECKS.
# @param model [Object] model object (responding to #id) or raw identifier
# @return [Boolean]
def embeddings?(model) = model_id(model).start_with?('text-embedding-')
.functions?(model) ⇒ Boolean
# File 'lib/legion/extensions/llm/openai/provider.rb', line 47

# True for models that support function/tool calling (gpt-* and o-series, e.g. o1/o3).
# @param model [Object] model object (responding to #id) or raw identifier
# @return [Boolean]
def functions?(model)
  model_id(model).match?(/^(gpt|o\d)/)
end
.images?(model) ⇒ Boolean
# File 'lib/legion/extensions/llm/openai/provider.rb', line 51

# True for image-generation models (gpt-image-* and dall-e families).
# @param model [Object] model object (responding to #id) or raw identifier
# @return [Boolean]
def images?(model)
  model_id(model).match?(/^(gpt-image|dall-e)/)
end
.model_id(model) ⇒ Object
# File 'lib/legion/extensions/llm/openai/provider.rb', line 59

# Normalizes a model reference to its string identifier: unwraps an object
# exposing #id (duck-typed, no class check), otherwise stringifies the value.
# @param model [Object] model object (responding to #id) or raw identifier
# @return [String] the model's identifier
def model_id(model)
  raw = model.respond_to?(:id) ? model.id : model
  raw.to_s
end
.moderation?(model) ⇒ Boolean
# File 'lib/legion/extensions/llm/openai/provider.rb', line 50

# True for moderation models — matched by substring, so both text-moderation-*
# and omni-moderation-* ids qualify.
# @param model [Object] model object (responding to #id) or raw identifier
# @return [Boolean]
def moderation?(model)
  model_id(model).include?('moderation')
end
.non_chat_model?(id) ⇒ Boolean
# File 'lib/legion/extensions/llm/openai/provider.rb', line 63

# True when the id belongs to a family that cannot serve chat completions:
# embeddings, moderation, image generation, transcription, TTS, realtime, or video (sora).
# NOTE(review): the rendered source read `(id) || moderation?(id)...` — the
# leading `embeddings?` call name was lost in extraction; restored per the
# sibling predicate and the method summary.
# @param id [String] normalized model identifier (see .model_id)
# @return [Boolean]
def non_chat_model?(id)
  embeddings?(id) || moderation?(id) || images?(id) ||
    audio_transcription?(id) || id.match?(/^(tts|gpt-realtime|sora)/)
end
.streaming?(model) ⇒ Boolean
# File 'lib/legion/extensions/llm/openai/provider.rb', line 46

# True when the model supports streamed responses — currently every chat-capable
# model streams, so this simply delegates to .chat?.
# @param model [Object] model object (responding to #id) or raw identifier
# @return [Boolean]
def streaming?(model)
  chat?(model)
end
.vision?(model) ⇒ Boolean
# File 'lib/legion/extensions/llm/openai/provider.rb', line 48

# True for models accepting image input (gpt-*, o-series, omni-moderation families).
# @param model [Object] model object (responding to #id) or raw identifier
# @return [Boolean]
def vision?(model)
  model_id(model).match?(/^(gpt|o\d|omni-moderation)/)
end