Module: Legion::LLM::API::OpenAI::Models
- Extended by: Legion::Logging::Helper
- Defined in: lib/legion/llm/api/openai/models.rb
Constant Summary
```ruby
PROVIDER_DEFAULT_MODELS =
  {
    bedrock: 'us.anthropic.claude-sonnet-4-6-v1',
    anthropic: 'claude-sonnet-4-6',
    openai: 'gpt-4o',
    gemini: 'gemini-2.0-flash',
    azure: nil,
    ollama: 'llama3'
  }.freeze
```
Class Method Summary
- .build_model_list ⇒ Object
- .models_from_discovery ⇒ Object
- .models_from_providers ⇒ Object
- .registered(app) ⇒ Object
Class Method Details
.build_model_list ⇒ Object
```ruby
# File 'lib/legion/llm/api/openai/models.rb', line 67

def self.build_model_list
  models = []
  models.concat(models_from_discovery)
  models.concat(models_from_providers)

  seen = {}
  models.select { |m| seen[m[:id]] ? false : (seen[m[:id]] = true) }
end
```
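The method concatenates both sources and keeps only the first occurrence of each `:id`. A minimal sketch of that deduplication pass in isolation; the model hashes below are hypothetical stand-ins, not real discovery output:

```ruby
models = [
  { id: 'llama3', object: 'model', owned_by: 'ollama' },
  { id: 'gpt-4o', object: 'model', owned_by: 'openai' },
  { id: 'llama3', object: 'model', owned_by: 'ollama' } # duplicate id from a second source
]

seen = {}
unique = models.select { |m| seen[m[:id]] ? false : (seen[m[:id]] = true) }
# => keeps the first 'llama3' entry and 'gpt-4o'; the later duplicate is dropped
```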
.models_from_discovery ⇒ Object
```ruby
# File 'lib/legion/llm/api/openai/models.rb', line 77

def self.models_from_discovery
  return [] unless defined?(Legion::LLM::Discovery::Ollama) &&
                   Legion::LLM::Discovery::Ollama.respond_to?(:available_models)

  Legion::LLM::Discovery::Ollama.available_models.map do |model_id|
    Legion::LLM::API::Translators::OpenAIResponse.format_model_object(model_id, owned_by: 'ollama')
  end
rescue StandardError => e
  handle_exception(e, level: :warn, handled: true, operation: 'llm.api.openai.models.discovery')
  []
end
```
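A hedged sketch of what a caller can expect: when Ollama discovery is unavailable the method returns `[]`; otherwise each discovered id is mapped through the OpenAI response translator. The exact output of `format_model_object` is not shown on this page, so the hash below assumes the standard OpenAI model-object fields:

```ruby
models = Legion::LLM::API::OpenAI::Models.models_from_discovery

# With no Ollama discovery loaded, or if discovery raises:
# => []

# With discovery available, each entry is assumed to resemble an OpenAI model object:
# => [{ id: 'llama3', object: 'model', created: 1700000000, owned_by: 'ollama' }, ...]
```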
.models_from_providers ⇒ Object
```ruby
# File 'lib/legion/llm/api/openai/models.rb', line 89

def self.models_from_providers
  providers_config = Legion::LLM.settings.fetch(:providers, {})

  providers_config.filter_map do |name, config|
    next unless config.is_a?(Hash) && config[:enabled] != false

    model_id = config[:default_model] || PROVIDER_DEFAULT_MODELS[name.to_sym]
    next unless model_id

    Legion::LLM::API::Translators::OpenAIResponse.format_model_object(
      model_id,
      owned_by: name.to_s
    )
  end
rescue StandardError => e
  handle_exception(e, level: :warn, handled: true, operation: 'llm.api.openai.models.providers')
  []
end
```
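A sketch of how provider settings map to model entries, assuming a `:providers` hash shaped like the one this method reads (per-provider `:enabled` flag and optional `:default_model`); the values are illustrative only:

```ruby
# Hypothetical providers section of the Legion::LLM settings.
providers = {
  openai:    { enabled: true },                                     # no default_model, falls back to PROVIDER_DEFAULT_MODELS[:openai] => 'gpt-4o'
  anthropic: { enabled: true, default_model: 'claude-sonnet-4-6' }, # explicit default_model wins over the constant
  azure:     { enabled: true },                                     # PROVIDER_DEFAULT_MODELS[:azure] is nil, so this provider yields no entry
  gemini:    { enabled: false }                                     # disabled providers are skipped entirely
}
```

Providers that are disabled, misconfigured, or resolve to no model id are simply dropped from the list rather than raising.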
.registered(app) ⇒ Object
```ruby
# File 'lib/legion/llm/api/openai/models.rb', line 22

def self.registered(app)
  log.debug('[llm][api][openai][models] registering GET /v1/models and GET /v1/models/:id')

  app.get '/v1/models' do
    log.debug('[llm][api][openai][models] action=list')
    require_llm!

    model_list = Legion::LLM::API::OpenAI::Models.build_model_list
    log.debug("[llm][api][openai][models] action=listed count=#{model_list.size}")

    content_type :json
    Legion::JSON.dump({ object: 'list', data: model_list })
  rescue StandardError => e
    handle_exception(e, level: :error, handled: true, operation: 'llm.api.openai.models.list')
    halt 500, { 'Content-Type' => 'application/json' },
         Legion::JSON.dump({ error: { message: e.message, type: 'server_error' } })
  end

  app.get '/v1/models/:id' do
    model_id = params[:id]
    log.debug("[llm][api][openai][models] action=get id=#{model_id}")
    require_llm!

    model_list = Legion::LLM::API::OpenAI::Models.build_model_list
    found = model_list.find { |m| m[:id] == model_id }

    unless found
      log.debug("[llm][api][openai][models] action=not_found id=#{model_id}")
      halt 404, { 'Content-Type' => 'application/json' },
           Legion::JSON.dump({ error: { message: "Model '#{model_id}' not found",
                                        type: 'invalid_request_error',
                                        code: 'model_not_found' } })
    end

    log.debug("[llm][api][openai][models] action=found id=#{model_id}")
    content_type :json
    Legion::JSON.dump(found)
  rescue StandardError => e
    handle_exception(e, level: :error, handled: true, operation: 'llm.api.openai.models.get')
    halt 500, { 'Content-Type' => 'application/json' },
         Legion::JSON.dump({ error: { message: e.message, type: 'server_error' } })
  end

  log.debug('[llm][api][openai][models] GET /v1/models routes registered')
end
```
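A minimal sketch of wiring these routes into a Sinatra application, assuming the module is used as a standard Sinatra extension (the host class below is hypothetical, and `log`, `require_llm!`, and `handle_exception` are assumed to be provided as helpers elsewhere in the Legion stack):

```ruby
require 'sinatra/base'

class LegionApi < Sinatra::Base
  # Sinatra calls Legion::LLM::API::OpenAI::Models.registered(self),
  # which defines the two model routes on this application.
  register Legion::LLM::API::OpenAI::Models
end

# Once registered, the routes mirror the OpenAI models API:
#   GET /v1/models      -> { "object": "list", "data": [ ...model objects... ] }
#   GET /v1/models/:id  -> a single model object, or a 404 with code 'model_not_found'
```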