Class: RubyLLM::Providers::Anthropic
- Defined in:
- lib/ruby_llm/providers/anthropic.rb
Instance Attribute Summary
Attributes inherited from Base
Instance Method Summary
- #chat(messages, model: nil, temperature: 0.7, stream: false, tools: nil, &block) ⇒ Object
- #list_models ⇒ Object
Methods inherited from Base
Constructor Details
This class inherits a constructor from RubyLLM::Providers::Base
Instance Method Details
#chat(messages, model: nil, temperature: 0.7, stream: false, tools: nil, &block) ⇒ Object
# File 'lib/ruby_llm/providers/anthropic.rb', lines 8-27
#
# Sends a chat completion request to the Anthropic Messages API.
#
# @param messages [Array<Hash>] conversation messages to send
# @param model [String, nil] Anthropic model id; defaults to
#   'claude-3-5-sonnet-20241022' when nil
# @param temperature [Float] sampling temperature forwarded in the payload
# @param stream [Boolean] whether to request a streamed response
# @param tools [Array, nil] tool definitions, converted via #tool_to_anthropic
# @yield streamed completion chunks when stream: true and a block is given
# @return [Object] result of #create_chat_completion or #stream_chat_completion
def chat(messages, model: nil, temperature: 0.7, stream: false, tools: nil, &block)
  payload = {
    model: model || 'claude-3-5-sonnet-20241022',
    # NOTE(review): the formatting helper's name was lost in doc extraction
    # ("messages: ()" in the dump) — presumably a private message-formatting
    # method; confirm against lib/ruby_llm/providers/anthropic.rb.
    messages: format_messages(messages),
    temperature: temperature,
    stream: stream,
    max_tokens: 4096
  }
  # Only attach tools when a non-empty list was supplied.
  payload[:tools] = tools.map { |tool| tool_to_anthropic(tool) } if tools&.any?

  # Debug logging is opt-in via the RUBY_LLM_DEBUG environment variable.
  if ENV['RUBY_LLM_DEBUG']
    puts 'Sending payload to Anthropic:'
    puts JSON.pretty_generate(payload)
  end

  if stream && block_given?
    stream_chat_completion(payload, tools, &block)
  else
    create_chat_completion(payload, tools)
  end
end
#list_models ⇒ Object
# File 'lib/ruby_llm/providers/anthropic.rb', lines 29-60
#
# Fetches the models available from the Anthropic API and maps them to
# ModelInfo records enriched with capability and pricing metadata.
#
# @return [Array<ModelInfo>] one entry per model returned by /v1/models
# @raise [RubyLLM::Error] when the API responds with a status >= 400
def list_models
  response = @connection.get('/v1/models') do |req|
    req.headers['x-api-key'] = RubyLLM.configuration.anthropic_api_key
    # Anthropic requires an explicit API version header on every request.
    req.headers['anthropic-version'] = '2023-06-01'
  end

  # NOTE(review): the error-formatting helper's name was stripped during doc
  # extraction ("#{(response)}" in the dump); confirm the actual method name
  # against lib/ruby_llm/providers/anthropic.rb.
  raise RubyLLM::Error, "API error: #{parse_error_message(response)}" if response.status >= 400

  capabilities = RubyLLM::ModelCapabilities::Anthropic.new
  # Guard against a missing 'data' key so an unexpected body yields [].
  models_data = response.body['data'] || []

  models_data.map do |model|
    ModelInfo.new(
      id: model['id'],
      created_at: Time.parse(model['created_at']),
      display_name: model['display_name'],
      provider: 'anthropic',
      metadata: { type: model['type'] },
      context_window: capabilities.determine_context_window(model['id']),
      max_tokens: capabilities.determine_max_tokens(model['id']),
      supports_vision: capabilities.supports_vision?(model['id']),
      supports_functions: capabilities.supports_functions?(model['id']),
      supports_json_mode: capabilities.supports_json_mode?(model['id']),
      input_price_per_million: capabilities.get_input_price(model['id']),
      output_price_per_million: capabilities.get_output_price(model['id'])
    )
  end
rescue Faraday::Error => e
  # Network/transport failures are delegated to the shared error handler.
  handle_error(e)
end