Class: SmartPrompt::OpenAIAdapter
- Inherits:
-
LLMAdapter
- Object
- LLMAdapter
- SmartPrompt::OpenAIAdapter
- Defined in:
- lib/smart_prompt/openai_adapter.rb
Instance Attribute Summary
Attributes inherited from LLMAdapter
Instance Method Summary collapse
- #embeddings(text, model) ⇒ Object
-
#initialize(config) ⇒ OpenAIAdapter
constructor
A new instance of OpenAIAdapter.
- #send_request(messages, model = nil, temperature = 0.7, tools = nil, proc = nil) ⇒ Object
Constructor Details
#initialize(config) ⇒ OpenAIAdapter
Returns a new instance of OpenAIAdapter.
5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 |
# File 'lib/smart_prompt/openai_adapter.rb', line 5

# Builds the OpenAI API client from the adapter configuration.
#
# @param config [Hash] adapter configuration; reads "api_key" and "url"
# @raise [LLMAPIError] when the client configuration is invalid, auth
#   fails, or the API host is unreachable
def initialize(config)
  super
  api_key = @config["api_key"]
  # The config may reference an environment variable as the literal
  # string ENV["NAME"] (or ENV['NAME']). Resolve it with an ENV lookup
  # instead of eval, which would execute arbitrary code from the config.
  if api_key.is_a?(String) && api_key.start_with?("ENV[") && api_key.end_with?("]")
    env_name = api_key[/\AENV\[["']?([^"'\]]+)["']?\]\z/, 1]
    api_key = ENV[env_name] if env_name
  end
  begin
    @client = OpenAI::Client.new(
      access_token: api_key,
      uri_base: @config["url"],
      request_timeout: 240
    )
  rescue OpenAI::ConfigurationError => e
    SmartPrompt.logger.error "Failed to initialize OpenAI client: #{e.message}"
    raise LLMAPIError, "Invalid OpenAI configuration: #{e.message}"
  rescue OpenAI::Error => e
    SmartPrompt.logger.error "Failed to initialize OpenAI client: #{e.message}"
    raise LLMAPIError, "OpenAI authentication failed: #{e.message}"
  rescue SocketError => e
    SmartPrompt.logger.error "Failed to initialize OpenAI client: #{e.message}"
    raise LLMAPIError, "Network error: Unable to connect to OpenAI API"
  rescue => e
    SmartPrompt.logger.error "Failed to initialize OpenAI client: #{e.message}"
    raise Error, "Unexpected error initializing OpenAI client: #{e.message}"
  ensure
    # NOTE(review): ensure runs even after a rescue re-raises, so this
    # "success" line is logged on failure too — consider moving it after
    # the begin/rescue block.
    SmartPrompt.logger.info "Successful creation an OpenAI client."
  end
end
Instance Method Details
#embeddings(text, model) ⇒ Object
79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 |
# File 'lib/smart_prompt/openai_adapter.rb', line 79

# Requests an embedding vector for +text+ from the OpenAI-compatible API.
#
# @param text [#to_s] input text to embed (coerced with to_s)
# @param model [String, nil] model name; falls back to @config["model"]
# @return [Array<Float>, nil] the first embedding from the response, or
#   nil when the response has no "data" entries
# @raise [Error] on any failure during the API request
def embeddings(text, model)
  # Fixed copy-pasted "Ollama" wording: this adapter talks to OpenAI.
  SmartPrompt.logger.info "OpenAIAdapter: get embeddings from OpenAI"
  # Prefer the explicit model argument, else the configured default.
  model_name = model || @config["model"]
  SmartPrompt.logger.info "OpenAIAdapter: Using model #{model_name}"
  begin
    response = @client.embeddings(
      parameters: {
        model: model_name,
        input: text.to_s
      }
    )
  rescue => e
    SmartPrompt.logger.error "Unexpected error during OpenAI request: #{e.message}"
    raise Error, "Unexpected error during OpenAI request: #{e.message}"
  ensure
    # NOTE(review): logged even when the request raised — see initialize.
    SmartPrompt.logger.info "Successful send a message"
  end
  response.dig("data", 0, "embedding")
end
#send_request(messages, model = nil, temperature = 0.7, tools = nil, proc = nil) ⇒ Object
34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 |
# File 'lib/smart_prompt/openai_adapter.rb', line 34

# Sends a chat-completion request to the OpenAI-compatible API.
#
# @param messages [Array<Hash>] chat messages in OpenAI format
# @param model [String, nil] model name; falls back to @config["model"]
# @param temperature [Float, nil] sampling temperature (nil => 0.7)
# @param tools [Array<Hash>, nil] optional tool/function definitions
# @param proc [Proc, nil] optional streaming callback; when given the
#   response is streamed to it and this method returns nil
# @return [String, nil] the assistant message content, or nil when
#   streaming via +proc+
# @raise [LLMAPIError, Error] on API, HTTP, or parsing failures
def send_request(messages, model = nil, temperature = 0.7, tools = nil, proc = nil)
  SmartPrompt.logger.info "OpenAIAdapter: Sending request to OpenAI"
  temperature = 0.7 if temperature.nil?
  model_name = model || @config["model"]
  SmartPrompt.logger.info "OpenAIAdapter: Using model #{model_name}"
  begin
    parameters = {
      model: model_name,
      messages: messages,
      # NOTE(review): a configured temperature takes precedence over the
      # method argument — confirm this inversion is intended.
      temperature: @config["temperature"] || temperature
    }
    parameters[:stream] = proc if proc
    parameters[:tools] = tools if tools
    SmartPrompt.logger.info "Send parameters is: #{parameters}"
    response = @client.chat(parameters: parameters)
  rescue OpenAI::Error => e
    SmartPrompt.logger.error "OpenAI API error: #{e.message}"
    raise LLMAPIError, "OpenAI API error: #{e.message}"
  rescue OpenAI::MiddlewareErrors => e
    # NOTE(review): OpenAI::MiddlewareErrors is a Faraday middleware class
    # in ruby-openai, not an exception subclass — verify this branch is
    # ever reachable.
    SmartPrompt.logger.error "OpenAI HTTP Error: #{e.message}"
    raise LLMAPIError, "OpenAI HTTP Error"
  rescue JSON::ParserError => e
    SmartPrompt.logger.error "Failed to parse OpenAI API response"
    raise LLMAPIError, "Failed to parse OpenAI API response"
  rescue => e
    SmartPrompt.logger.error "Unexpected error during OpenAI request: #{e.message}"
    raise Error, "Unexpected error during OpenAI request: #{e.message}"
  ensure
    # NOTE(review): logged even when the request raised — see initialize.
    SmartPrompt.logger.info "Successful send a message"
  end
  SmartPrompt.logger.info "OpenAIAdapter: Received response from OpenAI"
  if proc.nil?
    @last_response = response
    return response.dig("choices", 0, "message", "content")
  end
end