Class: AI::Chat

Inherits:
Object
  • Object
show all
Defined in:
lib/ai/chat.rb

Overview

:reek:MissingSafeMethod { exclude: [ generate! ] } :reek:TooManyMethods :reek:TooManyInstanceVariables :reek:InstanceVariableAssumption :reek:IrresponsibleModule

Defined Under Namespace

Classes: InputClassificationError, WrongAPITokenUsedError

Constant Summary collapse

BASE_PROXY_URL =
"https://prepend.me/api.openai.com/v1"
PROXY_ENV =
"AICHAT_PROXY"
PROXY_KEY_ENV =
"AICHAT_PROXY_KEY"
OPENAI_KEY_ENV =
"OPENAI_API_KEY"

Instance Attribute Summary collapse

Class Method Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize(api_key: nil, api_key_env_var: nil, proxy: nil) ⇒ Chat

Returns a new instance of Chat.



29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
# File 'lib/ai/chat.rb', line 29

# Builds a new Chat: resolves the proxy flag and API key, seeds default
# conversation state, and constructs the underlying OpenAI client.
def initialize(api_key: nil, api_key_env_var: nil, proxy: nil)
  @api_key_arg = api_key
  @api_key_env_var_arg = api_key_env_var

  # Explicit proxy: argument wins; otherwise consult the AICHAT_PROXY env var.
  @proxy = if proxy.nil?
    ENV[PROXY_ENV]&.downcase == "true"
  else
    !!proxy
  end
  @api_key = resolve_api_key

  # Default conversation state.
  @messages = []
  @last_response_id = nil
  @model = "gpt-5.2"
  @reasoning_effort = nil
  @verbosity = :medium
  @api_key_validated = false

  # Image-generation defaults.
  @image_generation = false
  @image_folder = "./images"

  opts = {api_key: @api_key}
  opts[:base_url] = BASE_PROXY_URL if @proxy
  @client = OpenAI::Client.new(**opts)
end

Instance Attribute Details

#backgroundObject

:reek:Attribute



21
22
23
# File 'lib/ai/chat.rb', line 21

# Reader for @background (not set in #initialize; nil until assigned
# via the writer — presumably toggles background response mode; confirm).
def background
  @background
end

#clientObject (readonly)

Returns the value of attribute client.



22
23
24
# File 'lib/ai/chat.rb', line 22

# Reader for the OpenAI::Client instance built in #initialize (read-only).
def client
  @client
end

#code_interpreterObject

:reek:Attribute



21
22
23
# File 'lib/ai/chat.rb', line 21

# Reader for @code_interpreter (nil until assigned via the writer;
# presumably enables the code-interpreter tool — confirm against usage).
def code_interpreter
  @code_interpreter
end

#conversation_idObject

:reek:Attribute



21
22
23
# File 'lib/ai/chat.rb', line 21

# Reader for @conversation_id; #get_items raises until this is set.
def conversation_id
  @conversation_id
end

#image_folderObject

:reek:Attribute



21
22
23
# File 'lib/ai/chat.rb', line 21

# Reader for @image_folder (defaults to "./images" in #initialize).
def image_folder
  @image_folder
end

#image_generationObject

Returns the value of attribute image_generation.



22
23
24
# File 'lib/ai/chat.rb', line 22

# Reader for @image_generation (defaults to false in #initialize).
def image_generation
  @image_generation
end

#last_response_idObject (readonly)

Returns the value of attribute last_response_id.



22
23
24
# File 'lib/ai/chat.rb', line 22

# Reader for @last_response_id — the id recorded by #generate! for
# chaining follow-up requests (read-only; nil before the first call).
def last_response_id
  @last_response_id
end

#messagesObject

:reek:Attribute



21
22
23
# File 'lib/ai/chat.rb', line 21

# Reader for @messages — the conversation history array appended to by #add.
def messages
  @messages
end

#modelObject

:reek:Attribute



21
22
23
# File 'lib/ai/chat.rb', line 21

# Reader for @model (defaults to "gpt-5.2" in #initialize).
def model
  @model
end

#proxyObject

Returns the value of attribute proxy.



22
23
24
# File 'lib/ai/chat.rb', line 22

# Reader for @proxy — true when #initialize routed requests through
# BASE_PROXY_URL (from the proxy: argument or the AICHAT_PROXY env var).
def proxy
  @proxy
end

#reasoning_effortObject

:reek:Attribute



21
22
23
# File 'lib/ai/chat.rb', line 21

# Reader for @reasoning_effort (defaults to nil in #initialize).
def reasoning_effort
  @reasoning_effort
end

#schemaObject

Returns the value of attribute schema.



22
23
24
# File 'lib/ai/chat.rb', line 22

# Reader for @schema (nil until assigned via the writer; presumably a
# structured-output JSON schema — confirm against response handling).
def schema
  @schema
end

#schema_fileObject

Returns the value of attribute schema_file.



22
23
24
# File 'lib/ai/chat.rb', line 22

# Reader for @schema_file (nil until assigned via the writer; presumably
# a path to a schema JSON file — confirm against response handling).
def schema_file
  @schema_file
end

#verbosityObject

Returns the value of attribute verbosity.



22
23
24
# File 'lib/ai/chat.rb', line 22

# Reader for @verbosity (defaults to :medium in #initialize).
def verbosity
  @verbosity
end

#web_searchObject

:reek:Attribute



21
22
23
# File 'lib/ai/chat.rb', line 21

# Reader for @web_search (nil until assigned via the writer; presumably
# enables the web-search tool — confirm against request building).
def web_search
  @web_search
end

Class Method Details

.generate_schema!(description, location: "schema.json", api_key: nil, api_key_env_var: nil, proxy: nil) ⇒ Object



47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
# File 'lib/ai/chat.rb', line 47

# Generates a JSON schema from a natural-language description and
# returns it as a pretty-printed JSON string, optionally writing it to
# +location+.
#
# @param description [String] what the schema should capture
# @param location [String, nil] path to write the schema to; skipped when nil
# @param api_key [String, nil] explicit key; otherwise read from ENV
# @param api_key_env_var [String, nil] ENV var name overriding the default key lookup
# @param proxy [Boolean, nil] force proxy on/off; nil consults AICHAT_PROXY
# @raise [KeyError] when no API key can be found in ENV
def self.generate_schema!(description, location: "schema.json", api_key: nil, api_key_env_var: nil, proxy: nil)
  proxy = proxy.nil? ? ENV[PROXY_ENV]&.downcase == "true" : !!proxy
  # Key precedence: explicit arg, then named env var, then the proxy/OpenAI default.
  api_key ||= ENV.fetch(api_key_env_var || (proxy ? PROXY_KEY_ENV : OPENAI_KEY_ENV))
  prompt_path = File.expand_path("../prompts/schema_generator.md", __dir__)
  system_prompt = File.read(prompt_path)

  options = {
    api_key: api_key,
    base_url: proxy ? BASE_PROXY_URL : nil
  }.compact

  client = OpenAI::Client.new(**options)
  response = client.responses.create(
    model: "gpt-5.2",
    input: [
      {role: :system, content: system_prompt},
      {role: :user, content: description}
    ],
    text: {format: {type: "json_object"}},
    reasoning: {effort: "high"}
  )

  output_text = response.output_text
  json = JSON.parse(output_text)

  content = JSON.pretty_generate(json)
  if location
    path = Pathname.new(location)
    # BUG FIX: Pathname#dirname returns a Pathname, which never #== a String,
    # so the old guard (path.dirname != ".") was always true. Compare as
    # strings so bare filenames in the current directory skip mkdir_p.
    FileUtils.mkdir_p(path.dirname) if path.dirname.to_s != "."
    File.binwrite(location, content)
  end
  content
end

Instance Method Details

#add(content, role: "user", response: nil, status: nil, image: nil, images: nil, file: nil, files: nil) ⇒ Object

:reek:TooManyStatements :reek:NilCheck



83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
# File 'lib/ai/chat.rb', line 83

# Appends a message to the conversation and returns it.
#
# With no attachments, builds a Message containing only the keys that
# were supplied (content/response/status). With any of image/images/
# file/files, builds a multi-part content array mixing an input_text
# entry, input_image entries, and processed file entries.
#
# :reek:TooManyStatements :reek:NilCheck
def add(content, role: "user", response: nil, status: nil, image: nil, images: nil, file: nil, files: nil)
  message = if image.nil? && images.nil? && file.nil? && files.nil?
    msg = Message[role: role]
    msg[:content] = content if content
    msg[:response] = response if response
    msg[:status] = status if status
    msg
  else
    text_and_files_array = [
      {
        type: "input_text",
        text: content
      }
    ]

    all_images = []
    all_images << image if image
    all_images.concat(Array(images)) if images

    all_images.each do |img|
      text_and_files_array.push(
        {
          type: "input_image",
          image_url: process_image_input(img)
        }
      )
    end

    all_files = []
    all_files << file if file
    all_files.concat(Array(files)) if files

    # BUG FIX: the block parameter was named `file`, shadowing the method's
    # `file` keyword argument; renamed to avoid the shadow (same behavior).
    all_files.each do |file_input|
      text_and_files_array.push(process_file_input(file_input))
    end

    Message[
      role: role,
      content: text_and_files_array,
      status: status
    ]
  end

  messages.push(message)
  message
end

#assistant(message, response: nil, status: nil) ⇒ Object



138
139
140
# File 'lib/ai/chat.rb', line 138

# Convenience wrapper: appends an assistant-role message via #add.
def assistant(message, response: nil, status: nil)
  add(message, role: "assistant", response: response, status: status)
end

#generate!Object

:reek:NilCheck :reek:TooManyStatements



144
145
146
147
148
149
150
151
# File 'lib/ai/chat.rb', line 144

# Sends the accumulated conversation to the API and returns the last
# message. Key validation runs only while @api_key_validated is false
# (presumably flipped inside validate_api_key — confirm). The new
# response's id is recorded for chaining follow-up requests.
#
# :reek:NilCheck :reek:TooManyStatements
def generate!
  validate_api_key unless @api_key_validated
  response = create_response
  parse_response(response)

  @last_response_id = last.dig(:response, :id)
  last
end

#get_items(order: :asc) ⇒ Object



225
226
227
228
229
230
231
# File 'lib/ai/chat.rb', line 225

# Lists the items of the current server-side conversation, wrapped in
# an Items collection. Raises until a conversation exists.
def get_items(order: :asc)
  unless conversation_id
    raise "No conversation_id set. Call generate! first to create a conversation."
  end

  listing = client.conversations.items.list(conversation_id, order: order)
  Items.new(listing, conversation_id: conversation_id)
end

#get_response(wait: false, timeout: 600) ⇒ Object

:reek:BooleanParameter :reek:ControlParameter :reek:DuplicateMethodCall :reek:TooManyStatements



157
158
159
160
161
162
163
164
# File 'lib/ai/chat.rb', line 157

# Fetches and parses the latest response; blocks up to +timeout+ seconds
# when wait: true, otherwise retrieves by the stored last_response_id.
#
# :reek:BooleanParameter :reek:ControlParameter
def get_response(wait: false, timeout: 600)
  raw = wait ? wait_for_response(timeout) : retrieve_response(last_response_id)
  parse_response(raw)
end

#inspectObject



264
265
266
# File 'lib/ai/chat.rb', line 264

# Pretty-prints the chat; plain text when stdout is not a TTY.
def inspect
  plain_output = !$stdout.tty?
  AI.amazing_print(self, plain: plain_output, multiline: true)
end

#inspectable_attributesObject



233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
# File 'lib/ai/chat.rb', line 233

# Builds the ordered [ivar_name, value] pairs shown by #inspect,
# omitting unset state and unchanged defaults.
def inspectable_attributes
  attrs = []

  # 1. Model and reasoning (configuration)
  attrs << [:@model, @model]
  attrs << [:@reasoning_effort, @reasoning_effort]

  # 2. Conversation state
  attrs << [:@conversation_id, @conversation_id]
  attrs << [:@last_response_id, @last_response_id] if @last_response_id

  # 3. Messages (the main content, without response details, with truncated data URIs)
  display_messages = @messages.map { |msg| AI.truncate_data_uris(msg.except(:response)) }
  attrs << [:@messages, display_messages]

  # 4. Optional features (only if enabled/changed from default)
  attrs << [:@proxy, @proxy] if @proxy != false
  attrs << [:@image_generation, @image_generation] if @image_generation != false
  attrs << [:@image_folder, @image_folder] if @image_folder != "./images"

  # 5. Optional state (only if set)
  attrs << [:@background, @background] if @background
  attrs << [:@code_interpreter, @code_interpreter] if @code_interpreter
  attrs << [:@web_search, @web_search] if @web_search
  attrs << [:@schema, @schema] if @schema
  attrs << [:@schema_file, @schema_file] if @schema_file
  # CONSISTENCY FIX: read the ivar directly like every other line here
  # (the old code called the `verbosity` reader; same value, uniform style).
  attrs << [:@verbosity, @verbosity] if @verbosity

  attrs
end

#lastObject



221
222
223
# File 'lib/ai/chat.rb', line 221

# Returns the most recent message in the conversation (nil when empty).
def last
  messages.last
end

#pretty_inspectObject



272
273
274
# File 'lib/ai/chat.rb', line 272

# #inspect output with a trailing newline (the form pp/IRB expect).
def pretty_inspect
  inspect + "\n"
end

#system(message) ⇒ Object



130
131
132
# File 'lib/ai/chat.rb', line 130

# Convenience wrapper: appends a system-role message via #add.
# NOTE: intentionally shadows Kernel#system on instances of this class.
def system(message)
  add(message, role: "system")
end

#to_htmlObject



268
269
270
# File 'lib/ai/chat.rb', line 268

# Renders the chat as an HTML document.
def to_html
  rendered = AI.amazing_print(self, html: true, multiline: true)
  AI.wrap_html(rendered)
end

#user(message, image: nil, images: nil, file: nil, files: nil) ⇒ Object



134
135
136
# File 'lib/ai/chat.rb', line 134

# Convenience wrapper: appends a user-role message via #add, optionally
# with image/file attachments (single or arrays).
def user(message, image: nil, images: nil, file: nil, files: nil)
  add(message, role: "user", image: image, images: images, file: file, files: files)
end