Class: SmartPrompt::Conversation

Inherits:
Object
  • Object
show all
Includes:
APIHandler
Defined in:
lib/smart_prompt/conversation.rb

Constant Summary

Constants included from APIHandler

APIHandler::MAX_RETRIES, APIHandler::RETRY_OPTIONS

Instance Attribute Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize(engine, tools = nil) ⇒ Conversation

Returns a new instance of Conversation.



11
12
13
14
15
16
17
18
19
20
21
22
23
24
# File 'lib/smart_prompt/conversation.rb', line 11

# Build a conversation bound to the given engine.
#
# Copies the adapter/LLM/model/template registries off the engine and
# seeds default state: an empty message buffer, no selected LLM, and a
# sampling temperature of 0.7.
#
# engine - the SmartPrompt engine supplying adapters, llms, models,
#          templates and the current adapter.
# tools  - optional tool definitions forwarded to the LLM on requests.
def initialize(engine, tools = nil)
  SmartPrompt.logger.info "Create Conversation"
  @engine = engine
  @tools = tools
  # Fresh per-conversation state.
  @messages = []
  @last_response = nil
  @current_llm_name = nil
  @temperature = 0.7
  # Registries shared with (and owned by) the engine.
  @adapters = engine.adapters
  @llms = engine.llms
  @models = engine.models
  @templates = engine.templates
  @current_adapter = engine.current_adapter
end

Instance Attribute Details

#config_fileObject (readonly)

Returns the value of attribute config_file.



8
9
10
# File 'lib/smart_prompt/conversation.rb', line 8

# Reader for the configuration file path.
# NOTE(review): @config_file is never assigned in the code visible
# here, so this may always return nil — confirm against the full file.
def config_file
  @config_file
end

#last_call_idObject (readonly)

Returns the value of attribute last_call_id.



9
10
11
# File 'lib/smart_prompt/conversation.rb', line 9

# Reader for the identifier of the most recent LLM call.
# NOTE(review): @last_call_id is never assigned in the code visible
# here — presumably set by the APIHandler mixin; verify.
def last_call_id
  @last_call_id
end

#last_responseObject (readonly)

Returns the value of attribute last_response.



8
9
10
# File 'lib/smart_prompt/conversation.rb', line 8

# Reader for the most recent LLM reply, as stored by #send_msg,
# #send_msg_once and #embeddings.
def last_response
  @last_response
end

#messagesObject (readonly)

Returns the value of attribute messages.



8
9
10
# File 'lib/smart_prompt/conversation.rb', line 8

# Reader for the pending message buffer. The send methods reset this
# to just the system prompt after each successful request.
def messages
  @messages
end

Instance Method Details

#add_message(msg, with_history = false) ⇒ Object



61
62
63
64
65
66
# File 'lib/smart_prompt/conversation.rb', line 61

# Append a chat message hash to this conversation's buffer.
# When with_history is truthy the message is also recorded in the
# engine-backed history (see #history_messages).
def add_message(msg, with_history = false)
  history_messages << msg if with_history
  @messages << msg
end

#embeddings(length) ⇒ Object



140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
# File 'lib/smart_prompt/conversation.rb', line 140

# Request an embedding vector for the last user message in the buffer.
# On success the buffer is reset to just the system prompt and the raw
# vector is fitted to +length+ entries via #normalize.
# Wrapped in Retriable so transient adapter failures are retried.
def embeddings(length)
  Retriable.retriable(RETRY_OPTIONS) do
    raise ConfigurationError, "No LLM selected" if @current_llm.nil?
    # Later user messages overwrite earlier ones: only the most recent
    # user message is embedded.
    text = ""
    @messages.each { |msg| text = msg[:content] if msg[:role] == "user" }
    @last_response = @current_llm.embeddings(text, @model_name)
    @messages = [{ role: "system", content: @sys_msg }]
    normalize(@last_response, length)
  end
end

#history_messagesObject



57
58
59
# File 'lib/smart_prompt/conversation.rb', line 57

# Delegates history storage to the owning engine, so messages added
# with with_history (see #add_message) outlive this conversation's
# per-send buffer reset.
def history_messages
  @engine.history_messages
end

#model(model_name) ⇒ Object



49
50
51
# File 'lib/smart_prompt/conversation.rb', line 49

# Set the model identifier passed to the LLM on each request.
# Returns the assigned value (plain setter, not chainable).
def model(model_name)
  @model_name = model_name
end

#normalize(x, length) ⇒ Object



130
131
132
133
134
135
136
137
138
# File 'lib/smart_prompt/conversation.rb', line 130

# Fit an embedding vector +x+ to exactly +length+ entries.
#
# When x is longer than length it is truncated and L2-normalized
# (vector math via Numo). When x is shorter (or equal) it is
# zero-padded in place up to length and returned as-is.
#
# BUG FIX: the padding count was computed as (x.length - length),
# which is negative in this branch and makes Array#* raise
# ArgumentError; it must be (length - x.length).
#
# NOTE(review): the padded branch is not L2-normalized, unlike the
# truncated branch — confirm this asymmetry is intended.
def normalize(x, length)
  if x.length > length
    x = Numo::NArray.cast(x[0..length - 1])
    norm = Math.sqrt((x * x).sum)
    (x / norm).to_a
  else
    x.concat([0] * (length - x.length))
  end
end

#prompt(template_name, params = {}, with_history: false) ⇒ Object



68
69
70
71
72
73
74
75
76
77
78
79
80
# File 'lib/smart_prompt/conversation.rb', line 68

# Queue a user message. A Symbol names a registered template, which is
# rendered with +params+; any other value (a literal String prompt) is
# used verbatim. Returns self so calls can be chained.
def prompt(template_name, params = {}, with_history: false)
  content =
    if template_name.instance_of?(Symbol)
      template_name = template_name.to_s
      SmartPrompt.logger.info "Use template #{template_name}"
      raise "Template #{template_name} not found" unless @templates.key?(template_name)
      @templates[template_name].render(params)
    else
      template_name
    end
  add_message({ role: "user", content: content }, with_history)
  self
end

#send_msg(params = {}) ⇒ Object



96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
# File 'lib/smart_prompt/conversation.rb', line 96

# Dispatch buffered messages to the current LLM and return its reply.
# With params[:with_history] the engine-wide history is sent instead of
# this conversation's buffer. On success the buffer is reset to just
# the system prompt. After all retries fail, an error String is
# returned instead of raising.
def send_msg(params = {})
  Retriable.retriable(RETRY_OPTIONS) do
    raise ConfigurationError, "No LLM selected" if @current_llm.nil?
    source = params[:with_history] ? history_messages : @messages
    @last_response = @current_llm.send_request(source, @model_name, @temperature, @tools, nil)
    # An empty reply means the adapter buffered it; fetch the real one.
    @last_response = @current_llm.last_response if @last_response == ""
    @messages = [{ role: "system", content: @sys_msg }]
    @last_response
  end
rescue => e
  return "Failed to call LLM after #{MAX_RETRIES} attempts: #{e.message}"
end

#send_msg_by_stream(params = {}, &proc) ⇒ Object



115
116
117
118
119
120
121
122
123
124
125
126
127
128
# File 'lib/smart_prompt/conversation.rb', line 115

# Streaming variant of #send_msg: chunks are delivered to the supplied
# block via the adapter rather than collected into a return value.
# NOTE(review): unlike #send_msg this does not update @last_response —
# confirm that is intended.
def send_msg_by_stream(params = {}, &proc)
  Retriable.retriable(RETRY_OPTIONS) do
    raise ConfigurationError, "No LLM selected" if @current_llm.nil?
    source = params[:with_history] ? history_messages : @messages
    @current_llm.send_request(source, @model_name, @temperature, @tools, proc)
    @messages = [{ role: "system", content: @sys_msg }]
  end
rescue => e
  return "Failed to call LLM after #{MAX_RETRIES} attempts: #{e.message}"
end

#send_msg_onceObject



88
89
90
91
92
93
94
# File 'lib/smart_prompt/conversation.rb', line 88

# Single-shot request: no retry wrapper, no tools, no streaming.
# Resets the buffer to just the system prompt and returns the reply.
# NOTE(review): raises a bare RuntimeError here, not ConfigurationError
# as the other send paths do — confirm this is intended.
def send_msg_once
  raise "No LLM selected" if @current_llm.nil?
  @last_response = @current_llm.send_request(@messages, @model_name, @temperature)
  @messages = [{ role: "system", content: @sys_msg }]
  @last_response
end

#sys_msg(message, params) ⇒ Object



82
83
84
85
86
# File 'lib/smart_prompt/conversation.rb', line 82

# Install +message+ as the system prompt: it is pushed onto the buffer
# now and remembered in @sys_msg so the send methods can re-seed the
# buffer after each request. Returns self for chaining.
def sys_msg(message, params)
  add_message({ role: "system", content: message }, params[:with_history])
  @sys_msg = message
  self
end

#temperature(temperature) ⇒ Object



53
54
55
# File 'lib/smart_prompt/conversation.rb', line 53

# Set the sampling temperature used on subsequent requests
# (defaults to 0.7 in #initialize).
def temperature(temperature)
  @temperature = temperature
end

#use(llm_name) ⇒ Object

Raises:

(ConfigurationError) — when no LLM is configured under llm_name


26
27
28
29
30
31
32
# File 'lib/smart_prompt/conversation.rb', line 26

# Select the active LLM by name (String or Symbol).
# Raises ConfigurationError when no LLM is registered under that name.
# Returns self for chaining.
def use(llm_name)
  key = llm_name.to_s
  raise ConfigurationError, "LLM #{key} not configured" unless @llms.key?(key)
  @current_llm = @llms[key]
  @current_llm_name = key
  self
end

#use_model(model_name) ⇒ Object

Raises:

(ConfigurationError) — when the model is not configured, or its preset lacks "use" or "model"


34
35
36
37
38
39
40
41
42
43
44
45
46
47
# File 'lib/smart_prompt/conversation.rb', line 34

# Configure the conversation from a named model preset.
# The preset (looked up under both String and Symbol keys) must supply
# "use" (which LLM to select) and "model" (the model identifier); both
# are validated before delegating to #use and #model. Returns self.
def use_model(model_name)
  key = model_name.to_s
  config = @models[key] || @models[key.to_sym]
  raise ConfigurationError, "Model #{key} not configured" unless config

  llm = config["use"] || config[:use]
  target = config["model"] || config[:model]
  raise ConfigurationError, "Model #{key} must define use" if llm.nil? || llm.empty?
  raise ConfigurationError, "Model #{key} must define model" if target.nil? || target.empty?

  use(llm)
  model(target)
  self
end