Module: AIA::Adapter::MultiModelChat
- Included in: RubyLLMAdapter
- Defined in: lib/aia/adapter/multi_model_chat.rb
Defined Under Namespace
Classes: MultiModelResponse
Instance Method Summary
- #build_consensus_prompt(results) ⇒ Object
- #format_individual_responses(results) ⇒ Object
- #format_model_display_name(spec) ⇒ Object
- #format_multi_model_results(results) ⇒ Object
- #format_multi_model_with_metrics(results) ⇒ Object
- #generate_consensus_response(results) ⇒ Object
- #multi_model_chat(prompt_or_contexts) ⇒ Object
- #prepend_model_role(prompt, internal_id) ⇒ Object
- #prepend_role_to_conversation(conversation, role_content) ⇒ Object
- #should_use_consensus_mode? ⇒ Boolean
Instance Method Details
#build_consensus_prompt(results) ⇒ Object
# File 'lib/aia/adapter/multi_model_chat.rb', line 121
def build_consensus_prompt(results)
  prompt_parts = []
  prompt_parts << "You are tasked with creating a consensus response based on multiple AI model responses to the same query."
  prompt_parts << "Please analyze the following responses and provide a unified, comprehensive answer that:"
  prompt_parts << "- Incorporates the best insights from all models"
  prompt_parts << "- Resolves any contradictions with clear reasoning"
  prompt_parts << "- Provides additional context or clarification when helpful"
  prompt_parts << "- Maintains accuracy and avoids speculation"
  prompt_parts << ""
  prompt_parts << "Model responses:"
  prompt_parts << ""

  results.each do |model_name, result|
    content = if result.respond_to?(:content)
                result.content
              else
                result.to_s
              end

    # Skip failed models so error strings don't pollute the consensus input.
    next if content.start_with?("Error with")

    prompt_parts << "#{model_name}:"
    prompt_parts << content
    prompt_parts << ""
  end

  prompt_parts << "Please provide your consensus response:"
  prompt_parts.join("\n")
end
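A hypothetical input for illustration: a results hash keyed by model name. Plain-string values exercise the to_s branch, and the "Error with" guard drops failed models from the consensus input. The model names and answers are invented.

# results as build_consensus_prompt might receive them (illustrative values):
results = {
  "gpt-4o"   => "Paris is the capital of France.",
  "claude-3" => "Error with claude-3: request timed out"  # skipped by the guard
}
# Only the gpt-4o response would appear under "Model responses:" in the
# generated consensus prompt.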
#format_individual_responses(results) ⇒ Object
# File 'lib/aia/adapter/multi_model_chat.rb', line 149
def format_individual_responses(results)
  has_metrics = results.values.any? { |r| r.respond_to?(:input_tokens) && r.respond_to?(:output_tokens) }

  if has_metrics
    format_multi_model_with_metrics(results)
  else
    output = []

    results.each do |internal_id, result|
      spec = get_model_spec(internal_id)
      display_name = format_model_display_name(spec)
      output << "from: #{display_name}"

      content = if result.respond_to?(:content)
                  result.content
                else
                  result.to_s
                end
      output << content
      output << ""
    end

    output.join("\n")
  end
end
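A minimal sketch of the metric-free path, assuming get_model_spec falls back to the internal id for models without a registered spec (ids and answers invented):

# results = { "model_a" => "First answer.", "model_b" => "Second answer." }
# format_individual_responses(results)
# #=> "from: model_a\nFirst answer.\n\nfrom: model_b\nSecond answer.\n"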
#format_model_display_name(spec) ⇒ Object
Format display name with instance number and role (ADR-005)
# File 'lib/aia/adapter/multi_model_chat.rb', line 174
def format_model_display_name(spec)
  return spec unless spec.is_a?(Hash)

  model_name = spec[:model]
  instance   = spec[:instance]
  role       = spec[:role]

  display = if instance > 1
              "#{model_name} ##{instance}"
            else
              model_name
            end
  display += " (#{role})" if role
  display
end
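Illustrative spec-to-name mappings (the spec hashes are hypothetical; note the method assumes :instance is always set on Hash specs):

# { model: "gpt-4o", instance: 1, role: nil }      #=> "gpt-4o"
# { model: "gpt-4o", instance: 2, role: nil }      #=> "gpt-4o #2"
# { model: "gpt-4o", instance: 2, role: "critic" } #=> "gpt-4o #2 (critic)"
# "plain-string-spec"                              #=> "plain-string-spec" (non-Hash passthrough)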
#format_multi_model_results(results) ⇒ Object
# File 'lib/aia/adapter/multi_model_chat.rb', line 95
def format_multi_model_results(results)
  if should_use_consensus_mode?
    generate_consensus_response(results)
  else
    format_individual_responses(results)
  end
end
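The dispatch is driven entirely by the consensus flag; a rough sketch of the two outcomes for the same results hash:

# AIA.config.flags.consensus == true
#   -> one merged answer, attributed to the first configured model
# otherwise
#   -> each response listed under its own "from: <display name>" header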
#format_multi_model_with_metrics(results) ⇒ Object
# File 'lib/aia/adapter/multi_model_chat.rb', line 192
def format_multi_model_with_metrics(results)
  formatted_content = []
  metrics_data = []

  results.each do |internal_id, result|
    spec = get_model_spec(internal_id)
    display_name = format_model_display_name(spec)
    formatted_content << "from: #{display_name}"

    content = result.respond_to?(:content) ? result.content : result.to_s
    formatted_content << content
    formatted_content << ""

    # Report metrics against the actual model name, not the internal id.
    actual_model = spec ? spec[:model] : internal_id
    metrics_data << {
      model_id: actual_model,
      display_name: display_name,
      input_tokens: result.respond_to?(:input_tokens) ? result.input_tokens : nil,
      output_tokens: result.respond_to?(:output_tokens) ? result.output_tokens : nil
    }
  end

  MultiModelResponse.new(formatted_content.join("\n"), metrics_data)
end
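The shape of one metrics_data entry, assuming a response object that exposes token counts (all values illustrative):

# {
#   model_id:      "gpt-4o",
#   display_name:  "gpt-4o #2 (critic)",
#   input_tokens:  512,
#   output_tokens: 128   # nil when the response object lacks the method
# }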
#generate_consensus_response(results) ⇒ Object
# File 'lib/aia/adapter/multi_model_chat.rb', line 107
def generate_consensus_response(results)
  primary_model = @models.first
  primary_chat  = @chats[primary_model]
  consensus_prompt = build_consensus_prompt(results)

  begin
    consensus_result = primary_chat.ask(consensus_prompt).content
    "from: #{primary_model}\n#{consensus_result}"
  rescue StandardError => e
    # Degrade gracefully: fall back to the individually formatted responses.
    "Error generating consensus: #{e.message}\n\n" + format_individual_responses(results)
  end
end
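The first model in @models does double duty: it answers the original prompt alongside the others and then synthesizes the consensus, so the output is attributed to it (model name illustrative):

# "from: gpt-4o\n<merged answer drawing on every non-errored response>"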
#multi_model_chat(prompt_or_contexts) ⇒ Object
# File 'lib/aia/adapter/multi_model_chat.rb', line 61
def multi_model_chat(prompt_or_contexts)
  results = {}

  # A Hash whose keys are all known model ids is treated as per-model
  # contexts; anything else is broadcast to every model unchanged.
  per_model_contexts = prompt_or_contexts.is_a?(Hash) &&
                       prompt_or_contexts.keys.all? { |k| @models.include?(k) }

  Async do |task|
    @models.each do |internal_id|
      task.async do
        begin
          prompt = if per_model_contexts
                     prompt_or_contexts[internal_id]
                   else
                     prompt_or_contexts
                   end
          prompt = prepend_model_role(prompt, internal_id)
          result = single_model_chat(prompt, internal_id)
          results[internal_id] = result
        rescue StandardError => e
          # A failing model records an error string instead of aborting the batch.
          results[internal_id] = "Error with #{internal_id}: #{e.message}"
        end
      end
    end
  end

  format_multi_model_results(results)
end
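A hypothetical invocation with per-model contexts; because every key is a configured model id, each model receives its own prompt (adapter variable and ids invented):

# contexts = {
#   "gpt-4o"   => "Summarize the document.",
#   "claude-3" => "List open questions about the document."
# }
# adapter.multi_model_chat(contexts)                    # one prompt per model
# adapter.multi_model_chat("Summarize the document.")   # same prompt to all models

The outer Async block does not return until its child tasks finish, so results is fully populated before formatting; and because the async gem schedules tasks as fibers on a single thread, the shared results hash needs no locking.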
#prepend_model_role(prompt, internal_id) ⇒ Object
Prepend role content to prompt for a specific model (ADR-005)
# File 'lib/aia/adapter/multi_model_chat.rb', line 24
def prepend_model_role(prompt, internal_id)
  spec = get_model_spec(internal_id)
  return prompt unless spec && spec[:role]

  prompt_handler = AIA::PromptHandler.new
  role_content = prompt_handler.load_role_for_model(spec, AIA.config.prompts.role)
  return prompt unless role_content

  if prompt.is_a?(String)
    "#{role_content}\n\n#{prompt}"
  elsif prompt.is_a?(Array)
    prepend_role_to_conversation(prompt, role_content)
  else
    prompt
  end
end
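A sketch of the string branch with an invented role body; array prompts (whole conversations) are delegated to prepend_role_to_conversation, and any other type passes through untouched:

# Given a spec whose :role resolves to "You are a careful code reviewer.":
# prepend_model_role("Review this diff.", internal_id)
# #=> "You are a careful code reviewer.\n\nReview this diff."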
#prepend_role_to_conversation(conversation, role_content) ⇒ Object
# File 'lib/aia/adapter/multi_model_chat.rb', line 45
def prepend_role_to_conversation(conversation, role_content)
  modified = conversation.dup

  # Messages may use symbol or string keys; handle both.
  first_user_index = modified.find_index { |msg| msg[:role] == "user" || msg["role"] == "user" }

  if first_user_index
    msg = modified[first_user_index].dup
    content_key = msg.key?(:content) ? :content : "content"
    msg[content_key] = "#{role_content}\n\n#{msg[content_key]}"
    modified[first_user_index] = msg
  end

  modified
end
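A worked example with an invented two-message conversation; only the first user message is rewritten, and the dup calls leave the caller's array and message hash unmutated:

# conversation = [
#   { role: "system", content: "Be terse." },
#   { role: "user",   content: "What is 2 + 2?" }
# ]
# prepend_role_to_conversation(conversation, "You are a math tutor.")
# #=> [
# #     { role: "system", content: "Be terse." },
# #     { role: "user",   content: "You are a math tutor.\n\nWhat is 2 + 2?" }
# #   ]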
#should_use_consensus_mode? ⇒ Boolean
# File 'lib/aia/adapter/multi_model_chat.rb', line 103
def should_use_consensus_mode?
  AIA.config.flags.consensus == true
end
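Note the strict comparison: only a literal true enables consensus mode, so a truthy-but-not-boolean config value still selects individual formatting (assignment syntax is illustrative; whether the flags config is writable this way is an assumption):

# AIA.config.flags.consensus = true    # consensus response
# AIA.config.flags.consensus = "yes"   # "yes" != true, so individual responses
# AIA.config.flags.consensus = nil     # individual responses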