Class: AIA::ModelDirectives
- Inherits:
-
Directive
- Object
- PM::Directive
- Directive
- AIA::ModelDirectives
show all
- Defined in:
- lib/aia/directives/model_directives.rb
Constant Summary
Constants inherited
from Directive
Directive::DIRECTIVE_PREFIX
Instance Method Summary
collapse
-
#available_models(args = nil, context_manager = nil) ⇒ Object
(also: #am, #available, #models, #all_models, #llms)
-
#compare(args, context_manager = nil) ⇒ Object
(also: #cmp)
-
#format_bytes(bytes) ⇒ Object
-
#show_lms_models(api_base, positive_terms, negative_terms) ⇒ Object
-
#show_local_models(current_models, positive_terms, negative_terms) ⇒ Object
↓ helpers (no desc → not registered) ↓
-
#show_ollama_models(api_base, positive_terms, negative_terms) ⇒ Object
-
#show_rubyllm_models(positive_terms, negative_terms) ⇒ Object
Methods inherited from Directive
build_dispatch_block, help
Instance Method Details
#available_models(args = nil, context_manager = nil) ⇒ Object
Also known as:
am, available, models, all_models, llms
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
|
# File 'lib/aia/directives/model_directives.rb', line 6
# Lists the LLMs available to the user. Dispatches to the local-provider
# listing (Ollama / LM Studio) when any configured model name carries an
# 'ollama/' or 'lms/' prefix, otherwise lists the RubyLLM registry.
#
# args            - optional search terms forwarded to parse_search_terms
# context_manager - accepted for directive-signature compatibility; unused
#
# Always returns "" so the directive inserts nothing into the prompt.
def available_models(args = nil, context_manager = nil)
  positive_terms, negative_terms = parse_search_terms(Array(args))

  # Normalize configured model objects to plain name strings.
  names = AIA.config.models.map do |model|
    model.respond_to?(:name) ? model.name : model.to_s
  end

  if names.any? { |name| name.start_with?('ollama/', 'lms/') }
    show_local_models(names, positive_terms, negative_terms)
  else
    show_rubyllm_models(positive_terms, negative_terms)
  end

  ""
end
|
#compare(args, context_manager = nil) ⇒ Object
Also known as:
cmp
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
|
# File 'lib/aia/directives/model_directives.rb', line 32
# Runs the same prompt against several models and prints each response
# for side-by-side comparison.
#
# args            - the directive's token list: prompt words plus a
#                   "--models model1,model2" pair anywhere in the list
# context_manager - accepted for directive-signature compatibility; unused
#
# Returns '' on success (output goes to stdout) or an error message string.
def compare(args, context_manager = nil)
  return 'Error: No prompt provided for comparison' if args.empty?

  prompt = nil
  models = []

  # Scan tokens: consume "--models <list>" pairs; the first remaining
  # token becomes the prompt.
  i = 0
  while i < args.length
    if args[i] == '--models' && i + 1 < args.length
      models = args[i + 1].split(',')
      i += 2
    else
      prompt ||= args[i]
      i += 1
    end
  end

  return 'Error: No prompt provided for comparison' unless prompt
  return 'Error: No models specified. Use --models model1,model2,model3' if models.empty?

  puts "\nComparing responses for: #{prompt}\n"
  puts '=' * 80

  results = {}
  models.each do |model_name|
    model_name.strip!
    puts "\nš¤ **#{model_name}:**"
    puts '-' * 40
    begin
      chat = RubyLLM.chat(model: model_name)
      response = chat.ask(prompt)
      content = response.content
      puts content
      results[model_name] = content
    rescue StandardError => e
      # Per-model failures are reported inline so remaining models still run.
      error_msg = "Error with #{model_name}: #{e.message}"
      puts error_msg
      results[model_name] = error_msg
    end
  end

  # Bug fix: the original single-quoted '\n' printed a literal backslash-n;
  # double quotes emit a real newline before the closing rule.
  puts "\n" + '=' * 80
  puts "\nComparison complete!"
  ''
end
|
195
196
197
198
199
200
201
202
203
|
# File 'lib/aia/directives/model_directives.rb', line 195
# Formats a raw byte count as a human-readable string with one decimal
# place, e.g. 1536 -> "1.5 KB". Values beyond TB stay expressed in TB.
def format_bytes(bytes)
  return "0 B" if bytes.zero?

  units = ['B', 'KB', 'MB', 'GB', 'TB']
  # Integer log base 1024 selects the unit; clamp to the largest unit.
  magnitude = (Math.log(bytes) / Math.log(1024)).to_i
  magnitude = units.length - 1 if magnitude > units.length - 1
  format("%.1f %s", bytes.to_f / (1024**magnitude), units[magnitude])
end
|
#show_lms_models(api_base, positive_terms, negative_terms) ⇒ Object
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
|
# File 'lib/aia/directives/model_directives.rb', line 151
# Fetches the model list from a running LM Studio server and prints the
# entries that match the search terms. Matching is a case-insensitive
# substring test of each term against the printed entry line: all
# positive terms are OR-ed, every negative term excludes.
# Connection or parse failures are reported to stdout, never raised.
def show_lms_models(api_base, positive_terms, negative_terms)
  # Strip any trailing /v1 so we can append the canonical /v1/models path.
  endpoint = URI("#{api_base.gsub(%r{/v1/?$}, '')}/v1/models")
  response = Net::HTTP.get_response(endpoint)

  unless response.is_a?(Net::HTTPSuccess)
    puts "ā Cannot connect to LM Studio at #{api_base}"
    return
  end

  models = JSON.parse(response.body)['data'] || []
  if models.empty?
    puts "No LM Studio models found"
    return
  end

  puts "LM Studio Models (#{api_base}):"
  puts "-" * 60

  shown = models.count do |model|
    entry = "- lms/#{model['id']}"
    haystack = entry.downcase
    matches = positive_terms.empty? || positive_terms.any? { |term| haystack.include?(term) }
    matches &&= negative_terms.none? { |term| haystack.include?(term) }
    puts entry if matches
    matches
  end

  puts
  puts "#{shown} LM Studio model(s) available"
  puts
rescue StandardError => e
  puts "ā Error fetching LM Studio models: #{e.message}"
end
|
#show_local_models(current_models, positive_terms, negative_terms) ⇒ Object
↓ helpers (no desc → not registered) ↓
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
|
# File 'lib/aia/directives/model_directives.rb', line 85
# Prints models from whichever local providers appear in the configured
# model list. current_models is an array of model-name strings; the
# provider is inferred from the 'ollama/' or 'lms/' prefix, and the API
# base URL comes from OLLAMA_API_BASE / LMS_API_BASE with localhost defaults.
def show_local_models(current_models, positive_terms, negative_terms)
  require 'net/http'
  require 'json'

  puts "\nLocal LLM Models:"
  puts

  current_models.each do |spec|
    case spec
    when /\Aollama\//
      # Ollama's native API lives at the root, so drop any /v1 suffix.
      base = ENV.fetch('OLLAMA_API_BASE', 'http://localhost:11434').gsub(%r{/v1/?$}, '')
      show_ollama_models(base, positive_terms, negative_terms)
    when /\Alms\//
      base = ENV.fetch('LMS_API_BASE', 'http://localhost:1234')
      show_lms_models(base, positive_terms, negative_terms)
    end
  end
end
|
#show_ollama_models(api_base, positive_terms, negative_terms) ⇒ Object
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
|
# File 'lib/aia/directives/model_directives.rb', line 104
# Fetches the model list from a running Ollama server (/api/tags) and
# prints the entries that match the search terms, including each model's
# size and last-modified date. Matching is a case-insensitive substring
# test against the printed entry line: positive terms are OR-ed, every
# negative term excludes. Failures are reported to stdout, never raised.
def show_ollama_models(api_base, positive_terms, negative_terms)
  response = Net::HTTP.get_response(URI("#{api_base}/api/tags"))

  unless response.is_a?(Net::HTTPSuccess)
    puts "ā Cannot connect to Ollama at #{api_base}"
    return
  end

  models = JSON.parse(response.body)['models'] || []
  if models.empty?
    puts "No Ollama models found"
    return
  end

  puts "Ollama Models (#{api_base}):"
  puts "-" * 60

  shown = models.count do |model|
    name = model['name']
    size = model['size'] ? format_bytes(model['size']) : 'unknown'
    modified = model['modified_at'] ? Time.parse(model['modified_at']).strftime('%Y-%m-%d') : 'unknown'
    entry = "- ollama/#{name} (size: #{size}, modified: #{modified})"
    haystack = entry.downcase
    matches = positive_terms.empty? || positive_terms.any? { |term| haystack.include?(term) }
    matches &&= negative_terms.none? { |term| haystack.include?(term) }
    puts entry if matches
    matches
  end

  puts
  puts "#{shown} Ollama model(s) available"
  puts
rescue StandardError => e
  puts "ā Error fetching Ollama models: #{e.message}"
end
|
#show_rubyllm_models(positive_terms, negative_terms) ⇒ Object
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
|
# File 'lib/aia/directives/model_directives.rb', line 205
# Prints the RubyLLM model registry filtered by search terms.
# Positive terms containing "_to_" (e.g. "text_to_image") are treated as
# modality predicates sent to llm.modalities; all other positive terms
# and every negative term are case-insensitive substring matches against
# the printed entry line. All positive conditions must hold (AND).
def show_rubyllm_models(positive_terms, negative_terms)
  modality_terms = positive_terms.select { |q| q.include?('_to_') }
  text_terms     = positive_terms.reject { |q| q.include?('_to_') }

  # Fix: the assignment target on these header lines was missing in the
  # source under review, leaving invalid syntax ("= ...", "+= ...",
  # "puts + ':'"). Restored with a local `header` variable.
  header = "\nAvailable LLMs"
  header += " for #{positive_terms.join(' and ')}" unless positive_terms.empty?
  header += " excluding: #{negative_terms.join(', ')}" unless negative_terms.empty?
  puts header + ':'
  puts

  counter = 0
  RubyLLM.models.all.each do |llm|
    cw      = llm.context_window
    caps    = llm.capabilities.join(',')
    inputs  = llm.modalities.input.join(',')
    outputs = llm.modalities.output.join(',')
    mode    = "#{inputs} to #{outputs}"
    # Standard input price per million tokens; may be nil for some models.
    in_1m   = llm.pricing.text_tokens.standard.to_h[:input_per_million]
    entry   = "- #{llm.id} (#{llm.provider}) in: $#{in_1m} cw: #{cw} mode: #{mode} caps: #{caps}"
    entry_down = entry.downcase

    show_it = true
    modality_terms.each { |q| show_it &&= llm.modalities.send("#{q}?") }
    text_terms.each     { |q| show_it &&= entry_down.include?(q) }
    negative_terms.each { |q| show_it &&= !entry_down.include?(q) }

    if show_it
      counter += 1
      puts entry
    end
  end

  puts if counter > 0
  puts "#{counter} LLMs matching your query"
  puts
end
|