Module: Legion::Telemetry::OpenInference

Defined in:
lib/legion/telemetry/open_inference.rb

Constant Summary collapse

DEFAULT_TRUNCATE =
4096

Class Method Summary collapse

Class Method Details

.agent_span(name:, mode: nil, phase_count: nil, budget_ms: nil) ⇒ Object



131
132
133
134
135
136
137
138
139
140
141
142
143
144
# File 'lib/legion/telemetry/open_inference.rb', line 131

# Wraps the block in an OpenInference AGENT span named "agent.<name>".
# When OpenInference is disabled, the block is still run (with a nil span)
# so caller logic proceeds untraced.
def agent_span(name:, mode: nil, phase_count: nil, budget_ms: nil, &)
  return (yield(nil) if block_given?) unless open_inference_enabled?

  attributes = base_attrs('AGENT')
  attributes['agent.name'] = name
  attributes['agent.mode'] = mode.to_s if mode
  attributes['agent.phase_count'] = phase_count if phase_count
  attributes['agent.budget_ms'] = budget_ms if budget_ms

  Legion::Telemetry.with_span("agent.#{name}", kind: :internal, attributes: attributes, &)
end

.annotate_eval_result(span, result) ⇒ Object



234
235
236
237
238
239
240
241
242
243
# File 'lib/legion/telemetry/open_inference.rb', line 234

# Copies evaluator result fields onto the span as eval.* attributes.
#
# Fixes: use `.nil?` checks (matching annotate_guardrail_result) so falsy
# values such as passed=false or score=false are still recorded, and guard
# on Hash input (matching annotate_llm_result) before indexing.
#
# @param span [Object] span responding to #set_attribute (no-op otherwise)
# @param result [Hash] evaluator output with :score, :passed, :explanation
# @return [nil] always; errors are swallowed and logged at debug level
def annotate_eval_result(span, result)
  return unless span.respond_to?(:set_attribute) && result.is_a?(Hash)

  span.set_attribute('eval.score', result[:score]) unless result[:score].nil?
  span.set_attribute('eval.passed', result[:passed]) unless result[:passed].nil?
  span.set_attribute('eval.explanation', result[:explanation]) if result[:explanation]
rescue StandardError => e
  Legion::Logging.debug "OpenInference#annotate_eval_result failed: #{e.message}" if defined?(Legion::Logging)
  nil
end

.annotate_guardrail_result(span, result) ⇒ Object



245
246
247
248
249
250
251
252
253
254
# File 'lib/legion/telemetry/open_inference.rb', line 245

# Writes guardrail pass/score outcomes onto the span; when I/O capture is
# enabled, the explanation is also recorded as the span's output.value.
def annotate_guardrail_result(span, result)
  return unless span.respond_to?(:set_attribute)

  passed = result[:passed]
  score = result[:score]
  span.set_attribute('guardrail.passed', passed) unless passed.nil?
  span.set_attribute('guardrail.score', score) unless score.nil?
  if include_io? && result[:explanation]
    span.set_attribute('output.value', truncate_value(result[:explanation].to_s))
  end
rescue StandardError => e
  Legion::Logging.debug "OpenInference#annotate_guardrail_result failed: #{e.message}" if defined?(Legion::Logging)
  nil
end

.annotate_llm_result(span, result) ⇒ Object



206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
# File 'lib/legion/telemetry/open_inference.rb', line 206

# Records token usage, output content, finish reason and model on an LLM
# span using both OpenInference (llm.*) and GenAI (gen_ai.*) conventions.
def annotate_llm_result(span, result)
  return unless span.respond_to?(:set_attribute) && result.is_a?(Hash)

  prompt_tokens = result[:input_tokens]
  completion_tokens = result[:output_tokens]

  # OpenInference attributes
  span.set_attribute('llm.token_count.prompt', prompt_tokens) if prompt_tokens
  span.set_attribute('llm.token_count.completion', completion_tokens) if completion_tokens
  span.set_attribute('output.value', truncate_value(result[:content].to_s)) if include_io? && result[:content]

  # GenAI semantic convention attributes
  span.set_attribute('gen_ai.usage.input_tokens', prompt_tokens) if prompt_tokens
  span.set_attribute('gen_ai.usage.output_tokens', completion_tokens) if completion_tokens
  span.set_attribute('gen_ai.response.finish_reason', result[:stop_reason].to_s) if result[:stop_reason]
  span.set_attribute('gen_ai.response.model', result[:model].to_s) if result[:model]
rescue StandardError => e
  Legion::Logging.debug "OpenInference#annotate_llm_result failed: #{e.message}" if defined?(Legion::Logging)
  nil
end

.annotate_output(span, result) ⇒ Object



224
225
226
227
228
229
230
231
232
# File 'lib/legion/telemetry/open_inference.rb', line 224

# Serializes result (JSON for Hashes, to_s otherwise) and stores the
# truncated form as the span's output.value attribute.
def annotate_output(span, result)
  return unless span.respond_to?(:set_attribute)

  serialized =
    if result.is_a?(Hash)
      result.to_json
    else
      result.to_s
    end
  span.set_attribute('output.value', truncate_value(serialized))
rescue StandardError => e
  Legion::Logging.debug "OpenInference#annotate_output failed: #{e.message}" if defined?(Legion::Logging)
  nil
end

.base_attrs(kind) ⇒ Object



202
203
204
# File 'lib/legion/telemetry/open_inference.rb', line 202

# Base attribute hash shared by every OpenInference span.
# @param kind [String] OpenInference span kind, e.g. 'LLM', 'TOOL', 'AGENT'
# @return [Hash{String=>String}]
def base_attrs(kind)
  attrs = {}
  attrs['openinference.span.kind'] = kind
  attrs
end

.chain_span(type: 'task_chain', relationship_id: nil) ⇒ Object



102
103
104
105
106
107
108
109
110
111
112
113
# File 'lib/legion/telemetry/open_inference.rb', line 102

# Wraps the block in an OpenInference CHAIN span named "chain.<type>".
# Runs the block with a nil span when OpenInference is disabled.
def chain_span(type: 'task_chain', relationship_id: nil, &)
  return (yield(nil) if block_given?) unless open_inference_enabled?

  attributes = base_attrs('CHAIN')
  attributes['chain.type'] = type
  attributes['chain.relationship_id'] = relationship_id if relationship_id

  Legion::Telemetry.with_span("chain.#{type}", kind: :internal, attributes: attributes, &)
end

.embedding_span(model:, dimensions: nil) ⇒ Object



71
72
73
74
75
76
77
78
79
80
81
82
83
# File 'lib/legion/telemetry/open_inference.rb', line 71

# Wraps the block in an EMBEDDING client span carrying both OpenInference
# and GenAI model attributes. Yields nil when OpenInference is disabled.
def embedding_span(model:, dimensions: nil, &)
  return (yield(nil) if block_given?) unless open_inference_enabled?

  attributes = base_attrs('EMBEDDING')
  attributes['embedding.model_name'] = model
  attributes['embedding.dimensions'] = dimensions if dimensions
  attributes.merge!(genai_attrs(model: model, provider: 'embedding'))

  Legion::Telemetry.with_span("embedding.#{model}", kind: :client, attributes: attributes, &)
end

.evaluator_span(template:) ⇒ Object



115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
# File 'lib/legion/telemetry/open_inference.rb', line 115

# Wraps the block in an EVALUATOR span; if the block returns a Hash, its
# score/passed/explanation fields are annotated onto the span afterwards.
def evaluator_span(template:)
  return (yield(nil) if block_given?) unless open_inference_enabled?

  attributes = base_attrs('EVALUATOR')
  attributes['eval.template'] = template

  Legion::Telemetry.with_span("eval.#{template}", kind: :internal, attributes: attributes) do |span|
    outcome = yield(span)
    annotate_eval_result(span, outcome) if span && outcome.is_a?(Hash)
    outcome
  end
end

.genai_attrs(model:, provider: nil) ⇒ Object



196
197
198
199
200
# File 'lib/legion/telemetry/open_inference.rb', line 196

# GenAI semantic-convention request attributes for a model call.
# @param model [String] requested model name
# @param provider [String, nil] optional provider recorded as gen_ai.system
# @return [Hash{String=>String}]
def genai_attrs(model:, provider: nil)
  base = { 'gen_ai.request.model' => model }
  provider ? base.merge('gen_ai.system' => provider) : base
end

.guardrail_span(name:, input: nil) ⇒ Object



174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
# File 'lib/legion/telemetry/open_inference.rb', line 174

# Wraps the block in a GUARDRAIL span; a Hash result is annotated with
# pass/score/explanation afterwards. Input is captured only when I/O
# recording is enabled, truncated to the configured limit.
def guardrail_span(name:, input: nil)
  return (yield(nil) if block_given?) unless open_inference_enabled?

  attributes = base_attrs('GUARDRAIL')
  attributes['guardrail.name'] = name
  attributes['input.value'] = truncate_value(input.to_s) if input && include_io?

  Legion::Telemetry.with_span("guardrail.#{name}", kind: :internal, attributes: attributes) do |span|
    outcome = yield(span)
    annotate_guardrail_result(span, outcome) if span && outcome.is_a?(Hash)
    outcome
  end
end

.include_io?Boolean

Returns:

  • (Boolean)


25
26
27
28
29
30
31
32
33
34
35
36
# File 'lib/legion/telemetry/open_inference.rb', line 25

# Whether input/output values should be attached to spans.
# Reads telemetry.open_inference.include_input_output; defaults to true
# when settings are missing, malformed, or unreadable.
# @return [Boolean]
def include_io?
  config =
    begin
      Legion::Settings.dig(:telemetry, :open_inference)
    rescue StandardError => e
      Legion::Logging.debug "OpenInference#include_io? failed to read settings: #{e.message}" if defined?(Legion::Logging)
      {}
    end
  return true unless config.is_a?(Hash)

  config.fetch(:include_input_output, true)
rescue StandardError => e
  Legion::Logging.debug "OpenInference#include_io? failed: #{e.message}" if defined?(Legion::Logging)
  true
end

.llm_span(model:, provider: nil, invocation_params: {}, input: nil) ⇒ Object



51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
# File 'lib/legion/telemetry/open_inference.rb', line 51

# Wraps the block in an LLM client span carrying OpenInference and GenAI
# request attributes; the block's Hash result (tokens, content, model,
# stop reason) is annotated onto the span afterwards.
def llm_span(model:, provider: nil, invocation_params: {}, input: nil)
  return (yield(nil) if block_given?) unless open_inference_enabled?

  attributes = base_attrs('LLM')
  attributes['llm.model_name'] = model
  attributes['llm.provider'] = provider if provider
  attributes['llm.invocation_parameters'] = invocation_params.to_json unless invocation_params.empty?
  attributes['input.value'] = truncate_value(input.to_s) if input && include_io?
  attributes.merge!(genai_attrs(model: model, provider: provider))

  Legion::Telemetry.with_span("llm.#{model}", kind: :client, attributes: attributes) do |span|
    outcome = yield(span)
    annotate_llm_result(span, outcome) if span
    outcome
  end
end

.open_inference_enabled?Boolean

Returns:

  • (Boolean)


10
11
12
13
14
15
16
17
18
19
20
21
22
23
# File 'lib/legion/telemetry/open_inference.rb', line 10

# Whether OpenInference span emission is active: requires telemetry to be
# enabled overall, plus telemetry.open_inference.enabled (default true).
# Any error reading settings disables OpenInference (returns false).
# @return [Boolean]
def open_inference_enabled?
  return false unless Legion::Telemetry.enabled?

  config =
    begin
      Legion::Settings.dig(:telemetry, :open_inference)
    rescue StandardError => e
      Legion::Logging.debug "OpenInference#open_inference_enabled? failed to read settings: #{e.message}" if defined?(Legion::Logging)
      {}
    end
  return true unless config.is_a?(Hash)

  config.fetch(:enabled, true)
rescue StandardError => e
  Legion::Logging.debug "OpenInference#open_inference_enabled? failed: #{e.message}" if defined?(Legion::Logging)
  false
end

.reranker_span(model:, query: nil, top_k: nil) ⇒ Object



160
161
162
163
164
165
166
167
168
169
170
171
172
# File 'lib/legion/telemetry/open_inference.rb', line 160

# Wraps the block in a RERANKER span; the query is captured as input.value
# (truncated) only when I/O recording is enabled.
def reranker_span(model:, query: nil, top_k: nil, &)
  return (yield(nil) if block_given?) unless open_inference_enabled?

  attributes = base_attrs('RERANKER')
  attributes['reranker.model_name'] = model
  attributes['reranker.top_k'] = top_k if top_k
  attributes['input.value'] = truncate_value(query.to_s) if query && include_io?

  Legion::Telemetry.with_span("reranker.#{model}", kind: :internal, attributes: attributes, &)
end

.retriever_span(name:, query: nil, top_k: nil) ⇒ Object



146
147
148
149
150
151
152
153
154
155
156
157
158
# File 'lib/legion/telemetry/open_inference.rb', line 146

# Wraps the block in a RETRIEVER client span; the query is captured as
# input.value (truncated) only when I/O recording is enabled.
def retriever_span(name:, query: nil, top_k: nil, &)
  return (yield(nil) if block_given?) unless open_inference_enabled?

  attributes = base_attrs('RETRIEVER')
  attributes['retriever.name'] = name
  attributes['retriever.top_k'] = top_k if top_k
  attributes['input.value'] = truncate_value(query.to_s) if query && include_io?

  Legion::Telemetry.with_span("retriever.#{name}", kind: :client, attributes: attributes, &)
end

.tool_span(name:, parameters: {}) ⇒ Object



85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
# File 'lib/legion/telemetry/open_inference.rb', line 85

# Wraps the block in a TOOL span with the tool's name and JSON-encoded
# parameters; the block's result is recorded as output.value when I/O
# recording is enabled.
def tool_span(name:, parameters: {})
  return (yield(nil) if block_given?) unless open_inference_enabled?

  attributes = base_attrs('TOOL')
  attributes['tool.name'] = name
  attributes['tool.parameters'] = parameters.to_json unless parameters.empty?

  Legion::Telemetry.with_span("tool.#{name}", kind: :internal, attributes: attributes) do |span|
    outcome = yield(span)
    annotate_output(span, outcome) if span && include_io?
    outcome
  end
end

.truncate_limitObject



38
39
40
41
42
43
44
45
46
47
48
49
# File 'lib/legion/telemetry/open_inference.rb', line 38

# Maximum length for captured input/output values.
# Reads telemetry.open_inference.truncate_values_at; falls back to
# DEFAULT_TRUNCATE when settings are missing, malformed, or unreadable.
# @return [Integer]
def truncate_limit
  config =
    begin
      Legion::Settings.dig(:telemetry, :open_inference)
    rescue StandardError => e
      Legion::Logging.debug "OpenInference#truncate_limit failed to read settings: #{e.message}" if defined?(Legion::Logging)
      {}
    end
  return DEFAULT_TRUNCATE unless config.is_a?(Hash)

  config.fetch(:truncate_values_at, DEFAULT_TRUNCATE)
rescue StandardError => e
  Legion::Logging.debug "OpenInference#truncate_limit failed: #{e.message}" if defined?(Legion::Logging)
  DEFAULT_TRUNCATE
end

.truncate_value(str, max: nil) ⇒ Object



191
192
193
194
# File 'lib/legion/telemetry/open_inference.rb', line 191

# Truncates a value to the configured (or explicitly given) maximum length.
#
# Fixes: coerces the input with #to_s so a nil (or non-String) argument no
# longer raises NoMethodError on #length; String inputs behave exactly as
# before.
#
# @param str [String, #to_s, nil] value to truncate
# @param max [Integer, nil] explicit limit; defaults to truncate_limit
# @return [String] str unchanged if within limit, else its first `limit` chars
def truncate_value(str, max: nil)
  limit = max || truncate_limit
  text = str.to_s
  text.length > limit ? text[0...limit] : text
end