Module: Tracelit::Metrics

Defined in:
lib/tracelit/metrics.rb

Class Method Summary collapse

Class Method Details

.counter(name, description: "", unit: "") ⇒ Object

Exposes a counter for manual instrumentation in user code:

Tracelit::Metrics.counter("orders.placed").add(1)


58
59
60
61
62
63
# File 'lib/tracelit/metrics.rb', line 58

# Exposes a counter instrument for manual instrumentation in user code:
#
#   Tracelit::Metrics.counter("orders.placed").add(1)
#
# Returns nil until Metrics.setup has created the meter.
def self.counter(name, description: "", unit: "")
  return if @meter.nil?

  @meter.create_counter(name, description: description, unit: unit)
end

.gauge(name, description: "", unit: "") ⇒ Object



72
73
74
75
76
77
# File 'lib/tracelit/metrics.rb', line 72

# Exposes a gauge instrument for manual instrumentation in user code.
# Returns nil until Metrics.setup has created the meter.
def self.gauge(name, description: "", unit: "")
  meter = @meter
  meter && meter.create_gauge(name, description: description, unit: unit)
end

.histogram(name, description: "", unit: "") ⇒ Object



65
66
67
68
69
70
# File 'lib/tracelit/metrics.rb', line 65

# Exposes a histogram instrument for manual instrumentation in user code.
# Returns nil until Metrics.setup has created the meter.
def self.histogram(name, description: "", unit: "")
  return nil unless @meter

  @meter.create_histogram(name, description: description, unit: unit)
end

.install_connection_pool_pollerObject

Polls ActiveRecord connection pool stats every 30 seconds on a background thread and records them as gauges. (Ruby has no daemon threads; the thread simply dies with the process.) Does not require a live connection at install time — errors during polling are silently retried next cycle.



201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
# File 'lib/tracelit/metrics.rb', line 201

# Polls ActiveRecord connection pool stats every 30 seconds on a background
# thread and records them as gauges. Does not require a live connection at
# install time — errors during polling are silently retried next cycle.
#
# @return [Thread, nil] the poller thread, or nil when metrics are not set up
def self.install_connection_pool_poller
  # Without this guard a nil @meter raises NoMethodError below and emits a
  # misleading "failed to install" warning; skipping silently matches the
  # nil-safe behavior of .counter/.gauge/.histogram.
  return if @meter.nil?

  pool_size = @meter.create_gauge(
    "db.connection_pool.size",
    description: "Maximum connections in the pool",
    unit: "{connections}"
  )

  pool_busy = @meter.create_gauge(
    "db.connection_pool.busy",
    description: "Connections currently checked out",
    unit: "{connections}"
  )

  pool_idle = @meter.create_gauge(
    "db.connection_pool.idle",
    description: "Connections available for checkout",
    unit: "{connections}"
  )

  pool_waiting = @meter.create_gauge(
    "db.connection_pool.waiting",
    description: "Threads waiting for a connection",
    unit: "{threads}"
  )

  thread = Thread.new do
    Thread.current[:tracelit_pool_poller] = true
    loop do
      sleep 30
      begin
        pool = ActiveRecord::Base.connection_pool
        stat = pool.stat
        # NOTE(review): pool_config is internal Rails API; on Rails >= 7 the
        # public pool.db_config.adapter may be preferable — confirm versions.
        attrs = { "db.system" => pool.pool_config.db_config.adapter.to_s }
        pool_size.record(stat[:size], attributes: attrs)
        pool_busy.record(stat[:busy], attributes: attrs)
        pool_idle.record(stat[:idle], attributes: attrs)
        pool_waiting.record(stat[:waiting], attributes: attrs)
      rescue StandardError
        # Pool may not be connected yet — retry next cycle
      end
    end
  end
  # Default is already false; set explicitly so a polling bug can never take
  # down the host process.
  thread.abort_on_exception = false
  thread
rescue StandardError => e
  warn "Tracelit: failed to install connection pool poller: #{e.message}"
end

.install_memory_pollerObject

Polls process RSS memory every 60 seconds on a background thread using ps, which works on both macOS (arm64-darwin) and Linux without /proc.



251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
# File 'lib/tracelit/metrics.rb', line 251

# Polls process RSS memory every 60 seconds on a background thread using
# `ps`, which works on both macOS (arm64-darwin) and Linux without /proc.
#
# @return [Thread, nil] the poller thread, or nil when metrics are not set up
def self.install_memory_poller
  # Skip quietly when metrics were never set up, consistent with the other
  # nil-safe instrument helpers.
  return if @meter.nil?

  memory_gauge = @meter.create_gauge(
    "process.memory.rss",
    description: "Process resident set size (RSS)",
    unit: "MB"
  )

  thread = Thread.new do
    Thread.current[:tracelit_memory_poller] = true
    loop do
      sleep 60
      begin
        # Read the pid on every cycle rather than capturing it once at
        # install time, so a poller re-installed after fork (Puma/Unicorn
        # workers) can never report against a stale parent pid.
        pid    = Process.pid
        rss_kb = `ps -o rss= -p #{pid} 2>/dev/null`.strip.to_i
        # to_i yields 0 when ps produced no output (unavailable or failed)
        next unless rss_kb.positive?

        memory_gauge.record(rss_kb / 1024.0, attributes: {
          "process.pid"     => pid.to_s,
          "process.runtime" => "ruby",
        })
      rescue StandardError
        # Ignore — ps may not be available in all environments
      end
    end
  end
  thread.abort_on_exception = false
  thread
rescue StandardError => e
  warn "Tracelit: failed to install memory poller: #{e.message}"
end

.install_rails_subscriberObject

Subscribes to Rails process_action.action_controller to emit:

http.server.request.count    — counter per request
http.server.request.duration — histogram in milliseconds
http.server.error.count      — counter for 5xx responses
db.query.duration            — histogram for ActiveRecord time per request


84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
# File 'lib/tracelit/metrics.rb', line 84

# Subscribes to Rails "process_action.action_controller" to emit:
#
#   http.server.request.count    — counter per request
#   http.server.request.duration — histogram in milliseconds
#   http.server.error.count      — counter for 5xx responses
#   db.query.duration            — histogram for ActiveRecord time per request
#
# @return [void]
def self.install_rails_subscriber
  return if @meter.nil? # metrics not set up — nothing to install

  request_counter = @meter.create_counter(
    "http.server.request.count",
    description: "Total HTTP requests processed",
    unit: "{requests}"
  )

  duration_histogram = @meter.create_histogram(
    "http.server.request.duration",
    description: "HTTP request duration",
    unit: "ms"
  )

  error_counter = @meter.create_counter(
    "http.server.error.count",
    description: "Total HTTP 5xx responses",
    unit: "{errors}"
  )

  db_duration_histogram = @meter.create_histogram(
    "db.query.duration",
    description: "Database query duration",
    unit: "ms"
  )

  ActiveSupport::Notifications.subscribe("process_action.action_controller") do |*args|
    event = ActiveSupport::Notifications::Event.new(*args)
    payload = event.payload

    # NOTE(review): payload[:path] is the concrete request path (IDs, query
    # string), which can be high-cardinality as a metric attribute — consider
    # the route pattern instead if cardinality becomes a problem.
    attrs = {
      "http.method"      => payload[:method].to_s,
      "http.route"       => payload[:path].to_s,
      "http.status_code" => payload[:status].to_s,
      "controller"       => payload[:controller].to_s,
      "action"           => payload[:action].to_s,
    }

    request_counter.add(1, attributes: attrs)
    duration_histogram.record(event.duration, attributes: attrs)

    error_counter.add(1, attributes: attrs) if payload[:status].to_i >= 500

    if payload[:db_runtime]
      db_duration_histogram.record(
        payload[:db_runtime].to_f,
        attributes: { "controller" => payload[:controller].to_s }
      )
    end
  rescue StandardError
    # Never let metric errors surface to the application
  end
rescue StandardError => e
  # Consistent with the other install_* helpers: previously this was the only
  # installer without a rescue, so any failure here propagated into setup's
  # rescue and aborted installation of the remaining instrumentation.
  warn "Tracelit: failed to install Rails subscriber: #{e.message}"
end

.install_sidekiq_middlewareObject

Installs a Sidekiq server middleware that emits per-job metrics. Uses a dynamically defined class so the instrument references are captured in the closure without global state.



140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
# File 'lib/tracelit/metrics.rb', line 140

# Installs a Sidekiq server middleware that emits per-job metrics. Uses a
# dynamically defined class so the instrument references are captured in the
# closure without global state.
#
# @return [void]
def self.install_sidekiq_middleware
  return if @meter.nil? # metrics not set up — nothing to install

  job_counter = @meter.create_counter(
    "sidekiq.job.count",
    description: "Total Sidekiq jobs processed",
    unit: "{jobs}"
  )

  job_duration = @meter.create_histogram(
    "sidekiq.job.duration",
    description: "Sidekiq job execution duration",
    unit: "ms"
  )

  job_error_counter = @meter.create_counter(
    "sidekiq.job.error.count",
    description: "Total Sidekiq jobs that raised an error",
    unit: "{jobs}"
  )

  # define_method blocks close over the locals above directly — the previous
  # underscore-prefixed aliases (_job_counter etc.) were redundant and the
  # leading underscore falsely signaled "unused".
  middleware_class = Class.new do
    define_method(:call) do |_worker, msg, queue, &block|
      start = Process.clock_gettime(Process::CLOCK_MONOTONIC)
      error_raised = false

      begin
        block.call
      rescue StandardError
        error_raised = true
        raise # re-raise so Sidekiq's own retry handling still runs
      ensure
        elapsed_ms = (Process.clock_gettime(Process::CLOCK_MONOTONIC) - start) * 1000.0

        attrs = {
          "sidekiq.job.class" => msg["class"].to_s,
          "sidekiq.queue"     => queue.to_s,
          "sidekiq.status"    => error_raised ? "error" : "success",
        }

        job_counter.add(1, attributes: attrs)
        job_duration.record(elapsed_ms, attributes: attrs)
        job_error_counter.add(1, attributes: attrs) if error_raised
      end
    end
  end

  Sidekiq.configure_server do |config|
    config.server_middleware do |chain|
      chain.add middleware_class
    end
  end
rescue StandardError => e
  warn "Tracelit: failed to install Sidekiq middleware: #{e.message}"
end

.meterObject



52
53
54
# File 'lib/tracelit/metrics.rb', line 52

class << self
  # The module-level OpenTelemetry meter created by .setup, or nil before
  # setup has run.
  attr_reader :meter
end

.setup(config) ⇒ Object

Sets up the OpenTelemetry MeterProvider with OTLP exporter. Called once from Instrumentation.setup after trace setup.



11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
# File 'lib/tracelit/metrics.rb', line 11

# Sets up the OpenTelemetry MeterProvider with OTLP exporter and installs the
# framework instrumentation that applies to this process. Called once from
# Instrumentation.setup after trace setup.
#
# @param config [Object] read for #endpoint, #api_key,
#   #resolved_service_name and #environment — TODO confirm exact contract
# @return [void] any failure is logged and swallowed
def self.setup(config)
  # Force delta temporality for all instruments. The SDK aggregation classes
  # (Sum, ExplicitBucketHistogram) read this env var at construction time;
  # there is no constructor keyword on MetricsExporter for this in v0.8.0.
  # Must therefore be set BEFORE the exporter/reader are built below.
  ENV["OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE"] = "delta"

  exporter = OpenTelemetry::Exporter::OTLP::Metrics::MetricsExporter.new(
    endpoint: "#{config.endpoint}/v1/metrics",
    headers: {
      "Authorization"  => "Bearer #{config.api_key}",
      "X-Service-Name" => config.resolved_service_name,
      "X-Environment"  => config.environment,
    }
  )

  # Export accumulated metrics once per minute; allow 10s per export attempt.
  reader = OpenTelemetry::SDK::Metrics::Export::PeriodicMetricReader.new(
    exporter: exporter,
    export_interval_millis: 60_000,
    export_timeout_millis:  10_000
  )

  # Reuse the resource already attached to the tracer provider so metrics and
  # traces carry identical resource attributes.
  provider = OpenTelemetry::SDK::Metrics::MeterProvider.new(
    resource: OpenTelemetry.tracer_provider.resource
  )
  provider.add_metric_reader(reader)

  OpenTelemetry.meter_provider = provider

  # Memoized meter read by the instrument helpers (.counter etc.) and the
  # install_* methods.
  @meter = provider.meter(
    config.resolved_service_name,
    version: Tracelit::VERSION
  )

  # Install only the instrumentation whose framework is actually loaded.
  install_rails_subscriber       if defined?(::Rails)
  install_sidekiq_middleware      if defined?(::Sidekiq)
  install_connection_pool_poller  if defined?(::ActiveRecord)
  install_memory_poller
rescue StandardError => e
  OpenTelemetry.logger.warn("Tracelit: failed to set up metrics: #{e.message}")
end