Class: LlmCostTracker::Capture::StreamCollector
- Inherits: Object
- Hierarchy: Object → LlmCostTracker::Capture::StreamCollector
- Defined in:
- lib/llm_cost_tracker/capture/stream_collector.rb
Instance Attribute Summary collapse
-
#provider ⇒ Object
readonly
Returns the value of attribute provider.
Instance Method Summary collapse
- #event(data, type: nil) ⇒ Object
- #finish!(errored: false) ⇒ Object
-
#initialize(provider:, model:, latency_ms: nil, provider_response_id: nil, provider_project_id: nil, provider_api_key_id: nil, provider_workspace_id: nil, batch: nil, pricing_mode: nil, metadata: {}, context_tags: nil) ⇒ StreamCollector
constructor
A new instance of StreamCollector.
- #metadata ⇒ Object
- #model ⇒ Object
- #model=(value) ⇒ Object
- #provider_response_id ⇒ Object
- #provider_response_id=(value) ⇒ Object
- #usage(input_tokens:, output_tokens:, **extra) ⇒ Object
Constructor Details
#initialize(provider:, model:, latency_ms: nil, provider_response_id: nil, provider_project_id: nil, provider_api_key_id: nil, provider_workspace_id: nil, batch: nil, pricing_mode: nil, metadata: {}, context_tags: nil) ⇒ StreamCollector
Returns a new instance of StreamCollector.
15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 |
# File 'lib/llm_cost_tracker/capture/stream_collector.rb', line 15 def initialize(provider:, model:, latency_ms: nil, provider_response_id: nil, provider_project_id: nil, provider_api_key_id: nil, provider_workspace_id: nil, batch: nil, pricing_mode: nil, metadata: {}, context_tags: nil) @provider = provider.to_s @model = model @latency_ms = latency_ms @provider_response_id = provider_response_id @provider_project_id = provider_project_id @provider_api_key_id = provider_api_key_id @provider_workspace_id = provider_workspace_id @batch = batch @pricing_mode = pricing_mode @metadata = ( || {}).deep_dup @context_tags = ( || LlmCostTracker::Tags::Context.).deep_dup @events = [] @captured_bytes = 0 @overflowed = false @explicit_usage = nil @started_at = LlmCostTracker::Timing.now_monotonic @finished = false @mutex = Mutex.new end |
Instance Attribute Details
#provider ⇒ Object (readonly)
Returns the value of attribute provider.
# File 'lib/llm_cost_tracker/capture/stream_collector.rb', line 13
#
# @return [String] the provider identifier captured at construction
def provider
  @provider
end
Instance Method Details
#event(data, type: nil) ⇒ Object
# File 'lib/llm_cost_tracker/capture/stream_collector.rb', line 64
#
# Captures one stream event under the mutex; nil payloads are ignored
# (but the open-state check still runs). Returns self for chaining.
def event(data, type: nil)
  @mutex.synchronize do
    ensure_open!
    next if data.nil?
    capture_event(data, type: type)
  end
  self
end
#finish!(errored: false) ⇒ Object
# File 'lib/llm_cost_tracker/capture/stream_collector.rb', line 90
#
# Finalizes the stream exactly once: snapshots all mutable state while
# holding the mutex, then builds and records the usage capture with the
# lock released (so Tracker.record never runs under @mutex).
#
# @param errored [Boolean] marks the stream as having ended in error
def finish!(errored: false)
  snapshot = @mutex.synchronize do
    return if @finished # idempotent: a second finish! is a no-op
    @finished = true
    pricing_mode = Pricing.normalize_mode(@pricing_mode)
    {
      events: @events.dup,
      overflowed: @overflowed,
      explicit_usage: @explicit_usage,
      model: @model,
      latency_ms: @latency_ms,
      provider_response_id: @provider_response_id,
      capture_dimensions: capture_dimensions(pricing_mode),
      pricing_mode: pricing_mode,
      metadata: @metadata.deep_dup,
      context_tags: @context_tags.deep_dup
    }
  end
  capture = build_usage_capture(snapshot)
  # Prefer an id discovered while parsing events; fall back to the explicit one.
  provider_response_id = capture.provider_response_id || snapshot[:provider_response_id]
  capture = capture.with(provider_response_id: provider_response_id)
  Tracker.record(
    capture: capture,
    # No explicit latency: measure from construction (monotonic clock).
    latency_ms: snapshot[:latency_ms] || LlmCostTracker::Timing.elapsed_ms(@started_at),
    pricing_mode: snapshot[:pricing_mode],
    # Merge order: snapshot metadata wins over the stream_errored flag on collision.
    metadata: (errored ? { stream_errored: true } : {}).merge(snapshot[:metadata]),
    context_tags: snapshot[:context_tags]
  )
end
#metadata ⇒ Object
# File 'lib/llm_cost_tracker/capture/stream_collector.rb', line 42
#
# Thread-safe metadata accessor; returns a deep copy so callers cannot
# mutate collector-internal state.
# NOTE(review): extraction dropped the method name after `def`; restored
# as `metadata` per the "#metadata" signature above.
#
# @return [Hash] a deep copy of the captured metadata
def metadata
  @mutex.synchronize { @metadata.deep_dup }
end
#model ⇒ Object
# File 'lib/llm_cost_tracker/capture/stream_collector.rb', line 38
#
# Thread-safe reader for the current model identifier.
def model
  @mutex.synchronize do
    @model
  end
end
#model=(value) ⇒ Object
# File 'lib/llm_cost_tracker/capture/stream_collector.rb', line 50
#
# Thread-safe writer; raises (via ensure_open!) once the stream is finished.
def model=(new_model)
  @mutex.synchronize do
    ensure_open!
    @model = new_model
  end
end
#provider_response_id ⇒ Object
# File 'lib/llm_cost_tracker/capture/stream_collector.rb', line 46
#
# Thread-safe reader for the provider-assigned response id.
def provider_response_id
  @mutex.synchronize do
    @provider_response_id
  end
end
#provider_response_id=(value) ⇒ Object
# File 'lib/llm_cost_tracker/capture/stream_collector.rb', line 57
#
# Thread-safe writer; raises (via ensure_open!) once the stream is finished.
def provider_response_id=(new_id)
  @mutex.synchronize do
    ensure_open!
    @provider_response_id = new_id
  end
end
#usage(input_tokens:, output_tokens:, **extra) ⇒ Object
# File 'lib/llm_cost_tracker/capture/stream_collector.rb', line 72
#
# Records explicit token usage reported by the provider. Provider
# identifiers present in +extra+ are folded into collector state
# (a nil/absent value keeps the existing one) before the remaining
# keys are sliced into a TokenUsage. Returns self for chaining.
def usage(input_tokens:, output_tokens:, **extra)
  @mutex.synchronize do
    ensure_open!
    %i[provider_response_id provider_project_id provider_api_key_id provider_workspace_id].each do |field|
      ivar = :"@#{field}"
      instance_variable_set(ivar, extra.delete(field) || instance_variable_get(ivar))
    end
    new_batch = extra.delete(:batch)
    @batch = new_batch unless new_batch.nil?
    @explicit_usage = TokenUsage.build(
      **extra.slice(*TokenUsage.members),
      input_tokens: input_tokens,
      output_tokens: output_tokens
    )
  end
  self
end