Module: LlmCostTracker::Integrations::Anthropic
- Extended by:
- Base
- Defined in:
- lib/llm_cost_tracker/integrations/anthropic.rb
Defined Under Namespace
Modules: MessagesPatch
Class Method Summary collapse
- .finish_stream(collector, errored:) ⇒ Object
- .inference_geo(message:, request:, usage:) ⇒ Object
- .integration_name ⇒ Object
- .minimum_version ⇒ Object
- .patch_targets ⇒ Object
- .pricing_mode(message:, request:, usage:) ⇒ Object
- .record_message(message, request:, latency_ms:) ⇒ Object
- .stream_collector(request) ⇒ Object
- .token_usage(usage:, input_tokens:, output_tokens:) ⇒ Object
- .track_stream(stream, collector:) ⇒ Object
- .version_constant ⇒ Object
Methods included from Base
active?, elapsed_ms, enforce_budget!, install, minimum_version, object_dig, object_value, patch_target, patch_targets, record_safely, request_params, status, version_constant
Class Method Details
.finish_stream(collector, errored:) ⇒ Object
122 123 124 |
# File 'lib/llm_cost_tracker/integrations/anthropic.rb', line 122 def finish_stream(collector, errored:) record_safely { collector.finish!(errored: errored) } end |
.inference_geo(message:, request:, usage:) ⇒ Object
98 99 100 101 102 |
# File 'lib/llm_cost_tracker/integrations/anthropic.rb', line 98 def inference_geo(message:, request:, usage:) object_value(usage, :inference_geo) || object_value(message, :inference_geo) || request[:inference_geo] end |
.integration_name ⇒ Object
13 14 15 |
# File 'lib/llm_cost_tracker/integrations/anthropic.rb', line 13 def integration_name :anthropic end |
.minimum_version ⇒ Object
17 18 19 |
# File 'lib/llm_cost_tracker/integrations/anthropic.rb', line 17 def minimum_version "1.36.0" end |
.patch_targets ⇒ Object
25 26 27 28 29 30 31 32 33 34 35 |
# File 'lib/llm_cost_tracker/integrations/anthropic.rb', line 25 def patch_targets [ patch_target("Anthropic::Resources::Messages", with: MessagesPatch, methods: %i[create stream stream_raw]), patch_target( "Anthropic::Resources::Beta::Messages", with: MessagesPatch, methods: %i[create stream stream_raw], optional: true ) ] end |
.pricing_mode(message:, request:, usage:) ⇒ Object
86 87 88 89 90 91 92 93 94 95 96 |
# File 'lib/llm_cost_tracker/integrations/anthropic.rb', line 86 def pricing_mode(message:, request:, usage:) modes = [ Pricing.normalize_mode(object_value(usage, :speed) || object_value(message, :speed) || request[:speed]), Pricing.normalize_mode( object_value(usage, :service_tier) || object_value(message, :service_tier) || request[:service_tier] ) ] modes << "data_residency" if inference_geo(message: message, request: request, usage: usage).to_s == "us" modes = modes.compact.uniq modes.empty? ? nil : modes.join("_") end |
.record_message(message, request:, latency_ms:) ⇒ Object
37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 |
# File 'lib/llm_cost_tracker/integrations/anthropic.rb', line 37 def record_message(message, request:, latency_ms:) return unless active? record_safely do usage = object_value(message, :usage) next unless usage input_tokens = object_value(usage, :input_tokens) output_tokens = object_value(usage, :output_tokens) next if input_tokens.nil? && output_tokens.nil? LlmCostTracker::Tracker.record( capture: UsageCapture.build( provider: "anthropic", model: object_value(message, :model) || request[:model], pricing_mode: pricing_mode(message: message, request: request, usage: usage), token_usage: token_usage(usage: usage, input_tokens: input_tokens, output_tokens: output_tokens), usage_source: :sdk_response, provider_response_id: object_value(message, :id) ), latency_ms: latency_ms ) end end |
.stream_collector(request) ⇒ Object
115 116 117 118 119 120 |
# File 'lib/llm_cost_tracker/integrations/anthropic.rb', line 115 def stream_collector(request) LlmCostTracker::Capture::StreamCollector.new( provider: "anthropic", model: request[:model] ) end |
.token_usage(usage:, input_tokens:, output_tokens:) ⇒ Object
62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 |
# File 'lib/llm_cost_tracker/integrations/anthropic.rb', line 62 def token_usage(usage:, input_tokens:, output_tokens:) cache_write_1h = object_dig(usage, :cache_creation, :ephemeral_1h_input_tokens).to_i cache_write_5m = object_dig(usage, :cache_creation, :ephemeral_5m_input_tokens) cache_write = if cache_write_5m.nil? total_cache_write = object_value(usage, :cache_creation_input_tokens) [total_cache_write.to_i - cache_write_1h, 0].max else cache_write_5m.to_i end hidden_output = ( object_value(usage, :thinking_tokens, :thinking_output_tokens) || object_dig(usage, :output_tokens_details, :reasoning_tokens) ).to_i TokenUsage.build( input_tokens: input_tokens.to_i, output_tokens: output_tokens.to_i, cache_read_input_tokens: object_value(usage, :cache_read_input_tokens).to_i, cache_write_input_tokens: cache_write, cache_write_1h_input_tokens: cache_write_1h, hidden_output_tokens: hidden_output ) end |
.track_stream(stream, collector:) ⇒ Object
104 105 106 107 108 109 110 111 112 113 |
# File 'lib/llm_cost_tracker/integrations/anthropic.rb', line 104 def track_stream(stream, collector:) return stream unless active? LlmCostTracker::Capture::StreamTracker.new( stream: stream, collector: collector, active: -> { active? }, finish: ->(errored:) { finish_stream(collector, errored: errored) } ).wrap end |
.version_constant ⇒ Object
21 22 23 |
# File 'lib/llm_cost_tracker/integrations/anthropic.rb', line 21 def version_constant "Anthropic::VERSION" end |