Module: LlmCostTracker::Integrations::Openai::ResponsesPatch

Defined in:
lib/llm_cost_tracker/integrations/openai.rb

Instance Method Summary collapse

Instance Method Details

#create(*args, **kwargs) ⇒ Object



107
108
109
110
111
112
113
114
115
116
117
# File 'lib/llm_cost_tracker/integrations/openai.rb', line 107

# Wraps Responses#create via prepend: enforces the configured budget before
# delegating, then records the response with the original request params and
# the measured wall-clock latency. Returns the provider response unchanged.
def create(*args, **kwargs)
  tracker = LlmCostTracker::Integrations::Openai
  # Monotonic clock so the latency measurement is immune to wall-clock jumps.
  start = Process.clock_gettime(Process::CLOCK_MONOTONIC)
  tracker.enforce_budget!
  result = super
  tracker.record_response(
    result,
    request: tracker.request_params(args, kwargs),
    latency_ms: tracker.elapsed_ms(start)
  )
  result
end

#retrieve_streaming(response_id, *args, **kwargs) ⇒ Object



135
136
137
138
139
140
141
142
# File 'lib/llm_cost_tracker/integrations/openai.rb', line 135

# Wraps Responses#retrieve_streaming via prepend: builds a stream collector
# from the request params, tags it with the already-known provider response
# id, enforces the budget, and returns the tracked stream.
def retrieve_streaming(response_id, *args, **kwargs)
  tracker = LlmCostTracker::Integrations::Openai
  params = tracker.request_params(args, kwargs)
  usage = tracker.stream_collector(params)
  # The response id is known up front here (unlike #stream), so record it
  # on the collector before any events arrive.
  usage.provider_response_id = response_id
  tracker.enforce_budget!
  tracker.track_stream(super, collector: usage)
end

#stream(*args, **kwargs) ⇒ Object



119
120
121
122
123
124
125
# File 'lib/llm_cost_tracker/integrations/openai.rb', line 119

# Wraps Responses#stream via prepend: prepares a usage collector for the
# request, enforces the budget before delegating, and returns the stream
# wrapped so that consumed events are tracked.
def stream(*args, **kwargs)
  tracker = LlmCostTracker::Integrations::Openai
  params = tracker.request_params(args, kwargs)
  usage = tracker.stream_collector(params)
  tracker.enforce_budget!
  tracker.track_stream(super, collector: usage)
end

#stream_raw(*args, **kwargs) ⇒ Object



127
128
129
130
131
132
133
# File 'lib/llm_cost_tracker/integrations/openai.rb', line 127

# Wraps Responses#stream_raw via prepend: mirrors #stream — collector from
# request params, budget check, then delegate and hand the raw event stream
# off for usage tracking.
def stream_raw(*args, **kwargs)
  tracker = LlmCostTracker::Integrations::Openai
  params = tracker.request_params(args, kwargs)
  usage = tracker.stream_collector(params)
  tracker.enforce_budget!
  tracker.track_stream(super, collector: usage)
end