Class: LLM::OpenAI::Responses::StreamParser

Inherits:
Object
Defined in:
lib/llm/providers/openai/responses/stream_parser.rb

Constant Summary collapse

EMPTY_HASH =
{}.freeze

Instance Attribute Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize(stream) ⇒ LLM::OpenAI::Responses::StreamParser

Parameters:

  • stream (#<<, LLM::Stream)

    A stream sink that implements #<< or the Stream interface



18
19
20
21
22
23
24
25
26
27
28
29
30
# File 'lib/llm/providers/openai/responses/stream_parser.rb', line 18

def initialize(stream)
  @stream = stream
  # Accumulates the full response as events stream in.
  @body   = {"output" => []}
  @emits  = {tools: {}}
  # Probe the sink's capabilities once up front so per-event
  # dispatch never has to repeat the respond_to? checks.
  @can_emit_content           = stream.respond_to?(:on_content)
  @can_emit_reasoning_content = stream.respond_to?(:on_reasoning_content)
  @can_emit_tool_call         = stream.respond_to?(:on_tool_call)
  @can_push_content           = stream.respond_to?(:<<)
  # Memoized lookups for the current output item / content part.
  @cached_output_index = @cached_output_item = nil
  @cached_content_index = @cached_content_part = nil
end

Instance Attribute Details

#body ⇒ Hash (readonly)

Returns the fully constructed response body

Returns:

  • (Hash)


12
13
14
# File 'lib/llm/providers/openai/responses/stream_parser.rb', line 12

# Reader for the response body accumulated so far during streaming.
# @return [Hash] the fully constructed response body
def body
  @body
end

Instance Method Details

#free ⇒ void

This method returns an undefined value.

Frees internal parser state used during streaming.



42
43
44
45
# File 'lib/llm/providers/openai/responses/stream_parser.rb', line 42

# Frees internal parser state used during streaming.
# Empties the emit bookkeeping and resets the memoized lookups
# via clear_cache! (defined elsewhere in this file).
# NOTE(review): @body is intentionally left intact so it can
# still be read after the stream ends — confirm against callers.
# @return [void]
def free
  @emits.clear
  clear_cache!
end

#parse!(chunk) ⇒ LLM::OpenAI::Responses::StreamParser

Parameters:

  • chunk (Hash)

Returns:

  • (LLM::OpenAI::Responses::StreamParser)


35
36
37
# File 'lib/llm/providers/openai/responses/stream_parser.rb', line 35

# Feeds one streamed event into the parser.
# @param chunk [Hash] a single server-sent event payload
# @return [LLM::OpenAI::Responses::StreamParser] self, for chaining
def parse!(chunk)
  handle_event(chunk)
  self
end