Class: ThinkingData::TDBatchConsumer

Inherits:
Object
  • Object
show all
Defined in:
lib/thinkingdata-ruby/td_batch_consumer.rb

Overview

Uploads buffered event data to the server via HTTP.

Constant Summary collapse

DEFAULT_LENGTH =

Default number of events to buffer before a flush is triggered.

20
MAX_LENGTH =
2000

Instance Method Summary collapse

Constructor Details

#initialize(server_url, app_id, max_buffer_length = DEFAULT_LENGTH) ⇒ TDBatchConsumer

Init batch consumer



16
17
18
19
20
21
22
23
24
25
# File 'lib/thinkingdata-ruby/td_batch_consumer.rb', line 16

# Creates a batch consumer that buffers events in memory and uploads
# them over HTTP once the buffer reaches its capacity.
#
# @param server_url [String] receiver base URL; the '/sync_server' path is appended
# @param app_id [String] project APP ID sent with every request
# @param max_buffer_length [Integer] flush threshold, capped at MAX_LENGTH
def initialize(server_url, app_id, max_buffer_length = DEFAULT_LENGTH)
  uri = URI.parse(server_url)
  uri.path = '/sync_server'
  @server_uri = uri
  @app_id = app_id
  @compress = true
  # Cap the configured buffer length so a single upload never exceeds MAX_LENGTH records.
  @max_length = max_buffer_length > MAX_LENGTH ? MAX_LENGTH : max_buffer_length
  @buffers = []
  @mutex = Mutex.new
  TDLog.info("TDBatchConsumer init success. ServerUrl: #{server_url}, appId: #{app_id}")
end

Instance Method Details

#_set_compress(compress) ⇒ Object

Deprecated.

Please use set_compress instead.

Enables or disables compression of the HTTP request body.

Parameters:

  • compress (Boolean)

    compress or not



31
32
33
# File 'lib/thinkingdata-ruby/td_batch_consumer.rb', line 31

# @deprecated Use {#set_compress} instead.
#
# Enables or disables compression of the HTTP request body.
#
# @param compress [Boolean] compress or not
def _set_compress(compress)
  @compress = compress
end

#add(message) ⇒ Object



42
43
44
45
46
47
48
49
50
# File 'lib/thinkingdata-ruby/td_batch_consumer.rb', line 42

# Appends one event to the in-memory buffer and triggers an upload
# when the buffer has reached its configured capacity.
#
# @param message [Object] a single event record to enqueue
def add(message)
  TDLog.info("Enqueue data to buffer. buffer size: #{@buffers.length}, data: #{message}")
  # synchronize returns the block's last value, so the capacity check
  # doubles as the flush decision — no out-variable needed.
  at_capacity = @mutex.synchronize do
    @buffers << message
    @buffers.length >= @max_length
  end
  flush if at_capacity
end

#closeObject



52
53
54
55
# File 'lib/thinkingdata-ruby/td_batch_consumer.rb', line 52

# Flushes any remaining buffered events and logs consumer shutdown.
# Call this before the process exits so no buffered data is lost.
def close
  flush
  TDLog.info("TDBatchConsumer close.")
end

#flushObject



57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
# File 'lib/thinkingdata-ruby/td_batch_consumer.rb', line 57

# Uploads all buffered events to the TE server, in chunks of at most
# @max_length records per HTTP request.
#
# The buffer is swapped out under the mutex so new events can keep
# arriving while the upload runs. Each chunk is serialized to JSON and,
# when compression is enabled, gzip-compressed before sending.
#
# @raise [ConnectionError] when the HTTP request itself fails
# @raise [ServerError] when the server replies with a non-success code
#   or an unparseable body
def flush
  TDLog.info("TDBatchConsumer flush data.")
  data_to_send = nil
  @mutex.synchronize do
    # Swap the array out whole; no dup needed since @buffers is replaced.
    data_to_send = @buffers
    @buffers = []
  end

  return if data_to_send.empty?

  data_to_send.each_slice(@max_length) do |chunk|
    payload = chunk.to_json
    # Zlib.gzip replaces the StringIO/GzipWriter dance; the previous code
    # passed "w" as StringIO's *initial contents* (not a mode string) and
    # only worked because the gzip output overwrote it.
    data = @compress ? Zlib.gzip(payload) : payload
    headers = {'Content-Type' => 'application/plaintext',
               'appid' => @app_id,
               'compress' => @compress ? 'gzip' : 'none',
               'TE-Integration-Type' => 'Ruby',
               'TE-Integration-Version' => ThinkingData::VERSION,
               # NOTE(review): this reports the total flushed count on every
               # chunked request rather than the per-chunk count — confirm
               # this matches the server's expectation.
               'TE-Integration-Count' => data_to_send.count,
               'TA_Integration-Extra' => 'batch'}
    request = CaseSensitivePost.new(@server_uri.request_uri, headers)
    request.body = data

    TDLog.info("Send data, request: #{data}")
    begin
      response_code, response_body = _request(@server_uri, request)
      TDLog.info("Send data, response: #{response_body}")
    rescue => e
      raise ConnectionError.new("Could not connect to TE server, with error \"#{e.message}\".")
    end

    result = {}
    if response_code.to_i == 200
      begin
        result = JSON.parse(response_body.to_s)
      rescue JSON::JSONError
        raise ServerError.new("Could not interpret TE server response: '#{response_body}'")
      end
    end

    # A non-200 response leaves result empty, so result['code'] is nil and
    # this raises as well — both failure paths surface as ServerError.
    if result['code'] != 0
      raise ServerError.new("Could not write to TE, server responded with #{response_code} returning: '#{response_body}'")
    end
  end
end

#set_compress(compress) ⇒ Object

Enables or disables compression of the HTTP request body.

Parameters:

  • compress (Boolean)

    compress or not



38
39
40
# File 'lib/thinkingdata-ruby/td_batch_consumer.rb', line 38

# Enables or disables compression of the HTTP request body.
#
# @param compress [Boolean] compress or not
def set_compress(compress)
  @compress = compress
end