Class: Fluent::Plugin::Sumologic

Inherits:
Output
  • Object
Defined in:
lib/fluent/plugin/out_sumologic.rb

Constant Summary

DEFAULT_BUFFER_TYPE = "memory"
LOGS_DATA_TYPE = "logs"
METRICS_DATA_TYPE = "metrics"
DEFAULT_DATA_TYPE = LOGS_DATA_TYPE
DEFAULT_METRIC_FORMAT_TYPE = 'graphite'

Instance Method Summary

Constructor Details

#initialize ⇒ Sumologic

Returns a new instance of Sumologic.



# File 'lib/fluent/plugin/out_sumologic.rb', line 189

def initialize
  super
end

Instance Method Details

#configure(conf) ⇒ Object

This method is called before starting.



# File 'lib/fluent/plugin/out_sumologic.rb', line 198

def configure(conf)

  compat_parameters_convert(conf, :buffer)
  super

  unless @endpoint =~ URI::regexp
    raise Fluent::ConfigError, "Invalid SumoLogic endpoint url: #{@endpoint}"
  end

  unless @data_type =~ /\A(?:logs|metrics)\z/
    raise Fluent::ConfigError, "Invalid data_type #{@data_type} must be logs or metrics"
  end

  if @data_type == LOGS_DATA_TYPE
    unless @log_format =~ /\A(?:json|text|json_merge|fields)\z/
      raise Fluent::ConfigError, "Invalid log_format #{@log_format} must be text, json, json_merge or fields"
    end
  end

  if @data_type == METRICS_DATA_TYPE
    unless @metric_data_format =~ /\A(?:graphite|carbon2|prometheus)\z/
      raise Fluent::ConfigError, "Invalid metric_data_format #{@metric_data_format} must be graphite or carbon2 or prometheus"
    end
  end

  @custom_fields = validate_key_value_pairs(@custom_fields)
  if @custom_fields
    @log.debug "Custom fields: #{@custom_fields}"
  end

  @custom_dimensions = validate_key_value_pairs(@custom_dimensions)
  if @custom_dimensions
    @log.debug "Custom dimensions: #{@custom_dimensions}"
  end

  @sumo_conn = SumologicConnection.new(
    @endpoint,
    @verify_ssl,
    @open_timeout,
    @receive_timeout,
    @send_timeout,
    @proxy_uri,
    @disable_cookies,
    @sumo_client,
    @compress,
    @compress_encoding,
    @log,
    )
end
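
As a rough illustration of the guards above (the regexes are copied from the method; the sample values are invented):

'logs'   =~ /\A(?:logs|metrics)\z/                 # => 0   (accepted)
'traces' =~ /\A(?:logs|metrics)\z/                 # => nil (configure raises Fluent::ConfigError)
'fields' =~ /\A(?:json|text|json_merge|fields)\z/  # => 0   (accepted)
'csv'    =~ /\A(?:json|text|json_merge|fields)\z/  # => nil (configure raises Fluent::ConfigError)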

#dump_log(log) ⇒ Object

Strip _sumo_metadata from the record and dump it to JSON



# File 'lib/fluent/plugin/out_sumologic.rb', line 275

def dump_log(log)
  log.delete('_sumo_metadata')
  begin
    hash = JSON.parse(log[@log_key])
    log[@log_key] = hash
    Yajl.dump(log)
  rescue
    Yajl.dump(log)
  end
end
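
A sketch of the effect, assuming the log key is 'message' (an illustrative assumption; the actual key comes from @log_key):

# Invented record; assumes @log_key == 'message'.
record = { 'message' => '{"level":"info"}', '_sumo_metadata' => { 'source' => 'app' } }
dump_log(record)
# => '{"message":{"level":"info"}}'  (_sumo_metadata removed, nested JSON parsed)
# If the log key does not hold valid JSON, the record is still dumped,
# with the value left as a plain string.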

#format(tag, time, record) ⇒ Object



# File 'lib/fluent/plugin/out_sumologic.rb', line 286

def format(tag, time, record)
  if defined? time.nsec
    mstime = time * 1000 + (time.nsec / 1000000)
    [mstime, record].to_msgpack
  else
    [time, record].to_msgpack
  end
end
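
A Fluent::EventTime carries a nanosecond part, which the branch above folds into a 13-digit millisecond epoch. Worked with invented numbers:

time_sec = 1578222950              # seconds since epoch
nsec     = 500_000_000             # nanosecond part of the event time
time_sec * 1000 + (nsec / 1000000)
# => 1578222950500  (millisecond epoch, packed alongside the record)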

#formatted_to_msgpack_binary ⇒ Object



# File 'lib/fluent/plugin/out_sumologic.rb', line 295

def formatted_to_msgpack_binary
  true
end

#log_to_str(log) ⇒ Object

Convert the log to a string and strip surrounding whitespace



# File 'lib/fluent/plugin/out_sumologic.rb', line 322

def log_to_str(log)
  if log.is_a?(Array) or log.is_a?(Hash)
    log = Yajl.dump(log)
  end

  unless log.nil?
    log.strip!
  end

  return log
end
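
Illustrative inputs and outputs (values are invented):

log_to_str({ 'a' => 1 })   # => '{"a":1}'  (Hash serialized via Yajl)
log_to_str("  hello \n")   # => 'hello'    (surrounding whitespace stripped)
log_to_str(nil)            # => nil        (passed through untouched)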

#merge_json(record) ⇒ Object

Merge the log record into the top-level JSON object



# File 'lib/fluent/plugin/out_sumologic.rb', line 259

def merge_json(record)
  if record.has_key?(@log_key)
    log = record[@log_key].strip
    if log[0].eql?('{') && log[-1].eql?('}')
      begin
        record = record.merge(JSON.parse(log))
        record.delete(@log_key)
      rescue JSON::ParserError
        # do nothing, ignore
      end
    end
  end
  record
end
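
For example, assuming @log_key is 'message' (record values are invented):

record = { 'message' => '{"user":"jane"}', 'host' => 'web-1' }
merge_json(record)
# => { 'host' => 'web-1', 'user' => 'jane' }
# A value that is not a braced JSON object, or that fails to parse,
# leaves the record untouched.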

#multi_workers_ready? ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/fluent/plugin/out_sumologic.rb', line 193

def multi_workers_ready?
  true
end

#shutdown ⇒ Object

This method is called when shutting down.



# File 'lib/fluent/plugin/out_sumologic.rb', line 254

def shutdown
  super
end

#start ⇒ Object

This method is called when starting.



# File 'lib/fluent/plugin/out_sumologic.rb', line 249

def start
  super
end

#sumo_key(sumo_metadata, chunk) ⇒ Object



# File 'lib/fluent/plugin/out_sumologic.rb', line 299

def sumo_key(sumo_metadata, chunk)
  source_name = sumo_metadata['source'] || @source_name
  source_name = extract_placeholders(source_name, chunk) unless source_name.nil?

  source_category = sumo_metadata['category'] || @source_category
  source_category = extract_placeholders(source_category, chunk) unless source_category.nil?

  source_host = sumo_metadata['host'] || @source_host
  source_host = extract_placeholders(source_host, chunk) unless source_host.nil?

  fields = sumo_metadata['fields'] || ""
  fields = extract_placeholders(fields, chunk) unless fields.nil?

  { :source_name => "#{source_name}", :source_category => "#{source_category}",
    :source_host => "#{source_host}", :fields => "#{fields}" }
end
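
The precedence is per-record metadata first, plugin defaults second. A sketch with invented values:

# Assuming @source_name = 'default-name', @source_category = 'default-cat',
# @source_host = 'default-host', and a chunk with no placeholders:
sumo_key({ 'source' => 'my-app' }, chunk)
# => { :source_name => 'my-app', :source_category => 'default-cat',
#      :source_host => 'default-host', :fields => '' }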

#sumo_timestamp(time) ⇒ Object

Convert the timestamp to a 13-digit (millisecond) epoch if necessary



# File 'lib/fluent/plugin/out_sumologic.rb', line 317

def sumo_timestamp(time)
  time.to_s.length == 13 ? time : time * 1000
end
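
Concretely:

sumo_timestamp(1578222950)     # => 1578222950000 (seconds scaled to milliseconds)
sumo_timestamp(1578222950500)  # => 1578222950500 (already 13 digits, returned as-is)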

#validate_key_value_pairs(fields) ⇒ Object



# File 'lib/fluent/plugin/out_sumologic.rb', line 477

def validate_key_value_pairs(fields)
  if fields.nil?
    return fields
  end

  fields = fields.split(",").select { |field|
    field.split('=').length == 2
  }

  if fields.length == 0
    return nil
  end

  fields.join(',')
end
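
Malformed pairs are silently dropped (sample inputs are invented):

validate_key_value_pairs('env=prod,team,dc=us-east')  # => 'env=prod,dc=us-east'
validate_key_value_pairs('bad,also_bad')              # => nil
validate_key_value_pairs(nil)                         # => nil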

#write(chunk) ⇒ Object

This method is called every flush interval. Write the buffer chunk.



# File 'lib/fluent/plugin/out_sumologic.rb', line 335

def write(chunk)
  messages_list = {}

  # Group messages by their sumo metadata key
  chunk.msgpack_each do |time, record|
    # plugin dies randomly
    # https://github.com/uken/fluent-plugin-elasticsearch/commit/8597b5d1faf34dd1f1523bfec45852d380b26601#diff-ae62a005780cc730c558e3e4f47cc544R94
    next unless record.is_a? Hash
    sumo_metadata = record.fetch('_sumo_metadata', {:source => record[@source_name_key] })
    key           = sumo_key(sumo_metadata, chunk)
    log_format    = sumo_metadata['log_format'] || @log_format

    # Strip any unwanted newlines
    record[@log_key].chomp! if record[@log_key] && record[@log_key].respond_to?(:chomp!)

    case @data_type
    when 'logs'
      case log_format
      when 'text'
        if !record.has_key?(@log_key)
          log.warn "log key `#{@log_key}` has not been found in the log"
        end
        log = log_to_str(record[@log_key])
      when 'json_merge'
        if @add_timestamp
          record = { @timestamp_key => sumo_timestamp(time) }.merge(record)
        end
        log = dump_log(merge_json(record))
      when 'fields'
        if @add_timestamp
          record = {  @timestamp_key => sumo_timestamp(time) }.merge(record)
        end
        log = dump_log(record)
      else
        if @add_timestamp
          record = { @timestamp_key => sumo_timestamp(time) }.merge(record)
        end
        log = dump_log(record)
      end
    when 'metrics'
      log = log_to_str(record[@log_key])
    end

    unless log.nil?
      if messages_list.key?(key)
        messages_list[key].push(log)
      else
        messages_list[key] = [log]
      end
    end

  end

  chunk_id = "##{chunk.dump_unique_id_hex(chunk.unique_id)}"
  # Push logs to sumo
  messages_list.each do |key, messages|
    source_name, source_category, source_host, fields = key[:source_name], key[:source_category],
      key[:source_host], key[:fields]

    # Merge custom and record fields
    if fields.nil? || fields.strip.length == 0
      fields = @custom_fields
    else
      fields = [fields,@custom_fields].compact.join(",")
    end

    if @max_request_size <= 0
      messages_to_send = [messages]
    else
      messages_to_send = []
      current_message = []
      current_length = 0
      messages.each do |message|
        current_message.push message
        current_length += message.length

        if current_length > @max_request_size
          messages_to_send.push(current_message)
          current_message = []
          current_length = 0
        end
        current_length += 1  # this is for newline
      end
      if current_message.length > 0
        messages_to_send.push(current_message)
      end
    end
    
    messages_to_send.each_with_index do |message, i|
      retries = 0
      start_time = Time.now
      sleep_time = @retry_min_interval

      while true
        common_log_part = "#{@data_type} records with source category '#{source_category}', source host '#{source_host}', source name '#{source_name}', chunk #{chunk_id}, try #{retries}, batch #{i}"

        begin
          @log.debug { "Sending #{message.count}; #{common_log_part}" }

          @sumo_conn.publish(
            message.join("\n"),
              source_host         =source_host,
              source_category     =source_category,
              source_name         =source_name,
              data_type           =@data_type,
              metric_data_format  =@metric_data_format,
              collected_fields    =fields,
              dimensions          =@custom_dimensions
          )
          break
        rescue => e
          if !@use_internal_retry
            raise e
          end
          # increment retries
          retries += 1

          log.warn "error while sending request to sumo: #{e}; #{common_log_part}"
          log.warn_backtrace e.backtrace

          # drop data if
          #   - we reached the @retry_max_times limit
          #   - or we exceeded @retry_timeout
          if (retries >= @retry_max_times && @retry_max_times > 0) || (Time.now > start_time + @retry_timeout && @retry_timeout > 0)
            log.warn "dropping records; #{common_log_part}"
            break
          end

          log.info "going to retry to send data at #{Time.now + sleep_time}; #{common_log_part}"
          sleep sleep_time

          sleep_time *= 2
          if sleep_time > @retry_max_interval
            sleep_time = @retry_max_interval
          end
        end
      end
    end
  end

end
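
To make the batching rule concrete: messages are appended greedily, and a batch is closed once its cumulative length (plus one byte per joining newline) exceeds @max_request_size, so a batch can overshoot the cap by the message that tipped it over. A standalone restatement of that loop with invented inputs:

messages = ['aaaa', 'bbbb', 'cccc']   # 4 bytes each
max_request_size = 10

batches, current, length = [], [], 0
messages.each do |m|
  current << m
  length += m.length
  if length > max_request_size        # checked after appending: a soft cap
    batches << current
    current, length = [], 0
  end
  length += 1                         # account for the joining newline
end
batches << current unless current.empty?
batches  # => [["aaaa", "bbbb", "cccc"]]  (the third message tips the batch past 10)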