class LogStash::Outputs::AzureLogAnalytics

Public Instance Methods

multi_receive(events)
# File lib/logstash/outputs/azure_loganalytics.rb, line 64
def multi_receive(events)
  # Receives a batch of Logstash events and delivers them to the Azure Log
  # Analytics DataCollector API, grouped by resolved log type and split into
  # slices of at most @max_batch_items documents per POST.
  #
  # Correlation id so the start/end of one flush can be matched in the logs.
  flush_guid = SecureRandom.uuid
  @logger.debug("Start receive: #{flush_guid}. Received #{events.length} events")

  # Map of log_type => list of documents (themselves hashes) to send to Log Analytics.
  documents_by_log_type = {}
  events.each do |event|
    document = {}

    # @log_type may contain sprintf references (e.g. %{type}); resolve per event.
    log_type_for_event = event.sprintf(@log_type)

    event_hash = event.to_hash
    if @key_names.empty?
      # No key filter configured: ship the whole event.
      document = event_hash
    else
      # Keep only configured keys that actually exist on this event,
      # converting values whose key has an explicit type mapping.
      keys_intersection = @key_names & event_hash.keys
      keys_intersection.each do |key|
        document[key] =
          if @key_types.include?(key)
            convert_value(@key_types[key], event_hash[key])
          else
            event_hash[key]
          end
      end
    end
    # Skip if document doesn't contain any items
    next if document.empty?

    # ||= replaces the explicit `== nil` check from the original.
    (documents_by_log_type[log_type_for_event] ||= []).push(document)
  end

  # Skip in case there are no candidate documents to deliver
  if documents_by_log_type.empty?
    @logger.debug("No documents in batch. Skipping")
    return
  end

  # NOTE: block params renamed so they no longer shadow the method's `events`.
  documents_by_log_type.each do |log_type_for_events, docs|
    docs.each_slice(@max_batch_items) do |event_batch|
      begin
        @logger.debug("Posting log batch (log count: #{event_batch.length}) as log type #{log_type_for_events} to DataCollector API. First log: " + (event_batch[0].to_json).to_s)
        res = @client.post_data(log_type_for_events, event_batch, @time_generated_field)
        if Azure::Loganalytics::Datacollectorapi::Client.is_success(res)
          @logger.debug("Successfully posted logs as log type #{log_type_for_events} with result code #{res.code} to DataCollector API")
        else
          @logger.error("DataCollector API request failure (log type #{log_type_for_events}): error code: #{res.code}, data=>" + (event_batch.to_json).to_s)
        end
      rescue StandardError => ex
        # Was `rescue Exception`, which also swallows SignalException and
        # SystemExit; StandardError is the correct net for API/serialization
        # failures and still keeps one bad batch from aborting the flush.
        @logger.error("Exception occured in posting to DataCollector API as log type #{log_type_for_events}: '#{ex}', data=>" + (event_batch.to_json).to_s)
      end
    end
  end
  @logger.debug("End receive: #{flush_guid}")

end
register()
# File lib/logstash/outputs/azure_loganalytics.rb, line 48
def register
  # Lazy-load the DataCollector API client only when the output is registered.
  require 'azure/loganalytics/datacollectorapi/client'

  # Every configured key type must be one of the kinds the API understands.
  supported = %w[string double boolean]
  @key_types.each do |key, type|
    unless supported.include?(type.downcase)
      raise ArgumentError, "Key type(#{type}) for key(#{key}) must be either string, boolean, or double"
    end
  end

  ## Start
  @client = Azure::Loganalytics::Datacollectorapi::Client.new(@customer_id, @shared_key, @endpoint)

end

Private Instance Methods

convert_value(type, val)
# File lib/logstash/outputs/azure_loganalytics.rb, line 124
# Coerces a raw event value to the configured Log Analytics key type.
#
# type - String type name ("string", "double", "boolean"; case-insensitive).
# val  - the raw value taken from the event hash (any type).
#
# Returns the coerced value; unknown types (and unparsable numerics)
# fall through to the original value unchanged.
def convert_value(type, val)
  case type.downcase
  when "boolean"
    # to_s guards against non-String values (true/false, nil, numerics),
    # which have no #downcase and previously raised NoMethodError here.
    val.to_s.downcase == 'true'
  when "double"
    # Try Integer first so whole numbers are not rendered as floats,
    # then Float, then give up and pass the raw value through.
    Integer(val) rescue Float(val) rescue val
  else
    val
  end
end