class Fluent::Plugin::AzureEventHubsOutputSplunk
Constants
- DEFAULT_BUFFER_TYPE
Public Instance Methods
configure(conf)
Calls superclass method
  # File lib/fluent/plugin/out_azureeventhubs_splunk.rb, line 29
  def configure(conf)
    compat_parameters_convert(conf, :buffer, :inject)
    super
    case @type
    when 'amqps'
      # Only the HTTPS transport is implemented.
      raise NotImplementedError
    else
      require_relative 'azureeventhubsplunk/http'
      @sender = AzureEventHubsSplunkHttpSender.new(self, @connection_string, @hub_name,
                                                   @expiry_interval, @proxy_addr, @proxy_port,
                                                   @open_timeout, @read_timeout)
    end
    # Chunks must be keyed by tag so write() can recover each event's tag.
    raise Fluent::ConfigError, "'tag' in chunk_keys is required." unless @chunk_key_tag
  end
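A minimal configuration sketch using Fluentd's standard output test driver. The connection string, hub name, and buffer type are placeholder values; the <buffer tag> section satisfies the chunk_key_tag check above:

  require 'fluent/test'
  require 'fluent/test/driver/output'
  require 'fluent/plugin/out_azureeventhubs_splunk'

  Fluent::Test.setup

  # Placeholder credentials; substitute a real Event Hubs connection string.
  conf = %[
    connection_string Endpoint=sb://example.servicebus.windows.net/;SharedAccessKeyName=key;SharedAccessKey=secret
    hub_name my-hub
    <buffer tag>
      @type memory
    </buffer>
  ]

  driver = Fluent::Test::Driver::Output.new(Fluent::Plugin::AzureEventHubsOutputSplunk)
  driver.configure(conf)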
format(tag, time, record)
  # File lib/fluent/plugin/out_azureeventhubs_splunk.rb, line 42
  def format(tag, time, record)
    record = inject_values_to_record(tag, time, record)
    [tag, time, record].to_msgpack
  end
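Each buffered entry is therefore a msgpack-encoded [tag, time, record] triple. A minimal round-trip sketch with placeholder values:

  require 'msgpack'

  # Placeholder event; the tag and record body are illustrative only.
  encoded = ['app.logs', Time.now.to_i, { 'message' => 'hello' }].to_msgpack

  # write() recovers the same triple via chunk.msgpack_each:
  tag, time, record = MessagePack.unpack(encoded)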
formatted_to_msgpack_binary?()
  # File lib/fluent/plugin/out_azureeventhubs_splunk.rb, line 47
  def formatted_to_msgpack_binary?
    true
  end
process_in_batches(records)
This will need tuning depending on Event Hubs/Splunk payload limits. It also ought to have some error handling.
  # File lib/fluent/plugin/out_azureeventhubs_splunk.rb, line 70
  def process_in_batches(records)
    records.each_slice(@max_events_per_send) do |batch|
      payload = { "records" => batch }
      log.info "Sending batch of #{batch.size} records to EventHub..."
      @sender.send_w_properties(payload, @message_properties)
    end
  end
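each_slice caps how many events go into a single send. A minimal illustration of the batching, with a slice size of 3 standing in for max_events_per_send:

  records = (1..7).to_a
  records.each_slice(3) { |batch| p batch }
  # prints [1, 2, 3], then [4, 5, 6], then [7]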
write(chunk)
  # File lib/fluent/plugin/out_azureeventhubs_splunk.rb, line 51
  def write(chunk)
    log.info "Have EventHub chunk to write..."
    # Initialize here: assigning `records ||= []` inside the block would make it
    # block-local, leaving `records` undefined after msgpack_each returns.
    records = []
    chunk.msgpack_each { |tag, time, record|
      log.debug record.to_s
      record['tag'] = tag if @include_tag
      record[@tag_time_name] = time if @include_time
      records << record
    }
    log.info "Processed batch of #{records.size}. Forwarding to EventHub..."
    process_in_batches(records)
  end
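With include_tag and include_time both enabled, each record is decorated before batching. A sketch of the resulting payload shape, using placeholder values and 'time' standing in for whatever @tag_time_name is configured to:

  # Hypothetical input event
  tag    = 'app.logs'
  time   = 1700000000
  record = { 'message' => 'hello' }

  # Decoration performed in write()
  record['tag']  = tag
  record['time'] = time   # key name comes from @tag_time_name

  # process_in_batches wraps each slice into the payload sent onward:
  payload = { 'records' => [record] }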