module Backup::Config::DSL

Constants

Elasticsearch

Integration with the Backup gem [backup.github.io/backup/v4/]

This extension allows you to back up Elasticsearch indices as flat JSON files on disk.
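
Each document is dumped into a separate file, containing the raw hit returned by Elasticsearch, so a backed-up file looks roughly like this (a hypothetical document from an articles index; all values are illustrative):

{
  "_index"  : "articles",
  "_type"   : "article",
  "_id"     : "1",
  "_score"  : 1.0,
  "_source" : { "title" : "Test article" }
}

The recovery example at the end of this page relies on exactly these fields.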

@example Use the Backup gem's DSL to configure the backup

require 'elasticsearch/extensions/backup'

Model.new(:elasticsearch_backup, 'Elasticsearch') do

  database Elasticsearch do |db|
    db.url     = 'http://localhost:9200'
    db.indices = 'articles,people'
    db.size    = 500
    db.scroll  = '10m'
  end

  store_with Local do |local|
    local.path = '/tmp/backups'
    local.keep = 3
  end

  compress_with Gzip
end

Perform the backup with the Backup gem's command line utility:

$ backup perform -t elasticsearch_backup

The Backup gem can store your backup files on S3, Dropbox and other cloud providers, send notifications about the operation, and so on; read more in the gem documentation.
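For instance, storing the archive on S3 and sending an e-mail notification on failure amounts to adding the corresponding blocks to the model (a sketch with placeholder credentials and without the mail delivery settings; see the Backup gem documentation for the full set of options):

Model.new(:elasticsearch_backup, 'Elasticsearch') do
  # ... database configuration as above ...

  store_with S3 do |s3|
    s3.access_key_id     = 'my_access_key_id'     # placeholder credentials
    s3.secret_access_key = 'my_secret_access_key'
    s3.region            = 'us-east-1'
    s3.bucket            = 'my-backups'           # hypothetical bucket name
    s3.path              = 'elasticsearch'
  end

  notify_by Mail do |mail|
    mail.on_success = false
    mail.on_failure = true
  end
end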

@example Use the integration as a standalone script (e.g. in a Rake task)

require 'backup'
require 'elasticsearch/extensions/backup'

Backup::Logger.configure do
  logfile.enabled   = true
  logfile.log_path  = '/tmp/backups/log'
end

Backup::Logger.start!

backup  = Backup::Model.new(:elasticsearch, 'Backup Elasticsearch') do
  database Backup::Database::Elasticsearch do |db|
    db.indices = 'test'
  end

  store_with Backup::Storage::Local do |local|
    local.path = '/tmp/backups'
  end
end

backup.perform!
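
The same script can be wrapped in a Rake task; the task body is plain Ruby, so the code runs unchanged (a minimal sketch, with arbitrary namespace and task names):

require 'backup'
require 'elasticsearch/extensions/backup'

namespace :backup do
  desc 'Back up Elasticsearch indices to /tmp/backups'
  task :elasticsearch do
    Backup::Logger.configure do
      logfile.enabled   = true
      logfile.log_path  = '/tmp/backups/log'
    end
    Backup::Logger.start!

    backup = Backup::Model.new(:elasticsearch, 'Backup Elasticsearch') do
      database Backup::Database::Elasticsearch do |db|
        db.indices = 'test'
      end

      store_with Backup::Storage::Local do |local|
        local.path = '/tmp/backups'
      end
    end

    backup.perform!
  end
end

Run it with:

$ rake backup:elasticsearch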

@example A simple recovery script for the backup created in the previous examples

PATH = '/path/to/backup/'

require 'multi_json'
require 'elasticsearch'
client  = Elasticsearch::Client.new log: true
payload = []

Dir[ File.join( PATH, '**', '*.json' ) ].each do |file|
  document = MultiJson.load(File.read(file))

  # Keep the document metadata (_index, _type, _id) and pass the
  # original document body under the `data` key
  item = document.merge(data: document['_source'])
  item.delete('_source')
  item.delete('_score')

  payload << { index: item }

  # Send the documents to Elasticsearch in batches of 100
  if payload.size == 100
    client.bulk body: payload
    payload = []
  end
end

# Send any remaining documents
client.bulk body: payload unless payload.empty?
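
Once the script finishes, you can check the restored data, for example by refreshing the index and inspecting the document count (assuming the articles index from the first example):

client.indices.refresh index: 'articles'
puts client.count(index: 'articles')['count']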

@see backup.github.io/backup/v4/