forked from pombredanne/es-backup-scripts
restore.py
#!/usr/bin/python
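#
# Restore an Elasticsearch index from a "<indexname>.esbackup" archive:
# a tarball holding the index settings, mapping schema and bulk data,
# in the layout produced by the companion backup script in this repository.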
# Dependencies
import sys
import os
import time
import json
import tarfile
import shutil
import requests
# Help text
if len(sys.argv) < 2:
    print "Usage:"
    print " python restore.py (indexname)"
    print " python restore.py (indexname) (elasticsearch host)"
    print " python restore.py (indexname) (elasticsearch host) (elasticsearch port)"
    exit(0)
# Get the elasticsearch server
if len(sys.argv) > 2:
    host = sys.argv[2]
    if len(sys.argv) > 3:
        port = sys.argv[3]
    else:
        port = "9200"
else:
    host = "localhost"
    port = "9200"
url = "http://%s:%s" % (host, port)
print "Using ElasticSearch at %s" % url
try:
    r = requests.get(url)
    if r.status_code != 200:
        print "Error hitting ElasticSearch on %s, response code was %i" % (url, r.status_code)
        exit(1)
    else:
        print "Verified ElasticSearch server"
except requests.exceptions.RequestException:
    print "Unable to hit ElasticSearch on %s" % url
    exit(1)
# Check with the user
index = sys.argv[1]
print "Restoring index '%s'" % index
print "Ctrl+C now to abort..."
time.sleep(3)
# Check the index doesn't already exist
r = requests.get("%s/%s/_mapping" % (url, index))
if r.status_code != 404:
    print "The index already exists. Please ensure it does not exist first."
    print "This command can be executed to do this:"
    print "curl -XDELETE %s/%s" % (url, index)
    exit(1)
# Unzip the backup file
filename = "%s.esbackup" % index
tar = tarfile.open(filename)
tar.extractall()
tar.close()
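# The archive unpacks to a directory named after the index, containing a
# "settings" file, a "schema" file and a "data/" directory of JSON batches.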
# Read the settings
settings_file = open("%s/settings" % index, "r")
settings = json.loads(settings_file.read())
settings_file.close()
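# The settings file is keyed by the real index name on the server, which can
# differ from the requested name if the backup was taken through an alias.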
main_index = settings.keys()[0]
settings = settings[main_index]
if 'settings' in settings:
    settings = settings["settings"]
# Read the schema
schema_file = open("%s/schema" % index, "r")
schema = json.loads(schema_file.read())
schema_file.close()
schema = schema[main_index]
if 'mappings' in schema:
    schema = schema['mappings']
# Create the index on the server
data={}
data["mappings"] = schema
data["settings"] = settings
# Newer Elasticsearch releases require an explicit Content-Type header on JSON requests
r = requests.put("%s/%s" % (url, main_index), data=json.dumps(data), headers={"Content-Type": "application/json"})
if r.status_code != 200:
    print "Unable to put the index to the server (%i), aborting" % r.status_code
    print r.content
    exit(1)
# Load up the data files and put them all in
data_files = os.listdir("%s/data" % index)
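# Each data file holds a JSON array of hits; rebuild them as bulk API requests
# (one "index" action metadata line plus one source line per document).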
for dfile in data_files:
    data_file = open("%s/data/%s" % (index, dfile))
    items = json.loads(data_file.read())
    data_file.close()
    bulk = ""
    for item in items:
        source = item["_source"]
        del item["_source"]
        command = {}
        command["index"] = item
        bulk = bulk + json.dumps(command) + "\n" + json.dumps(source) + "\n"
    print "Putting %i items" % len(items)
    r = requests.post("%s/_bulk" % url, data=bulk, headers={"Content-Type": "application/x-ndjson"})
    if r.status_code != 200:
        print "Failed with code %i: %s" % (r.status_code, r.reason)
        exit(1)
# Create index alias if needed
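# If the backup's real index name differs from the requested name, point the
# requested name at it as an alias so lookups by the original name still work.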
if main_index != index:
    alias = {}
    alias["actions"] = [{"add": {"index": main_index, "alias": index}}]
    r = requests.post("%s/_aliases" % url, data=json.dumps(alias), headers={"Content-Type": "application/json"})
    if r.status_code != 200:
        print "Unable to create the alias of the index (%s), aborting" % main_index
        print r.content
        exit(1)
# Clean up the directory
shutil.rmtree(index)
print "Finished"