elasticdatastore.py (forked from cyberFund/ethdrain)
import datetime
import logging

from datastore import Datastore
from elasticsearch import Elasticsearch, helpers
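

# ElasticDatastore buffers Ethereum blocks and transactions as Elasticsearch
# bulk actions (built in extract(), flushed in save()) and exposes static
# helpers the ethdrain driver uses to query, resume and re-index the indices.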
class ElasticDatastore(Datastore):

    TX_INDEX_NAME = "ethereum-transaction"
    B_INDEX_NAME = "ethereum-block"
    DELTA_BLOCKS = 100000

    def __init__(self):
        super().__init__()
        self.elastic = Elasticsearch([self.es_url], maxsize=self.es_maxsize,
                                     timeout=30, max_retries=10, retry_on_timeout=True)

    @classmethod
    def config(cls, es_url, es_maxsize):
        cls.es_url = es_url
        cls.es_maxsize = es_maxsize
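
    # Turn a raw eth_getBlockByNumber JSON-RPC response into bulk actions:
    # one "tx" document per transaction (selected fields only, including the
    # Parity-style "creates" field) and one "b" document for the block itself.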
    def extract(self, rpc_block):
        block = rpc_block["result"]
        transactions = block["transactions"]
        # tx_hashes = list()
        # tx_value_sum = 0

        block_nb = int(block["number"], 0)
        block["timestamp"] = datetime.datetime.fromtimestamp(int(block["timestamp"], 0))
        block_timestamp = block["timestamp"]

        for tx in transactions:
            tx["blockNumber"] = block_nb
            tx["blockTimestamp"] = block_timestamp
            tx["transactionIndex"] = int(tx["transactionIndex"], 0)
            # Convert wei into ether
            tx["value"] = int(tx["value"], 0) / self.WEI_ETH_FACTOR
            # tx_value_sum += tx["value"]

            self.actions.append(
                {"_index": self.TX_INDEX_NAME, "_type": "tx", "_id": tx["hash"],
                 "_source": {key: tx[key] for key in ["blockNumber", "blockTimestamp",
                                                      "from", "input", "to", "value",
                                                      "transactionIndex", "creates"]}}
            )
            # tx_hashes.append(tx["hash"])

        # block["transactions"] = tx_hashes
        # block["number"] = block_nb
        # block["gasLimit"] = int(block["gasLimit"], 0)
        # block["gasUsed"] = int(block["gasUsed"], 0)
        # block["size"] = int(block["size"], 0)
        # block["transactionCount"] = len(tx_hashes)
        # block["txValueSum"] = tx_value_sum

        self.actions.append(
            {"_index": self.B_INDEX_NAME, "_type": "b", "_id": block_nb,
             "_source": {key: block[key] for key in ["timestamp", "miner", "uncles"]}}
        )
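
    # Flush the buffered actions to Elasticsearch in one bulk request and
    # report how many block and transaction documents were indexed; on a bulk
    # error, log the block numbers involved.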
    def save(self):
        nb_blocks = sum(act["_type"] == "b" for act in self.actions)
        nb_txs = sum(act["_type"] == "tx" for act in self.actions)

        if self.actions:
            try:
                helpers.bulk(self.elastic, self.actions)
                return "{} blocks and {} transactions indexed".format(nb_blocks, nb_txs)
            except helpers.BulkIndexError as exception:
                print("Issue with {} blocks:\n{}\n".format(nb_blocks, exception))
                blocks = (act for act in self.actions if act["_type"] == "b")
                for block in blocks:
                    logging.error("block: " + str(block["_id"]))
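
    # Thin wrappers around the Elasticsearch client; each call opens a
    # short-lived client for the given URL and forwards the keyword arguments.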
    @staticmethod
    def request_search(url, **kwargs):
        return Elasticsearch([url]).search(**kwargs)

    @staticmethod
    def request_count(url, **kwargs):
        return Elasticsearch([url]).count(**kwargs)

    @staticmethod
    def request_delete(url, **kwargs):
        return Elasticsearch([url]).delete_by_query(**kwargs)
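
    # Determine the block number to resume indexing from: if the highest
    # indexed block number plus one equals the number of block documents, the
    # indexed prefix is assumed contiguous and that block is returned;
    # otherwise the search window is shrunk by DELTA_BLOCKS and re-checked,
    # falling back to 0 (full re-index).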
    @staticmethod
    def find_start_block(url):
        max_block_number_in_table = ElasticDatastore.request_search(
            url,
            index=ElasticDatastore.B_INDEX_NAME,
            doc_type='b',
            size=0,
            body={"aggs": {
                "max_number": {"max": {"field": "number"}}
            }})['aggregations']['max_number']['value']

        count_blocks_in_table = ElasticDatastore.request_count(
            url,
            index=ElasticDatastore.B_INDEX_NAME,
            doc_type='b'
        )['count']

        for _ in range(int(count_blocks_in_table / ElasticDatastore.DELTA_BLOCKS)):
            if max_block_number_in_table + 1 == count_blocks_in_table:
                return max_block_number_in_table
            elif count_blocks_in_table <= ElasticDatastore.DELTA_BLOCKS:
                return 0
            else:
                count_blocks_in_table -= ElasticDatastore.DELTA_BLOCKS
                max_block_number_in_table = ElasticDatastore.request_search(
                    url,
                    index=ElasticDatastore.B_INDEX_NAME,
                    doc_type='b',
                    size=0,
                    body={"query": {"range": {
                              "number": {"lte": count_blocks_in_table}
                          }},
                          "aggs": {
                              "max_number": {"max": {"field": "number"}}
                          }})['aggregations']['max_number']['value']
        return 0
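
    # Delete every block document with number >= start_block and every
    # transaction document with blockNumber >= start_block so that range can
    # be re-indexed cleanly; returns 1 on success, 0 on any exception.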
    @staticmethod
    def delete_replacement_rows(url, start_block):
        try:
            ElasticDatastore.request_delete(
                url,
                index=ElasticDatastore.B_INDEX_NAME,
                doc_type='b',
                body={"query": {
                    "range": {
                        "number": {
                            "gte": start_block
                        }
                    }
                }},
                request_timeout=3000)

            ElasticDatastore.request_delete(
                url,
                index=ElasticDatastore.TX_INDEX_NAME,
                doc_type='tx',
                body={"query": {
                    "range": {
                        "blockNumber": {
                            "gte": start_block
                        }
                    }
                }},
                request_timeout=3000)
            return 1
        except Exception as ex:
            print('Exception: ', ex)
            return 0
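

# ---------------------------------------------------------------------------
# Illustrative usage (not part of the original module; a sketch of how the
# ethdrain driver is expected to wire this class up, with a placeholder URL
# and rpc_block):
#
#     ElasticDatastore.config("http://localhost:9200", es_maxsize=10)
#     start = ElasticDatastore.find_start_block("http://localhost:9200")
#     ElasticDatastore.delete_replacement_rows("http://localhost:9200", start)
#
#     store = ElasticDatastore()
#     store.extract(rpc_block)   # rpc_block: raw eth_getBlockByNumber response
#     print(store.save())
# ---------------------------------------------------------------------------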