init
This commit is contained in:
commit
4ae577a648
|
@ -0,0 +1,3 @@
|
|||
# Local runtime data (block JSON cache, Elasticsearch volumes)
/data
# JetBrains IDE settings
.idea
# Python virtualenv created by `make setup`
.venv
|
|
@ -0,0 +1,51 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import requests
|
||||
import json
|
||||
from time import sleep
|
||||
|
||||
# Headers for plain Elasticsearch REST calls.
es_headers = {'Content-Type': 'application/json'}
# Kibana's API additionally requires the kbn-xsrf header on writes.
# NOTE(review): unused in this script — presumably kept for future Kibana setup; confirm.
kibana_headers = {'Content-Type': 'application/json', 'kbn-xsrf': 'true'}

# Index template for the monthly wownero-block-data-* indices: maps the
# "datetime" field (written by main.py as 'YYYY-MM-DD HH:mm:ss') as a date
# with a matching explicit format.
# NOTE(review): the name "nginx_pattern" looks like a copy/paste leftover — confirm.
nginx_pattern = {
    "index_patterns": ["wownero-block-data-*"],
    "mappings": {
        "properties": {
            "datetime": {
                "type": "date",
                "format": "yyyy-MM-dd HH:mm:ss"
            }
        }
    }
}

# Transient cluster settings that lower the disk watermarks to tiny values
# so a small dev cluster doesn't flip indices read-only on a nearly-full disk.
remove_low_disk = {
    "transient": {
        "cluster.routing.allocation.disk.watermark.low": "30mb",
        "cluster.routing.allocation.disk.watermark.high": "20mb",
        "cluster.routing.allocation.disk.watermark.flood_stage": "10mb",
        "cluster.info.update.interval": "1m"
    }
}
|
||||
|
||||
def wait_for_es():
    """Block until Elasticsearch answers its health endpoint.

    Polls ``/_cat/health`` every 15 seconds and returns as soon as a
    request completes. Any HTTP response (even a 5xx) means the server
    is reachable, so only connection-level failures trigger a retry.
    """
    while True:
        try:
            # timeout so a half-up container can't hang this forever;
            # narrow except so Ctrl-C (KeyboardInterrupt) still works.
            requests.get('http://elasticsearch:9200/_cat/health',
                         headers=es_headers, timeout=10)
            return
        except requests.exceptions.RequestException:
            print('[!] Elasticsearch not ready yet....waiting')
            sleep(15)
|
||||
|
||||
def load_index_settings():
    """Publish the index template and cluster disk-watermark settings.

    Idempotent: both PUTs can be re-run safely. Responses are printed
    so failures are visible in the container logs.
    """
    print('[+] Adding index mapping and cluster settings')
    # requests' json= kwarg serializes and sets Content-Type for us;
    # no need for data=json.dumps(...).
    r1 = requests.put('http://elasticsearch:9200/_template/wownero-block-data',
                      headers=es_headers, json=nginx_pattern, timeout=10)
    r2 = requests.put('http://elasticsearch:9200/_cluster/settings',
                      headers=es_headers, json=remove_low_disk, timeout=10)
    print(r1)
    print(r2)
|
||||
|
||||
if __name__ == '__main__':
    # Wait for the cluster to come up, then push mappings/settings once.
    wait_for_es()
    load_index_settings()
|
|
@ -0,0 +1,5 @@
|
|||
# One-shot image that waits for Elasticsearch, then publishes the index
# template and cluster settings (run by docker-compose's "setup" service).
FROM python:3
# Only dependency of setup_es.py.
RUN python3 -m pip install requests
WORKDIR /srv
COPY ./files/setup_es.py .
# -u: unbuffered stdout so progress messages appear in `docker-compose logs`.
ENTRYPOINT ["python3", "-u", "setup_es.py"]
|
|
@ -0,0 +1,36 @@
|
|||
# NOTE(review): only format/help are declared .PHONY; the other targets are
# phony too (no files named up/run/ps/... are produced) — confirm intended.
.PHONY: format help

# Help system from https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html
.DEFAULT_GOAL := help

# Parses the "target: ## description" annotations below into a help screen.
help:
	@grep -E '^[a-zA-Z0-9_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}'

setup: ## Install Python dependencies
	python3 -m venv .venv
	.venv/bin/pip install -r requirements.txt

up: ## Run all the containers in the background
	mkdir -p data/logs
	docker-compose up -d --build

run: ## Run Python script to fetch blocks and update Elasticsearch
	.venv/bin/python3 main.py

ps: ## Show currently running containers
	docker-compose ps

logs: ## Begin streaming logs to terminal
	docker-compose logs -f

stop: ## Stop all containers
	docker-compose stop

start: ## Start all containers
	docker-compose start

down: ## Stop and remove all containers
	docker-compose down --remove-orphans

clean: ## Stop and remove all containers and their data
	rm -rf ./data/elasticsearch ./data/logstash/
|
|
@ -0,0 +1,28 @@
|
|||
version: '3'
services:
  # One-shot container (built from Dockerfiles/setup) that waits for
  # Elasticsearch, then publishes the index template and cluster settings.
  setup:
    build:
      context: ./Dockerfiles
      dockerfile: setup
  kibana:
    image: docker.elastic.co/kibana/kibana:7.1.0
    ports:
      - 5601:5601
    environment:
      ELASTICSEARCH_HOSTS: http://elasticsearch:9200
  # Single-node dev cluster; data persisted on the host under ./data/elasticsearch.
  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:7.1.0
    environment:
      - node.name=es01
      - cluster.name=wownero-network
      - discovery.type=single-node
      # Lock the JVM heap in RAM (pairs with the memlock ulimits below).
      - bootstrap.memory_lock=true
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
    ulimits:
      memlock:
        soft: -1
        hard: -1
    ports:
      - 9200:9200
    volumes:
      - ./data/elasticsearch:/usr/share/elasticsearch/data
|
|
@ -0,0 +1,51 @@
|
|||
import requests
|
||||
import os
|
||||
import arrow
|
||||
import json
|
||||
|
||||
# Base shape of a JSON-RPC 2.0 request body; method/params are filled in per call.
payload = {'jsonrpc': '2.0', 'id': '0', 'method': '', 'params': {}}
headers = {'Content-Type': 'application/json'}
# Wownero daemon RPC endpoint.
daemon = 'http://crypto.int.lzahq.tech:34568'
json_rpc = daemon + '/json_rpc'
# Local cache directory for fetched block JSON (created by `make up`).
data_path = './data/logs'
|
||||
|
||||
def make_payload(rpc_method, params=None):
    """Build a JSON-RPC 2.0 request body for the given method.

    :param rpc_method: JSON-RPC method name (e.g. 'get_block').
    :param params: optional dict of parameters for the call.
    :return: a fresh request dict on every call.

    The original mutated a shared module-level dict (and used a mutable
    default argument), so successive calls clobbered each other and all
    callers saw the same dict object; returning a new dict fixes both.
    """
    return {
        'jsonrpc': '2.0',
        'id': '0',
        'method': rpc_method,
        'params': {} if params is None else params,
    }
|
||||
|
||||
def make_rpc(rpc_method, http_method='get', _payload=None):
    """Call the daemon's JSON-RPC endpoint and return its 'result' field.

    :param rpc_method: JSON-RPC method name.
    :param http_method: 'get' uses HTTP GET; anything else POSTs.
    :param _payload: optional params dict for the RPC call.
    :raises requests.HTTPError: on non-2xx responses.
    :raises requests.Timeout: if the daemon doesn't answer within 5s.
    """
    # _payload=None instead of a mutable {} default (shared across calls).
    req = requests.get if http_method == 'get' else requests.post
    r = req(
        json_rpc, timeout=5, headers=headers,
        json=make_payload(rpc_method, {} if _payload is None else _payload)
    )
    r.raise_for_status()
    return r.json()['result']
|
||||
|
||||
def run():
    """Fetch every block from the daemon and index it into Elasticsearch.

    Walks from the chain tip down to height 1, skipping blocks whose JSON
    is already cached under ``data_path`` so reruns are incremental. Each
    block is POSTed into a monthly index (wownero-block-data-YYYY-MM) and
    then written to disk only after indexing succeeds.

    :raises requests.HTTPError: if the daemon or Elasticsearch rejects a call.
    """
    top_block = make_rpc('get_block_count', 'get')['count']
    for block in range(top_block - 1, 0, -1):
        _path = f'{data_path}/{block}.json'
        if os.path.isfile(_path):
            continue  # already fetched and indexed on a previous run
        block_data = make_rpc('get_block', 'get', {'height': block})['block_header']
        if block_data['timestamp'] == 0:
            # Early blocks carry no timestamp; pin them to launch day.
            # (Original called arrow.get(timestamp) first and threw it away.)
            ts = arrow.get('2018-04-01 04:20:00')
        else:
            ts = arrow.get(block_data['timestamp'])
        block_data['datetime'] = ts.format('YYYY-MM-DD HH:mm:ss')
        index_name = f'wownero-block-data-{ts.format("YYYY-MM")}'
        r = requests.post(
            f'http://localhost:9200/{index_name}/_doc/{block_data["height"]}',
            json=block_data, timeout=5, headers=headers
        )
        r.raise_for_status()
        with open(_path, 'w') as f:
            json.dump(block_data, f)
        print(f'Added file {_path} for index {index_name}')
|
||||
|
||||
if __name__ == '__main__':
    # Sync all blocks from the daemon into Elasticsearch.
    run()
|
|
@ -0,0 +1,9 @@
|
|||
# Pinned dependencies for main.py: requests + arrow, plus their
# transitive dependencies (certifi/chardet/idna/urllib3 for requests;
# python-dateutil/six/typing-extensions for arrow).
arrow==1.1.0
certifi==2020.12.5
chardet==4.0.0
idna==2.10
python-dateutil==2.8.1
requests==2.25.1
six==1.16.0
typing-extensions==3.10.0.0
urllib3==1.26.4
|
Loading…
Reference in New Issue