-
Notifications
You must be signed in to change notification settings - Fork 27
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #35 from KoleBarnes/feature/fastapi
Add support for running as an API server.
- Loading branch information
Showing
18 changed files
with
548 additions
and
162 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,8 @@ | ||
# REST API | ||
|
||
To run [fetch validator](README.md) as a web API, `cd fetch-validator-status` and run `IM=1 ./run.sh --web -v` to start the server. | ||
To run in debug mode, add `--debug`. | ||
|
||
## How To Use | ||
|
||
After running the command above, go to http://localhost:8080/ in your browser. Then click on one of the colored drop-downs and click the 'Try it out' button. Fill out any required fields, then click 'Execute'. This will give you a response containing a curl command, the request URL, and the response body. |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,139 +1,55 @@ | ||
import argparse | ||
import asyncio | ||
# import base58 | ||
# import base64 | ||
import json | ||
import os | ||
import sys | ||
# import datetime | ||
import urllib.request | ||
# from typing import Tuple | ||
|
||
# import nacl.signing | ||
|
||
import indy_vdr | ||
from indy_vdr.ledger import ( | ||
build_get_validator_info_request, | ||
build_get_txn_request, | ||
# Request, | ||
) | ||
from indy_vdr.pool import open_pool | ||
from util import log | ||
from plugin_collection import PluginCollection | ||
# import time | ||
from DidKey import DidKey | ||
from pool import PoolCollection | ||
from singleton import Singleton | ||
|
||
class NodeNotFound(Exception):
    """Raised when a requested node alias is not a member of the target network."""
|
||
class FetchStatus(object, metaclass=Singleton):
    # Singleton (via the project's Singleton metaclass) that serves status
    # requests using a shared PoolCollection.
    def __init__(self, verbose, pool_collection: PoolCollection):
        """Store the verbosity flag and the PoolCollection used to look up pools."""
        self.verbose = verbose
        self.pool_collection = pool_collection
|
||
async def fetch(self, network_id: str, monitor_plugins: PluginCollection, nodes: str = None, ident: DidKey = None): | ||
result = [] | ||
verifiers = {} | ||
|
||
pool, network_name = await self.pool_collection.get_pool(network_id) | ||
if ident: | ||
log(f"Building request with did: {ident.did} ...") | ||
request = build_get_validator_info_request(ident.did) | ||
ident.sign_request(request) | ||
else: | ||
log("Building an anonymous request ...") | ||
request = build_get_txn_request(None, 1, 1) | ||
|
||
from_nodes = [] | ||
if nodes: | ||
from_nodes = nodes.split(",") | ||
|
||
# Module-level verbosity flag; __main__ overwrites it from the --verbose option.
verbose = False


def log(*args):
    """Write *args to stderr, followed by a blank line, when verbose is enabled."""
    if not verbose:
        return
    print(*args, "\n", file=sys.stderr)
|
||
|
||
async def fetch_status(genesis_path: str, nodes: str = None, ident: DidKey = None, network_name: str = None):
    """Open the ledger pool, collect validator status, run plugins, and print JSON.

    Args:
        genesis_path: Path to the genesis transaction file describing the pool.
        nodes: Optional comma-delimited node aliases to query; all nodes when None.
        ident: Optional DidKey used to sign a privileged validator-info request;
            when None an anonymous GET_TXN request is submitted instead.
        network_name: Display name forwarded to the plugins.

    NOTE(review): relies on the module-level `monitor_plugins` created in
    __main__ — confirm before calling this from another entry point.
    """
    # open_pool can time out; retry a bounded number of times. The previous
    # loop could exhaust its attempts and fall through with `pool` unbound
    # (NameError) and its "attempts exhausted" branch was unreachable.
    pool = None
    for _attempt in range(3):
        try:
            pool = await open_pool(transactions_path=genesis_path)
            break
        except Exception:
            # indy-vdr raises its own error type on timeout; keep this broad
            # but no longer a bare `except:` (which would swallow SystemExit).
            log("Pool Timed Out! Trying again...")
    if pool is None:
        print("Unable to get pool response! 3 attempts were made. Exiting...")
        exit()

    result = []
    verifiers = {}

    if ident:
        # Privileged request signed with the supplied DID.
        request = build_get_validator_info_request(ident.did)
        ident.sign_request(request)
    else:
        # Anonymous fallback: read txn 1 from ledger 1.
        request = build_get_txn_request(None, 1, 1)

    from_nodes = []
    if nodes:
        from_nodes = nodes.split(",")
    response = await pool.submit_action(request, node_aliases = from_nodes)
    try:
        # Introduced in https://github.com/hyperledger/indy-vdr/commit/ce0e7c42491904e0d563f104eddc2386a52282f7
        verifiers = await pool.get_verifiers()
    except AttributeError:
        # Older indy-vdr without get_verifiers(); proceed without verifier info.
        pass

    result = await monitor_plugins.apply_all_plugins_on_value(result, network_name, response, verifiers)
    print(json.dumps(result, indent=2))
|
||
def get_script_dir():
    """Return the absolute directory containing this script (symlinks resolved)."""
    script_path = os.path.realpath(__file__)
    return os.path.dirname(script_path)
|
||
|
||
def download_genesis_file(url: str, target_local_path: str):
    """Download the genesis transaction file from `url` to `target_local_path`.

    Bug fix: the previous version immediately overwrote `target_local_path`
    with a hard-coded `<script dir>/genesis.txn`, silently ignoring the
    caller's --genesis-path / GENESIS_PATH setting.

    Args:
        url: Source URL of the genesis file.
        target_local_path: Destination path to write the file to.
    """
    log("Fetching genesis file ...")
    urllib.request.urlretrieve(url, target_local_path)
|
||
def load_network_list():
    """Load the known-network registry from networks.json next to this script."""
    networks_path = f"{get_script_dir()}/networks.json"
    with open(networks_path) as json_file:
        return json.load(json_file)
|
||
def list_networks():
    """Return the IDs (top-level keys of networks.json) of all known networks."""
    return load_network_list().keys()
|
||
if __name__ == "__main__":
    # Discover and instantiate monitoring plugins from the local 'plugins' package.
    monitor_plugins = PluginCollection('plugins')

    parser = argparse.ArgumentParser(description="Fetch the status of all the indy-nodes within a given pool.")
    parser.add_argument("--net", choices=list_networks(), help="Connect to a known network using an ID.")
    parser.add_argument("--list-nets", action="store_true", help="List known networks.")
    parser.add_argument("--genesis-url", default=os.environ.get('GENESIS_URL') , help="The url to the genesis file describing the ledger pool. Can be specified using the 'GENESIS_URL' environment variable.")
    parser.add_argument("--genesis-path", default=os.getenv("GENESIS_PATH") or f"{get_script_dir()}/genesis.txn" , help="The path to the genesis file describing the ledger pool. Can be specified using the 'GENESIS_PATH' environment variable.")
    parser.add_argument("-s", "--seed", default=os.environ.get('SEED') , help="The privileged DID seed to use for the ledger requests. Can be specified using the 'SEED' environment variable. If DID seed is not given the request will run anonymously.")
    parser.add_argument("--nodes", help="The comma delimited list of the nodes from which to collect the status. The default is all of the nodes in the pool.")
    parser.add_argument("-v", "--verbose", action="store_true", help="Enable verbose logging.")

    # Let each plugin contribute its own command-line arguments, then parse.
    # parse_known_args is used so unrecognized plugin-specific flags don't abort.
    monitor_plugins.get_parse_args(parser)
    args, unknown = parser.parse_known_args()

    # Module-level flag consumed by log().
    verbose = args.verbose

    monitor_plugins.load_all_parse_args(args)

    if args.list_nets:
        print(json.dumps(load_network_list(), indent=2))
        exit()

    # Resolve a known network ID to its genesis URL and display name.
    network_name = None
    if args.net:
        log("Loading known network list ...")
        networks = load_network_list()
        if args.net in networks:
            log("Connecting to '{0}' ...".format(networks[args.net]["name"]))
            args.genesis_url = networks[args.net]["genesisUrl"]
            network_name = networks[args.net]["name"]

    if args.genesis_url:
        download_genesis_file(args.genesis_url, args.genesis_path)
        if not network_name:
            # No friendly name available; fall back to the URL itself.
            network_name = args.genesis_url
    if not os.path.exists(args.genesis_path):
        print("Set the GENESIS_URL or GENESIS_PATH environment variable or argument.\n", file=sys.stderr)
        parser.print_help()
        exit()

    did_seed = None if not args.seed else args.seed

    log("indy-vdr version:", indy_vdr.version())
    if did_seed:
        # A seed was supplied: build a signing identity for privileged requests.
        ident = DidKey(did_seed)
        log("DID:", ident.did, " Verkey:", ident.verkey)
    else:
        ident = None

    # NOTE(review): get_event_loop()/run_until_complete is the pre-3.10 idiom;
    # asyncio.run would be the modern equivalent — confirm target Python first.
    asyncio.get_event_loop().run_until_complete(fetch_status(args.genesis_path, args.nodes, ident, network_name))
# Introduced in https://github.com/hyperledger/indy-vdr/commit/ce0e7c42491904e0d563f104eddc2386a52282f7 | ||
log("Getting list of verifiers ...") | ||
verifiers = await pool.get_verifiers() | ||
except AttributeError: | ||
log("Unable to get list of verifiers. Please make sure you have the latest version of indy-vdr.") | ||
pass | ||
|
||
if verifiers and from_nodes: | ||
for node in from_nodes: | ||
if not node in verifiers: | ||
raise NodeNotFound(f'{node} is not a member of {network_name}.') | ||
|
||
log("Submitting request ...") | ||
response = await pool.submit_action(request, node_aliases = from_nodes) | ||
|
||
log("Passing results to plugins for processing ...") | ||
result = await monitor_plugins.apply_all_plugins_on_value(result, network_name, response, verifiers) | ||
log("Processing complete.") | ||
return result |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,73 @@ | ||
""" | ||
File created by tiangolo. | ||
https://github.com/tiangolo/uvicorn-gunicorn-docker/blob/master/docker-images/gunicorn_conf.py | ||
""" | ||
|
||
import json | ||
import multiprocessing | ||
import os | ||
|
||
workers_per_core_str = os.getenv("WORKERS_PER_CORE", "1") | ||
max_workers_str = os.getenv("MAX_WORKERS") | ||
use_max_workers = None | ||
if max_workers_str: | ||
use_max_workers = int(max_workers_str) | ||
web_concurrency_str = os.getenv("WEB_CONCURRENCY", None) | ||
|
||
host = os.getenv("HOST", "0.0.0.0") | ||
port = os.getenv("PORT", "8080") | ||
bind_env = os.getenv("BIND", None) | ||
use_loglevel = os.getenv("LOG_LEVEL", "info") | ||
if bind_env: | ||
use_bind = bind_env | ||
else: | ||
use_bind = f"{host}:{port}" | ||
|
||
cores = multiprocessing.cpu_count() | ||
workers_per_core = float(workers_per_core_str) | ||
default_web_concurrency = workers_per_core * cores | ||
if web_concurrency_str: | ||
web_concurrency = int(web_concurrency_str) | ||
assert web_concurrency > 0 | ||
else: | ||
web_concurrency = max(int(default_web_concurrency), 2) | ||
if use_max_workers: | ||
web_concurrency = min(web_concurrency, use_max_workers) | ||
accesslog_var = os.getenv("ACCESS_LOG", "-") | ||
use_accesslog = accesslog_var or None | ||
errorlog_var = os.getenv("ERROR_LOG", "-") | ||
use_errorlog = errorlog_var or None | ||
graceful_timeout_str = os.getenv("GRACEFUL_TIMEOUT", "120") | ||
timeout_str = os.getenv("TIMEOUT", "120") | ||
keepalive_str = os.getenv("KEEP_ALIVE", "5") | ||
|
||
# Gunicorn config variables | ||
loglevel = use_loglevel | ||
workers = web_concurrency | ||
bind = use_bind | ||
errorlog = use_errorlog | ||
worker_tmp_dir = "/dev/shm" | ||
accesslog = use_accesslog | ||
graceful_timeout = int(graceful_timeout_str) | ||
timeout = int(timeout_str) | ||
keepalive = int(keepalive_str) | ||
|
||
|
||
# For debugging and testing | ||
log_data = { | ||
"loglevel": loglevel, | ||
"workers": workers, | ||
"bind": bind, | ||
"graceful_timeout": graceful_timeout, | ||
"timeout": timeout, | ||
"keepalive": keepalive, | ||
"errorlog": errorlog, | ||
"accesslog": accesslog, | ||
# Additional, non-gunicorn variables | ||
"workers_per_core": workers_per_core, | ||
"use_max_workers": use_max_workers, | ||
"host": host, | ||
"port": port, | ||
} | ||
print('gunicorn config:') | ||
print(json.dumps(log_data, indent=2)) |
Oops, something went wrong.