def generate_feed(req):
    """Build and return the Atom feed for all news entries.

    Sets ``req.content_type`` to the Atom MIME type as a side effect and
    returns the rendered feed template as a string.
    """
    prefix = lib.absolute_prefix()
    news = _enum_news()
    feed = ""
    lastmodified = ""
    for id in news:  # NOTE: `id` shadows the builtin
        # Resolve the on-disk file for this entry: prefer .md, fall back to .news.
        filename = lib.valid_dir(djoin(dirname(__file__), basename(id)))
        filename = filename + ".md" if exists(filename + ".md") else filename + ".news"
        if id == news[0]:
            # The first entry is the newest one; its mtime stamps the whole feed.
            lastmodified = lib.get_time_for_file(filename, True)
        content, headline, author = parse_news(id)
        # Strip inline style attributes from the (already escaped) body.
        # NOTE(review): escaping runs before the regex — if escape() rewrites
        # the quote characters this pattern may never match; confirm.
        content = sub('style=".*?"', "", escape(content))
        uri = djoin(lib.get_config("CanonicalName"), prefix, "News", "?id=" + id)
        feed += lib.get_template("feedentry") % {
            "uri": uri,
            "title": headline,
            "mtime": lib.get_time_for_file(filename, True),
            "content": content,
            "author": author
        }
    req.content_type = "application/atom+xml; charset=UTF-8"
    return lib.get_template("feed") % {
        "uri": djoin(lib.get_config("CanonicalName"), prefix),
        "self": djoin(lib.get_config("CanonicalName"), prefix, "News", "?feed=true"),
        "mtime": lastmodified,
        "content": feed
    }
def index(req, repo=None, version="0.0.0", OS="None"):
    """Render the Downloads page.

    Without ``repo``: list every configured repository with its latest
    version and download links.  With ``repo``: act as an update check —
    report whether ``version`` is current, otherwise show the download
    snippet for that repository.
    """
    if not repo:
        repolist = ""
        desc = "Download " + " ".join(
            [repo["name"] for repo in lib.get_config("Repositories", "Downloads")])
        for repo in lib.get_config("Repositories", "Downloads"):
            try:
                repolist += _snippet_for_repo(None, repo)
            except Exception as e:
                # A single broken repository must not take down the whole page.
                apache.log_error(str(e))
        return lib.respond(req,
                           "<div class='news'>" + repolist + "<br/><br/></div>",
                           "Downloads", "Downloads", desc, module_info())
    else:
        repository = None
        for entry in lib.get_config("Repositories", "Downloads"):
            if entry["repo"] == repo:
                repository = entry
                break
        else:
            return lib.e404(req, "Could not find a matching repository.",
                            module_info())
        try:
            ver = _latest_ver(repo=repository)
        except Exception as e:
            apache.log_error(traceback.format_exc())
            return lib.e404(
                req,
                "Repository corrupt or wrong configuration, please contact the administrator.",
                module_info())
        if version == ver:
            # BUG FIX: the user-facing message had a typo ("lastest").
            return lib.respond(
                req,
                "You're already running the latest version of %s." % repository["name"],
                "Downloads", "Downloads", "Updater", module_info())
        else:
            return lib.respond(req, _snippet_for_repo(repo=repository),
                               "Downloads", "Downloads",
                               "Download %s" % repository["name"], module_info())
def create_auth_token(self):
    '''
    Request an OAuth client-credentials authorization token.

    :return: (token_type, token, token_expiry); each element is None when
        the request failed.
    :rtype: (string or None, string or None, datetime.datetime or None)
    '''
    logging.info("Sending request for token")
    # retrieve API call configs from config file
    client_secret = lib.get_config("client_secret")
    api_url = lib.get_config("token_api_url")
    # construct data that passes to the POST request
    payload = {"grant_type": "client_credentials",
               "scope": "/api",
               "client_id": self.client_id,
               "client_secret": client_secret}
    headers = {"accept": "application/json",
               "content-type": "application/x-www-form-urlencoded"}
    # BUG FIX: previously only `token` was pre-initialised, so a failed
    # request raised UnboundLocalError at the return statement below.
    token_type = None
    token = None
    token_expiry = None
    try:
        data = self.request(api_url, headers, payload, method="POST")
        token_type, token, expires_in = (data["token_type"],
                                         data["access_token"],
                                         data["expires_in"])
        # The token has a limited lifetime; compute its expiry one second
        # early to stay on the safe side.
        token_expiry = datetime.now() + timedelta(seconds=expires_in - 1)
        logging.info("This new token will be expired at %s" % token_expiry)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate.
        logging.exception("Error when retrieving token")
    return token_type, token, token_expiry
def _snippet_for_repo(req=None, repo=None):
    """Return an HTML snippet with download links for *repo*.

    Refuses to act as a request handler: returns None when called with a
    request object or without a repository entry.
    """
    if req or not repo:
        return
    version = _latest_ver(req, repo)
    fmt_args = {"SitePrefix": lib.get_config("SitePrefix"),
                "repo": repo["repo"],
                "version": version}
    parts = ["<h3>%s</h3>Latest version: <strong>%s</strong><br/>"
             % (repo["name"], version)]
    source_uri = lib.get_config("SnapshotDownloadURI", "Downloads") % fmt_args
    parts.append("<a href='%s'>Download source code</a><br/>" % source_uri)
    if repo["win32"]:
        win_uri = lib.get_config("win32URI", "Downloads") % fmt_args
        parts.append("<a href='%s'>Download Windows installer</a>" % win_uri)
    return "".join(parts)
def test_create(self):
    """A freshly created project stores a JSON tfstate and its config name."""
    project_id = new_project(name="test", owner="*****@*****.**")
    state_raw = read_key_or_default(f"{project_id}/terraform.tfstate", "EMPTY")
    project_config = get_config(project_id)
    # A real state file is JSON, so the stored bytes start with a brace.
    self.assertTrue(state_raw.startswith(b"{"))
    self.assertEqual(project_config["name"], "test")
def __init__(self):
    """Open (and if necessary create) the pool database.

    Reads connection details from the ``db`` section of the shared config,
    connects to the MySQL server, creates the database and schema when
    missing, and leaves ``self.cnx`` ready for use.  Exits the process
    (status 1) on unrecoverable connection errors.
    """
    config = lib.get_config()
    my_host = config["db"]["address"]
    my_port = config["db"]["port"]
    my_user = config["db"]["user"]
    my_pass = config["db"]["password"]
    my_dbname = config["db"]["db_name"]
    # Connect to mysql server
    try:
        self.cnx = mysql.connector.connect(host=my_host, port=my_port,
                                           user=my_user, password=my_pass)
    except mysql.connector.Error as err:
        # Any connection failure is fatal; give a friendlier hint for the
        # common access-denied case.
        if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
            print("Something is wrong with your user name or password")
        print(err)
        exit(1)
    # Open DB, create if needed
    try:
        cursor = self.cnx.cursor()
        self.cnx.database = my_dbname
    except mysql.connector.Error as err:
        if err.errno == errorcode.ER_BAD_DB_ERROR:
            # Database missing: create it, then select it.
            self.create_database(cursor)
            self.cnx.database = my_dbname
        else:
            print(err)
            exit(1)
    # Ensure all schema is created
    self.create_tables(cursor)
    # Init complete
    self.cnx.commit()
def lambda_handler(event, context):
    """API Gateway handler serving a project's Terraform state.

    GET returns the stored state; POST validates and stores the posted
    state, echoing it back.  Responds 404 for unknown projects.
    """
    project_id = event["pathParameters"]["projectId"]
    logger.info(f"Got request for project {project_id}")
    statefile = f"{project_id}/terraform.tfstate"
    self_url = "https://" + event["requestContext"]["domainName"]
    config = get_config(project_id)
    # get_config signals "project not found" with the sentinel name "invalid".
    if config["name"] == "invalid":
        return create_response(
            f"No project exists, please visit {self_url}/project/new", 404)
    project_name = config["name"]
    logger.info(f"Got request for {project_name} with id {project_id}")
    # Get existing state or create new
    if event["httpMethod"] == "GET":
        logger.info("Type is GET, send state")
        data = read_key_or_default(statefile)
        return create_response(data.decode("utf-8"))
    # update
    # NOTE(review): methods other than GET/POST fall through and return
    # None — confirm the gateway only routes these two verbs here.
    if event["httpMethod"] == "POST":
        logger.info("Type is POST, save and send state")
        data = event["body"]
        metadata = get_tf_metadata(data, True)
        # get_tf_metadata reports a parse failure via the sentinel "invalid".
        if metadata["terraform_version"] == "invalid":
            return create_response("Unable to parse", code=500)
        else:
            write_key(statefile, data)
            # todo: write the terraform.tfstate.serial
            return create_response(data)
def retrieve_historical_data(self, start_date, end_date, tags,
                             start_time=None, end_time=None,
                             frequency="DAILY", price_points="C",
                             latest_only=False):
    '''
    Request the historical time series for the given period, frequency
    and price points.

    :param start_date: int (e.g. 20181107)
    :param end_date: int (e.g. 20181107)
    :param tags: list (size of 1 to 100 distinct tags)
    :param start_time: int (hhmm, two digit (24) hour * 100 + two digit minute)
    :param end_time: int (hhmm, two digit (24) hour * 100 + two digit minute)
    :param frequency: string (MONTHLY, WEEKLY, DAILY (default), HOURLY, MI10, MI01)
    :param price_points: string (C (default), OHLC)
    :param latest_only: boolean (default = False)
    :return: dict
    '''
    self.refresh_token()
    api_url = "%s?client_id=%s" % (
        lib.get_config("historical_data_api_url"), self.client_id)
    headers = {
        "accept": "application/json",
        "content-type": "application/json",
        "authorization": "%s %s" % (self.token_type.capitalize(), self.token),
    }
    request_body = {
        "startDate": start_date,
        "endDate": end_date,
        "tags": tags,
        "startTime": start_time,
        "endTime": end_time,
        "frequency": frequency,
        "pricePoints": price_points,
        "latestOnly": latest_only,
    }
    payload = json.dumps(request_body)
    logging.info("Requesting Historical Data: %s" % payload)
    return self.request(api_url, headers, payload, method="POST")
def index(req):
    """Render the homepage: markdown snippets followed by widget teasers."""
    parts = []
    for snippet_path in _enum_files():
        with open(snippet_path) as snippet:
            parts.append(markdown(lib.escape("\n".join(snippet.readlines()))))
    for entry in lib._entries("homepage_widget"):
        anchor = "<a href='%s'>" % lib.ljoin(entry[0])
        parts.append(anchor +
                     "<h3>%(title)s</h3></a><div>%(content)s</div>" % entry[1])
    return lib.respond(req, "".join(parts), "", "",
                       lib.get_config("SiteDesc"), None)
def index(req):
    """Render the homepage from markdown snippets and widget entries."""
    response = ""
    for snippet_path in _enum_files():
        with open(snippet_path) as fh:
            raw = "\n".join(fh.readlines())
        response += markdown(lib.escape(raw))
    for entry in lib._entries("homepage_widget"):
        link = "<a href='%s'>" % lib.ljoin(entry[0])
        response += link + "<h3>%(title)s</h3></a><div>%(content)s</div>" % entry[1]
    return lib.respond(req, response, "", "", lib.get_config("SiteDesc"), None)
def index(req, page=None):
    """Serve a configured static page, or redirect home when unknown."""
    pagemeta = None
    for entry in lib.get_config("Entries", "Static"):
        if entry["uri"] == page and "src" in entry:
            pagemeta = entry
            break
    else:
        # No matching entry with a source file: bounce to the site root.
        req.headers_out["location"] = lib.get_config("SitePrefix").encode()
        req.status = apache.HTTP_MOVED_TEMPORARILY
        return
    src_path = lib.valid_dir(djoin(dirname(__file__), pagemeta["src"]))
    with open(src_path) as pagefile:
        text = lib.escape("\n".join(pagefile.readlines()))
    if ".md" in pagemeta["src"]:
        text = markdown(text)
    text = lib.get_template("static") % {"content": text}
    return lib.respond(req, text, pagemeta["title"], pagemeta["caption"],
                       pagemeta["description"], module_info(active=page))
def generate_entries(req=None, current=None):
    """Build the static-pages menu HTML; no-op when invoked as a handler."""
    if req:
        return
    entry_tpl = lib.get_template("menu")
    rendered = []
    for entry in lib.get_config("Entries", "Static"):
        if "src" in entry:
            uri = lib.ljoin("Static", "?page=" + entry["uri"])
        elif entry["uri_is_relative"] is False:
            uri = entry["uri"]
        else:
            uri = lib.ljoin(entry["uri"])
        rendered.append(entry_tpl % {
            "icon": entry["icon"],
            "name": entry["name"],
            "path": uri,
            "cls": "em" if current == entry["uri"] else "",
        })
    return "".join(rendered)
def index(req, repo=None, version="0.0.0", OS="None"):
    """Render the Downloads page.

    Without ``repo``: list every configured repository with download
    links.  With ``repo``: act as an update check for ``version``,
    otherwise show the download snippet for that repository.
    """
    if not repo:
        repolist = ""
        desc = "Download " + " ".join([
            repo["name"] for repo in lib.get_config("Repositories", "Downloads")
        ])
        for repo in lib.get_config("Repositories", "Downloads"):
            try:
                repolist += _snippet_for_repo(None, repo)
            except Exception as e:
                # A single broken repository must not take down the whole page.
                apache.log_error(str(e))
        return lib.respond(
            req, "<div class='news'>" + repolist + "<br/><br/></div>",
            "Downloads", "Downloads", desc, module_info())
    else:
        repository = None
        for entry in lib.get_config("Repositories", "Downloads"):
            if entry["repo"] == repo:
                repository = entry
                break
        else:
            return lib.e404(req, "Could not find a matching repository.",
                            module_info())
        try:
            ver = _latest_ver(repo=repository)
        except Exception as e:
            apache.log_error(traceback.format_exc())
            return lib.e404(
                req,
                "Repository corrupt or wrong configuration, please contact the administrator.",
                module_info())
        if version == ver:
            # BUG FIX: the user-facing message had a typo ("lastest").
            return lib.respond(
                req,
                "You're already running the latest version of %s." %
                repository["name"], "Downloads", "Downloads", "Updater",
                module_info())
        else:
            return lib.respond(req, _snippet_for_repo(repo=repository),
                               "Downloads", "Downloads",
                               "Download %s" % repository["name"],
                               module_info())
def _snippet_for_repo(req=None, repo=None):
    """HTML snippet for one repository: latest version plus download links."""
    if req or not repo:
        # Not usable as a request handler and useless without a repo entry.
        return
    ver = _latest_ver(req, repo)
    subst = {"SitePrefix": lib.get_config("SitePrefix"),
             "repo": repo["repo"],
             "version": ver}
    html = "<h3>%s</h3>Latest version: <strong>%s</strong><br/>" % (repo["name"], ver)
    html += "<a href='%s'>Download source code</a><br/>" % (
        lib.get_config("SnapshotDownloadURI", "Downloads") % subst)
    if repo["win32"]:
        html += "<a href='%s'>Download Windows installer</a>" % (
            lib.get_config("win32URI", "Downloads") % subst)
    return html
def main():
    """Walk recent blocks from the grin node and reconcile the local DB.

    Re-checks the last ``validation_depth`` blocks: flags rows whose hash
    no longer matches the chain as orphans, and back-fills blocks missing
    from the DB entirely.
    """
    db = db_api.db_api()
    config = lib.get_config()
    logger = lib.get_logger(PROCESS)
    logger.warn("=== Starting {}".format(PROCESS))
    grin_api_url = "http://" + config["grin_node"]["address"] + ":" + config[
        "grin_node"]["api_port"]
    status_url = grin_api_url + "/v1/status"
    blocks_url = grin_api_url + "/v1/blocks/"
    validation_depth = int(config[PROCESS]["validation_depth"])
    response = requests.get(status_url)
    latest = int(response.json()["tip"]["height"])
    last = latest - validation_depth  # start a reasonable distance back
    logger.warn("Starting from block #{}".format(last))
    # last = 0
    for i in range(last, latest):
        url = blocks_url + str(i)
        response = requests.get(url).json()
        # print("{}: {}".format(response["header"]["height"], response["header"]["hash"]))
        # Tuple layout mirrors the blocks-table column order.
        data_block = (response["header"]["hash"],
                      response["header"]["version"],
                      response["header"]["height"],
                      response["header"]["previous"],
                      response["header"]["timestamp"][:-1],
                      response["header"]["output_root"],
                      response["header"]["range_proof_root"],
                      response["header"]["kernel_root"],
                      response["header"]["nonce"],
                      response["header"]["total_difficulty"],
                      response["header"]["total_kernel_offset"])
        try:
            rec = db.get_blocks_by_height([i])
            if len(rec) > 0:
                r = rec[0]
                #print("Got block {} at height {}".format(r[0], r[2]))
                if r[0] != response["header"]["hash"]:
                    # Stored hash differs from the chain: mark as orphan.
                    logger.warn(
                        "Found an orphan - height: {}, hash: {} vs {}".format(
                            r[2], r[0], response["header"]["hash"]))
                    db.set_block_state("orphan", int(i))
            else:
                logger.warn("Adding missing block - height: {}".format(
                    response["header"]["height"]))
                # XXX TODO: Probably want to mark it as "missing" so we know it was filled in after the fact?
                db.add_blocks([data_block], True)
        except:
            # XXX TODO: Something
            # NOTE(review): bare except silently drops DB errors — confirm intended.
            pass
        sys.stdout.flush()
    db.set_last_run(PROCESS, str(time.time()))
    db.close()
def index(req, page=None):
    """Serve a static page by its configured uri; redirect home if unknown."""
    match = None
    for entry in lib.get_config("Entries", "Static"):
        if entry["uri"] == page and "src" in entry:
            match = entry
            break
    if match is None:
        # Unknown page: temporary redirect to the site root.
        req.headers_out["location"] = lib.get_config("SitePrefix").encode()
        req.status = apache.HTTP_MOVED_TEMPORARILY
        return
    with open(lib.valid_dir(djoin(dirname(__file__), match["src"]))) as fh:
        text = lib.escape("\n".join(fh.readlines()))
    if ".md" in match["src"]:
        text = markdown(text)
    text = lib.get_template("static") % {"content": text}
    return lib.respond(req, text, match["title"], match["caption"],
                       match["description"], module_info(active=page))
def test_request_post_complete(self):
    """POST with full form data creates a project and redirects to it."""
    event = {
        "httpMethod": "POST",
        "body": "name=test&owner=test%40test.de",
    }
    result = lambda_handler(event, {})
    self.assertEqual(result["statusCode"], 301)
    # The Location header ends with .../<project_id>/<resource>.
    project_id = result["headers"]["Location"].split("/")[-2]
    raw_state = read_key_or_default(f"{project_id}/terraform.tfstate", "NA")
    state = json.loads(raw_state)
    self.assertEqual(state["serial"], 0)
    self.assertEqual(get_config(project_id)["name"], "test")
def main():
    """Check 'new' pool blocks against the chain and update their state.

    Marks blocks past the expiry window as "expired", nonce mismatches as
    "orphan", and blocks old enough to pass the locktime as "unlocked".
    """
    db = db_api.db_api()
    config = lib.get_config()
    logger = lib.get_logger(PROCESS)
    logger.warn("=== Starting {}".format(PROCESS))
    # Get the list of pool_blocks that are
    # old enough to unlock and
    # are not orphan blocks
    logger.debug(config.sections())
    # XXX TODO: The node may not be synced, may need to wait?
    grin_api_url = "http://" + config["grin_node"]["address"] + ":" + config[
        "grin_node"]["api_port"]
    status_url = grin_api_url + "/v1/status"
    blocks_url = grin_api_url + "/v1/blocks/"
    block_locktime = int(config[PROCESS]["block_locktime"])
    block_expiretime = int(config[PROCESS]["block_expiretime"])
    response = requests.get(status_url)
    latest = int(response.json()["tip"]["height"])
    # BUG FIX: was `logger.debug("Latest: {}", format(latest))` — the comma
    # passed the bare builtin format() result as a stray argument, so the
    # "{}" placeholder was never filled in the logged message.
    logger.debug("Latest: {}".format(latest))
    new_poolblocks = db.get_poolblocks_by_state('new')
    for (pb_hash, pb_height, pb_nonce, pb_actual_difficulty,
         pb_net_difficulty, pb_timestamp, pb_found_by,
         pb_state) in new_poolblocks:
        if pb_height < latest - block_expiretime:
            # Dont re-process very old blocks - protection against duplicate payouts.
            logger.debug(
                "Processed expired pool block at height: {}".format(pb_height))
            db.set_poolblock_state("expired", int(pb_height))
            continue
        response = requests.get(blocks_url + str(pb_height)).json()
        # print("Response: {}".format(response))
        if int(response["header"]["nonce"]) != int(pb_nonce):
            # Nonce mismatch means the chain kept a different block: orphan.
            logger.debug(
                "Processed orphan pool block at height: {}".format(pb_height))
            db.set_poolblock_state("orphan", int(pb_height))
        else:
            if pb_height < (latest - block_locktime):
                logger.debug(
                    "Unlocking pool block at height: {}".format(pb_height))
                db.set_poolblock_state("unlocked", int(pb_height))
        sys.stdout.flush()
    db.set_last_run(PROCESS, str(time.time()))
    db.close()
    logger.warn("=== Completed {}".format(PROCESS))
    sys.stdout.flush()
def retrieve_tag_browser(self, prefix):
    '''
    Request the tag tree structure at the given prefix level.
    prefix = "" means root level

    :param prefix: string
    :return: dict
    '''
    self.refresh_token()
    api_url = "%s?client_id=%s" % (
        lib.get_config("tag_browser_api_url"), self.client_id)
    headers = {
        "accept": "application/json",
        "content-type": "application/json",
        "authorization": "%s %s" % (self.token_type.capitalize(), self.token),
    }
    payload = json.dumps({"prefix": prefix})
    logging.info("Requesting Tag Browser: %s" % payload)
    return self.request(api_url, headers, payload, method="POST")
def main():
    """Spawn the pool- and grin-log watcher threads and wait on both."""
    global PROCESS
    global LOGGER
    global CONFIG
    CONFIG = lib.get_config()
    LOGGER = lib.get_logger(PROCESS)
    # XXX TODO: Kubernetes does not always get the volume mounted before the
    # processes start - maybe need a loop waiting on it
    # XXX TODO: Need to handle the case where one thread dies but the other
    # lives - probably want to exit with error status if both threads are
    # not healthy
    watchers = [
        threading.Thread(name='PoolShareWatcher', target=process_pool_log),
        threading.Thread(name='GrinShareWatcher', target=process_grin_log),
    ]
    for t in watchers:
        t.start()
    for t in watchers:
        t.join()
def retrieve_metadata(self, tags, frequency="EOD"):
    '''
    Request metadata for a tag or group of tags.

    :param tags: list
    :param frequency: string (EOD (default), INTRADAY)
    :return: dict
    '''
    self.refresh_token()
    api_url = "%s?client_id=%s" % (
        lib.get_config("metadata_api_url"), self.client_id)
    headers = {
        "accept": "application/json",
        "content-type": "application/json",
        "authorization": "%s %s" % (self.token_type.capitalize(), self.token),
    }
    payload = json.dumps({"tags": tags, "frequency": frequency})
    logging.info("Requesting Metadata: %s" % payload)
    return self.request(api_url, headers, payload, method="POST")
def retrieve_tag_list(self, prefix, regex=".*"):
    '''
    Request the list of tags that belong to the prefix tag.

    :param prefix: string (e.g. COMMODITIES.SPOT.SPOT_GOLD, needs at least the
        first two levels, namely Category and Sub Category in the Data Browser)
    :param regex: string (e.g. .*(GOLD|SILVER), default returns everything)
    :return: dict
    '''
    self.refresh_token()
    api_url = "%s?client_id=%s" % (
        lib.get_config("tag_list_api_url"), self.client_id)
    headers = {
        "accept": "application/json",
        "content-type": "application/json",
        "authorization": "%s %s" % (self.token_type.capitalize(), self.token),
    }
    payload = json.dumps({"prefix": prefix, "regex": regex})
    logging.info("Requesting Tag List: %s" % payload)
    return self.request(api_url, headers, payload, method="POST")
def lambda_handler(event, context):
    """API Gateway Lambda authorizer using HTTP Basic credentials.

    "admin" + the global KEY grants access; "token" + the per-project
    token grants access to that project's state endpoint.  Anything else
    is rejected (Deny policy or an "Unauthorized" exception).
    """
    logger.info(event)
    logger.info(context)
    # decode user and password
    if not "Authorization" in event["headers"]:
        # NOTE(review): this call passes 2 args while every other call
        # passes 3 — confirm create_policy tolerates the missing resource.
        return create_policy("NONE", "Deny")
    auth_raw = event["headers"]["Authorization"].split(" ")[1]
    auth_obj = base64.b64decode(auth_raw).decode("utf-8").split(":")
    auth_user = auth_obj[0]
    auth_pass = auth_obj[1]
    arn = event["methodArn"]
    logger.info(f"Got auth request from {auth_user} for {arn}")
    # if a new request, use the global key (TBD restrict the cli to the token)
    if auth_user == "admin":
        logger.info("Got request for admin")
        if auth_pass == KEY:
            logger.info("PW correct allow usage of new")
            return create_policy(auth_user, arn, "Allow")
        else:
            logger.warn("Admin key does not fit, reject")
            raise Exception("Unauthorized")
    # for state or info use the key from the project
    if event["resource"] == "/project/{projectId}/terraform.tfstate":
        logger.info("Got request for info or state")
        if auth_user == "token":
            project_id = event["pathParameters"]["projectId"]
            logger.info(f"Got request for project {project_id}")
            config = get_config(project_id)
            project_name = config["name"]
            project_token = config["token"]
            logger.info(f"Got request for {project_name} with id {project_id}")
            if project_token == auth_pass:
                logger.info("Token fits the project ones, allow")
                return create_policy(project_id, arn, "Allow")
            else:
                logger.warn("Token does not fit, reject")
                raise Exception("Unauthorized")
    # Fallthrough: neither admin nor a valid project token.
    logger.error("Got invalid request, Deny")
    raise Exception("Unauthorized")
def start_httpd():
    """Simple httpd: serve the config/status page forever on port 80."""
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # Allow quick restarts without waiting for TIME_WAIT to expire.
    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    s.bind(('', 80))
    s.listen(5)
    while True:
        conn, addr = s.accept()
        try:
            print('Got a connection from %s' % str(addr))
            request = conn.recv(1024)
            res = process_request(request)
            if not res:
                # No handler result: render the current configuration.
                cfg = lib.get_config()
                html = web_page(cfg)
            else:
                html = web_page(res)
            response = HTTP_RESPONSE % (len(html), html)
            # BUG FIX: send() may transmit only part of the buffer;
            # sendall() loops until everything is written.
            conn.sendall(response)
        finally:
            # BUG FIX: the connection leaked when request handling raised.
            conn.close()
def generate_entries(req=None, current=None):
    """Render the static menu entries as HTML (returns None for handlers)."""
    if req:
        return
    tpl = lib.get_template("menu")
    out = ""
    for entry in lib.get_config("Entries", "Static"):
        if "src" in entry:
            target = lib.ljoin("Static", "?page=" + entry["uri"])
        else:
            target = (entry["uri"] if entry["uri_is_relative"] is False
                      else lib.ljoin(entry["uri"]))
        out += tpl % {"icon": entry["icon"],
                      "name": entry["name"],
                      "path": target,
                      "cls": "em" if current == entry["uri"] else ""}
    return out
def main():
    """Tail the grin node for newly mined blocks and record them in the DB.

    Polls the node's status endpoint every ``check_interval`` seconds and
    inserts every block between the previously seen height and the tip.
    Runs forever.
    """
    db = db_api.db_api()
    config = lib.get_config()
    logger = lib.get_logger(PROCESS)
    logger.warn("=== Starting {}".format(PROCESS))
    grin_api_url = "http://" + config["grin_node"]["address"] + ":" + config[
        "grin_node"]["api_port"]
    status_url = grin_api_url + "/v1/status"
    blocks_url = grin_api_url + "/v1/blocks/"
    check_interval = float(config[PROCESS]["check_interval"])
    last = get_current_height(status_url)
    while True:
        latest = get_current_height(status_url)
        # Fetch every block we have not seen yet.
        for i in range(last + 1, latest + 1):
            # NOTE(review): `last` is advanced inside the loop (effectively on
            # the first iteration); placing it after the loop would read more
            # clearly — confirm no reliance on mid-loop failure semantics.
            last = latest
            url = blocks_url + str(i)
            response = requests.get(url).json()
            logger.warn("New Block: {} at {}".format(
                response["header"]["hash"], response["header"]["height"]))
            # Tuple layout mirrors the blocks-table column order.
            data_block = (response["header"]["hash"],
                          response["header"]["version"],
                          response["header"]["height"],
                          response["header"]["previous"],
                          response["header"]["timestamp"][:-1],
                          response["header"]["output_root"],
                          response["header"]["range_proof_root"],
                          response["header"]["kernel_root"],
                          response["header"]["nonce"],
                          response["header"]["total_difficulty"],
                          response["header"]["total_kernel_offset"])
            try:
                db.add_blocks([data_block])
            except:
                # NOTE(review): bare except hides insert errors (presumably to
                # tolerate duplicate rows) — confirm intended.
                pass
            sys.stdout.flush()
        sleep(check_interval)
    # Unreachable: the while-True loop above never exits.
    logger.warn("=== Completed {}".format(PROCESS))
def generate_page(page, news_amount_factor, show_nav=True):
    """Render one page of news entries plus (optionally) pager links.

    :param page: zero-based page index; non-int input (it typically comes
        from a query string) is rejected by returning None.
    :param news_amount_factor: multiplier applied to the configured
        NewsPerPage value.
    :param show_nav: include the newer/older navigation bar.
    """
    # Idiom fix: isinstance() instead of `not type(page) is int`.
    if not isinstance(page, int):
        return
    news_per_page = int(
        lib.get_config("NewsPerPage") * float(news_amount_factor))
    news = _enum_news()
    pages = int(ceil(len(news) / float(news_per_page)))
    # Concatenate the rendered bodies of this page's slice of entries.
    content = "".join([
        parse_news(id)[0] for id in
        news[page * news_per_page:page * news_per_page + news_per_page]
    ])
    page_str = ""
    if show_nav:
        # Numbered page links; the current page gets the "em" class.
        for i in range(pages):
            page_str += '<a class="link %(cls)s" href="?page=%(i)i">%(i)i</a> | ' % {
                "i": i,
                "cls": "em" if i == page else ""
            }
        if page > 0:
            # Prepend "newest"/"newer" links when not on the first page.
            page_str = '<a class="link" href="?page=0"><img width="16" height="16" class="inline" src="/static/first.png" alt="goto first page"/>Newest</a> | <a rel="prev" class="link" href="?page=%(i)i"><img width="16" height="16" class="inline" src="/static/prev.png" alt="goto newer page"/>Newer</a> | ' % {
                "i": page - 1
            } + page_str
        if page < (pages - 1):
            # Append "older"/"oldest" links when not on the last page.
            page_str += '<a rel="next" class="link" href="?page=%(i)i">Older<img class="inline" width="16" height="16" src="/static/next.png" alt="goto older page"/></a> | <a class="link" href="?page=%(l)i">Oldest<img class="inline" width="16" height="16" src="/static/last.png" alt="goto last page"/></a>' % {
                "i": page + 1,
                "l": pages - 1
            }
    return lib.get_template("news_pages") % {
        "pages": page_str,
        "content": content
    }
def lambda_handler(event, context):
    """React to S3 object writes: notify subscribers about config or state changes."""
    CONFIG_TOPIC = os.environ["CONFIG_TOPIC"]
    STATE_TOPIC = os.environ["STATE_TOPIC"]
    DOMAIN = os.environ.get("DOMAIN")
    # Object keys look like "<project_id>/<filename>".
    key = event["Records"][0]["s3"]["object"]["key"]
    project_id = key.split("/")[0]
    filename = key.split("/")[1]
    logger.info(f"Key is {key}")
    logger.info(f"project_id is {project_id}")
    # BUG FIX: this log line printed the literal text "(unknown)" instead of
    # interpolating the filename.
    logger.info(f"filename is {filename}")
    config = get_config(project_id)
    project_name = config["name"]
    if filename == "config.json":
        send_message(
            CONFIG_TOPIC,
            project_id,
            # BUG FIX: typo "creted" in the outgoing notification text.
            f'Project "{project_name}" ({project_id}) settings has been created or updated',
            f"Project {project_name} update",
        )
    if filename == "terraform.tfstate":
        report = gen_report(project_id)
        # Map the numeric report state onto a human-readable level.
        state = "UNKNOWN"
        if report["state"] == 1:
            state = "WARNING"
        if report["state"] == 2:
            state = "CRITICAL"
        send_message(
            STATE_TOPIC,
            project_id,
            f'Project "{project_name}" ({project_id}) was deployed is now in state {state},\nPlease check here https://{DOMAIN}/{project_id}/info for further details',
            f"[{state}] Project {project_name} was deployed",
        )
import numpy as np import tensorflow as tf from lib import get_config from network.py_func import corner_py from tensorflow.python.ops import control_flow_ops from tensorflow.python.training import moving_averages import os proj_path = os.path.abspath(os.curdir) cfg = get_config(proj_path, 'configure.yml') DEFAULT_PADDING = 'SAME' def layer(op): def layer_decorated(self, *args, **kwargs): # Automatically set a name if not provided. name = kwargs.setdefault('name', self.get_unique_name(op.__name__)) # Figure out the layer inputs. if len(self.inputs) == 0: raise RuntimeError('No input variables found for layer %s.' % name) elif len(self.inputs) == 1: layer_input = self.inputs[0] else: layer_input = list(self.inputs) # Perform the operation and get the output. layer_output = op(self, layer_input, *args, **kwargs) # Add to layer LUT. self.layers[name] = layer_output # This output is now the input for the next layer.
def _latest_ver(req=None, repo=None):
    """Return the newest ref name of *repo*'s bare git repository.

    Refuses to act as a request handler: returns None when given a
    request object or no repository entry.
    """
    if req or not repo:
        return
    git_dir = djoin(lib.get_config("RepoPath", "Downloads"), repo["repo"])
    # Take the second-to-last line of `git show-ref` output (the last line is
    # empty after the trailing newline) and keep only the final path
    # component, i.e. the ref name.
    # NOTE(review): under Python 3, communicate() returns bytes, so
    # .split("\n") would raise — this appears to assume Python 2; confirm.
    return Popen(["git", "--git-dir=" + git_dir, "show-ref"],
                 stdout=PIPE).communicate()[0].split("\n")[-2].split("/")[-1]
Script written by David Guyon (david <at> guyon <dot> me). Creation date: 26/10/2018 Last update: 30/04/2019 """ import sys import lib import json import pprint import datetime from os import path, remove # Getting configuration from YAML file start, end, nodes = lib.get_config() ### # Get wattmeter/port information analyzed_nodes = dict() for node in nodes: cluster = node.split('-')[0] print("Getting wattmeter information from %s" % node) curl_cmd = "curl -s -k https://api.grid5000.fr/stable/sites/lyon/clusters/" + cluster + "/nodes/" + node output = lib.exec_bash(curl_cmd).decode('utf-8') if "401 Unauthorized" in output: print("You need to execute this script from WITHIN Grid'5000") sys.exit() json_data = json.loads(output) # skip node if not equiped with wattmeter
def main():
    """Pay out every miner whose confirmed balance exceeds the minimum.

    For each payable UTXO: lock the row, zero the balance, attempt the
    wallet payment, and either commit the payout or restore the balance
    and bump the failure counter.  Exits with status 1 on any unexpected
    error after rolling back.
    """
    global LOGGER
    global CONFIG
    CONFIG = lib.get_config()
    LOGGER = lib.get_logger(PROCESS)
    LOGGER.warn("=== Starting {}".format(PROCESS))
    # DB connection details
    db_host = CONFIG["db"]["address"] + ":" + CONFIG["db"]["port"]
    db_user = CONFIG["db"]["user"]
    db_password = CONFIG["db"]["password"]
    db_name = CONFIG["db"]["db_name"]
    mysqlcontsraints = MysqlConstants(db_host, db_user, db_password, db_name)
    # Connect to DB
    database.db = database_details(MYSQL_CONSTANTS=mysqlcontsraints)
    database.db.initialize()
    wallet_dir = CONFIG[PROCESS]["wallet_dir"]
    minimum_payout = int(CONFIG[PROCESS]["minimum_payout"])
    # The wallet command must run from its own directory.
    os.chdir(wallet_dir)
    utxos = Pool_utxo.getPayable(minimum_payout)
    database.db.getSession().commit()
    # XXX TODO: Use the current balance, timestamp, the last_attempt timestamp, last_payout, and failed_attempts
    # XXX TODO: to filter and sort by order we want to make payment attempts
    for utxo in utxos:
        try:
            LOGGER.warn("Trying to pay: {} {} {}".format(
                utxo.id, utxo.address, utxo.amount))
            # Lock just this current record for update
            locked_utxo = Pool_utxo.get_locked_by_id(utxo.id)
            # Save and Zero the balance
            original_balance = locked_utxo.amount
            locked_utxo.amount = 0
            # Savepoint changes - if we crash after sending coins but before commit we roll back to here.
            # The pool audit service finds lost payouts and restores user balance
            database.db.getSession().begin_nested()
            # Attempt to make the payment
            timestamp = "{:%B %d, %Y %H:%M:%S.%f}".format(datetime.now())
            status = makePayout(locked_utxo.address, original_balance)
            LOGGER.warn("Payout status: {}".format(status))
            if status == 0:
                LOGGER.warn("Made payout for {} {} {}".format(
                    locked_utxo.id, locked_utxo.address, original_balance))
                # Update timestamp of last payout, number of failed payout attempts
                locked_utxo.amount = 0
                locked_utxo.failure_count = 0
                locked_utxo.last_try = timestamp
                locked_utxo.last_success = timestamp
                # Commit changes
                database.db.getSession().commit()
            else:
                LOGGER.error("Failed to make payout: {} {} {}".format(
                    locked_utxo.id, locked_utxo.address, original_balance))
                # Restore the users balance
                locked_utxo.amount = original_balance
                # Update number of failed payout attempts
                if locked_utxo.failure_count is None:
                    locked_utxo.failure_count = 0
                locked_utxo.failure_count += 1
                locked_utxo.last_try = timestamp
                # Commit changes
                database.db.getSession().commit()
            database.db.getSession().commit()
        except Exception as e:
            LOGGER.error("Failed to process utxo: {} because {}".format(
                utxo.id, str(e)))
            database.db.getSession().rollback()
            sys.exit(1)
    LOGGER.warn("=== Completed {}".format(PROCESS))
def test_load_invalid(self):
    """Looking up a nonexistent project yields the sentinel name."""
    self.assertEqual(get_config("notexistingid")["name"], "invalid")
def test_generated(self):
    """A generated test project is retrievable by its id."""
    pid = gen_test_project()
    self.assertEqual(get_config(pid)["name"], "test")
import urllib import urllib2 import random import json import lib config = lib.get_config() def drill(data): data.update(config.get("drillbit")) req = urllib2.Request("https://drillbitapp.com/api", urllib.urlencode(data)) res = urllib2.urlopen(req) return json.loads(res.read()) def retrieve_names(name): if type(name).__name__ == "User": return [name.name_first, name.name_last] else: return name def prep_names(names, sample=1000, seed=20130812): """ Throw this first to prepare the names then throw it into drill! """ firstnames = [] lastnames = [] if type(names).__name__ == "Query":
def __init__(self):
    """Set up logging and fetch an initial auth token for the proxy."""
    logging.config.fileConfig("./config/logging.config")
    logging.info("Created instance of API Proxy")
    self.client_id = lib.get_config("client_id")
    auth = self.create_auth_token()
    self.token_type, self.token, self.token_expiry = auth