def _auth(self):
    """
    Authenticate Amigo on GCP, fetching the credentials and saving
    them to a local file that can be used for service discovery.
    """
    # Set the credentials to be used by amigo
    local_cred_file = util.get_value(self.config, "local_cred_file")
    try:
        self.auth = Storage(local_cred_file)
        if util.is_file(util.get_value(self.config, "key_file")):
            creds = GoogleCredentials.get_application_default()
            self.auth.put(creds)
        return True
    except IOError:
        util.print_to_stderr(
            "Cannot open {0} to write, ensure you are running as root.".format(
                local_cred_file))
    except ApplicationDefaultCredentialsError:
        util.print_to_stderr("Cannot authenticate to GCP.")
    return False
def get_lab_list():
    repo = util.get_value('GLOBAL', 'REPO')
    try:
        c = con.cursor()
        sql = "SELECT lab, disp_name, credit_to, short_desc, coalesce(auth_type,''), \n" + \
              "       coalesce(enabled_image,''), coalesce(disabled_image,'') \n" + \
              "  FROM labs ORDER BY 1"
        c.execute(sql)
        l_list = c.fetchall()
    except Exception as e:
        fatal_sql_error(e, sql, "meta.lab_list()")

    labs = []
    for row in l_list:
        lab_dict = {}
        lab = str(row[0])
        lab_dict['enabled'] = util.get_value("labs", lab)
        lab_dict['lab'] = lab
        lab_dict['disp_name'] = str(row[1])
        lab_dict['credit_to'] = str(row[2])
        lab_dict['short_desc'] = str(row[3])
        lab_dict['auth_type'] = str(row[4])
        lab_dict['enabled_image_url'] = repo + "/" + str(row[5])
        lab_dict['disabled_image_url'] = repo + "/" + str(row[6])
        labs.append(lab_dict)

    return labs
def print_url(ui, mono, amis, auth):
    ui = util.get_value(args.aws, ui)
    mono = util.get_value(args.aws, mono)
    amis = util.get_value(args.aws, amis)
    print('To use service, copy paste this')
    print(ui + '?amisApi=' + quote(amis) + '&monoApi=' + quote(mono))
    authDemo = util.get_value(args.aws, auth)
    print('To test authorizer, copy this')
    print(ui + '?amisApi=' + quote(authDemo) + '&monoApi=' + quote(mono))
def getTestSetting(self):
    """
    Method to get the test list setting of bam.

    :return: It yields a json string.
    """
    yield self.session.publish('com.bigsql.onGetTestSetting',
                               util.get_value("GLOBAL", "STAGE", ""))
def _fetch_attributes_for_projects(self):
    """
    Fetch attributes for each GCP project in the database, saving
    the data to disk.
    """
    gcp_attributes = self.config["gcp_attributes"]

    for project in self.database.get_table("projects"):
        project_name = util.get_value(project, "projectId")

        # Get all the resources specified in the config file (e.g. "compute")
        for attribute_resource, attribute_item_list in gcp_attributes.items():
            gcp = GCPWrapper(self.config, attribute_resource, "v1")

            # Loop over the attributes of that resource (e.g. firewalls, networks, etc.)
            for attribute_item in attribute_item_list:
                attribute_data = gcp.fetch_attribute(attribute_item,
                                                     project=project_name)
                if attribute_data:
                    self._record_attribute_data_reports(
                        attribute_item, attribute_data, project_name)
                    self._record_attribute_data_to_db(
                        attribute_item, attribute_data, project_name)

            # Get any warnings generated by this GCP instance.
            if gcp.warnings:
                self.warnings.extend(gcp.warnings)

    return True
def main(args):
    base = util.get_value(args.aws, 'monoUrl')
    gets = [base + 'api/values', base + 'api/init/users']
    if not args.aws:
        gets.insert(0, base + 'api/init/table')
    #
    print('Init database and get default')
    for url in gets:
        print(f'Method GET endpoint {url}')
        response = requests.get(url)
        util.print_result(response)
    #
    auth = util.login(base, 'admin', 'demo')
    #
    print('Get my data')
    util.print_result(requests.get(base + 'api/auth/me', headers=auth))
    #
    get_all_documents(base, auth)
    #
    doc = add_document(base, auth, "Invoice to be removed")
    remove_document(base, auth, doc['id'])
    #
    doc = add_document(base, auth, "Invoice to be tested")
    get_all_documents(base, auth)
    #
    get_permissions(base, auth)
    get_permission_jwt(base, auth, doc['id'])
    remove_all_documents(base, auth)
def step(sprops, dprops):
    for name, prop in sprops.iteritems():
        if not check(name, prop, dprops):
            return
        if util.is_primitive(prop) or util.is_from_group(prop):
            value = util.get_value(prop)
            if name not in dprops:
                dprops[name] = util.ValueFromGroup(value, src, depth)
            elif util.is_from_group(dprops[name]):
                if util.get_depth(dprops[name]) == depth:
                    errors.append(MergeError(
                        ('value for "%s" is absent in "%s" but '
                         'came from two different groups: "%s" and "%s", '
                         'can\'t merge') % (name, dst, src, dprops[name].source)))
                elif util.get_depth(dprops[name]) > depth:
                    dprops[name] = util.ValueFromGroup(value, src, depth)
        elif type(prop) == list:
            dprops[name] = (merge_lists(prop, dprops[name])
                            if name in dprops else prop)
        elif type(prop) == dict:
            dprops[name] = (step(prop, dprops[name])
                            if name in dprops else prop)
        else:
            errors.append(MergeError(('unknown type of prop %s (%s), '
                                      'can\'t merge') % (name, type(prop))))
    return dprops
def _setup(self):
    """ Set Amigo up to run. """
    # Set output directories
    output_dir = util.get_value(self.config, "reports_dir")
    util.print_to_stdout(
        "Setting up output directory at '{0}'".format(output_dir))
    util.create_dir(output_dir)

    # Set current report
    self.reports = util.get_full_path(
        util.get_value(self.config, "reports_dir"), util.get_date())
    util.create_dir(self.reports)
    util.print_to_stdout("Reports are being saved to '{0}'".format(
        self.reports))

    # Search for previous reports
    days_back = 30
    for day in range(1, days_back):
        self.previous_reports = util.get_full_path(
            util.get_value(self.config, "reports_dir"), util.get_date(day))
        if not util.is_path(self.previous_reports):
            util.print_to_stdout(
                "No previous reports found at '{0}'".format(
                    self.previous_reports))
        else:
            util.print_to_stdout(
                "Previous report {0} found ({1} day(s) ago)".format(
                    self.previous_reports, day), color="yellow")
            break

    # Create database
    self.database = Database(self.database_path)
    util.create_dir(self.database_path)
    util.print_to_stdout("Database is being saved at '{0}'".format(
        self.database_path))

    # Violation report config
    results_dir = util.get_value(self.config, "results_dir")
    util.create_dir(results_dir)
    results_file = util.get_value(self.config, "results_log_file")
    self.results = util.get_full_path(results_dir, results_file)
    util.print_to_stdout("Results will be saved at '{0}'".format(
        self.results))
def update_fields(service_id):
    properties = get_properties(service_id)
    if not properties.keys():
        return

    if get_value(properties, "AutoConnect") == "true":
        autoconn = "Yes"
    else:
        autoconn = "No"
    form.get('autoconnect').value = autoconn

    form.get('nameservers').value = get_str_value(properties,
                                                  "Nameservers.Configuration")
    form.get('timeservers').value = get_str_value(properties,
                                                  "Timeservers.Configuration")
    form.get('domains').value = get_str_value(properties,
                                              "Domains.Configuration")

    value = get_dict_value(properties, "Proxy.Configuration", "Method")
    if value == "":
        value = "auto"
    form.get('proxymethod').value = value
    form.get('proxyurl').value = get_dict_value(properties,
                                                "Proxy.Configuration", "URL")
    form.get('proxyservers').value = get_dict_value(properties,
                                                    "Proxy.Configuration",
                                                    "Servers")
    form.get('proxyexcludes').value = get_dict_value(properties,
                                                     "Proxy.Configuration",
                                                     "Excludes")

    form.get('ipv4method').value = get_dict_value(properties,
                                                  "IPv4.Configuration", "Method")
    form.get('ipv4address').value = get_dict_value(properties,
                                                   "IPv4.Configuration", "Address")
    form.get('ipv4netmask').value = get_dict_value(properties,
                                                   "IPv4.Configuration", "Netmask")
    form.get('ipv4gateway').value = get_dict_value(properties,
                                                   "IPv4.Configuration", "Gateway")

    form.get('ipv6method').value = get_dict_value(properties,
                                                  "IPv6.Configuration", "Method")
    form.get('ipv6address').value = get_dict_value(properties,
                                                   "IPv6.Configuration", "Address")

    value = get_dict_value(properties, "IPv6.Configuration", "PrefixLength")
    if value != "":
        value = "%d" % value
    form.get('ipv6prefixlen').value = value

    form.get('ipv6gateway').value = get_dict_value(properties,
                                                   "IPv6.Configuration", "Gateway")
    form.get('ipv6privacy').value = get_dict_value(properties,
                                                   "IPv6.Configuration", "Privacy")
def getBetaFeatureSetting(self, settingName):
    """
    Method to get a beta feature setting of bam.

    :return: It yields a json string.
    """
    # Topic name kept verbatim (including its spelling) since it is a
    # wire identifier that subscribers match on.
    yield self.session.publish('com.bigsql.onGetBeataFeatureSetting', {
        'setting': settingName,
        'value': util.get_value("BETA", settingName)
    })
def mainline():
    ## need from_version & to_version
    if len(sys.argv) == 3:
        p_from_ver = sys.argv[1]
        p_to_ver = sys.argv[2]
    else:
        print("ERROR: Invalid number of parameters, try: ")
        print("  python update-hub.py from_version to_version")
        sys.exit(1)

    print("")
    print("Running update-hub from v" + p_from_ver + " to v" + p_to_ver)

    ## NOTE: version strings are compared lexicographically here
    if p_from_ver >= p_to_ver:
        print("Nothing to do.")
        sys.exit(0)

    if (p_from_ver < "2.7.0") and (p_to_ver >= "2.7.0"):
        update_2_7_0()

    if (p_from_ver < "3.1.0") and (p_to_ver >= "3.1.0"):
        update_3_1_0()

    if (p_from_ver < "3.1.1") and (p_to_ver >= "3.1.1"):
        update_3_1_1()

    if (p_from_ver < "3.2.1") and (p_to_ver >= "3.2.1"):
        update_3_2_1()

    PGC_HOME = os.getenv('PGC_HOME', '')
    try:
        import shutil
        src = os.path.join(os.path.dirname(__file__), "pgc.sh")
        dst = os.path.join(PGC_HOME, "pgc")
        if platform.system() == "Windows":
            src = os.path.join(os.path.dirname(__file__), "pgc.bat")
            dst = os.path.join(PGC_HOME, "pgc.bat")
        shutil.copy(src, dst)
    except Exception:
        pass

    if (p_from_ver < "3.2.9") and (p_to_ver >= "3.2.9"):
        old_default_repo = "http://s3.amazonaws.com/pgcentral"
        new_default_repo = "https://s3.amazonaws.com/pgcentral"
        current_repo = util.get_value("GLOBAL", "REPO")
        if current_repo == old_default_repo:
            util.set_value("GLOBAL", "REPO", new_default_repo)

    if (p_from_ver < "3.3.0") and (p_to_ver >= "3.3.0"):
        update_3_3_0()

    if (p_from_ver < "3.3.3") and (p_to_ver >= "3.3.3"):
        update_3_3_3()

    print(" ")
    print("Goodbye.")
    sys.exit(0)
def step(attr):
    if util.is_primitive(attr) or util.is_from_group(attr):
        return util.get_value(attr)
    elif type(attr) == list:
        return map(step, attr)
    elif type(attr) == dict:
        for key in attr.iterkeys():
            attr[key] = step(attr[key])
        return attr
    else:
        assert False, "unknown type %s" % type(attr)
def init(self, name, password, dataDir, port, host=None):
    """
    Method to initialize a server component.

    :param name: Name of the component to be initialized.
    """
    pgcCmd = PGC_HOME + os.sep + "pgc --json init " + name + \
        " --datadir " + dataDir + " --port " + port
    if port == '':
        pgcCmd = pgcCmd.split(' --port')[0]
    if dataDir == '':
        pgcCmd = pgcCmd.split(' --datadir')[0]

    if host:
        pgc_host_info = util.get_pgc_host(host)
        cred_info = util.get_credentials_by_uuid(
            pgc_host_info.get('ssh_cred_id'))
        enc_secret = util.get_value("GLOBAL", "SECRET", "")
        enc_key = "{0}{1}".format(enc_secret, cred_info.get("cred_uuid"))
        ssh_username = cred_info.get("ssh_user")
        # Keep the decrypted SSH password separate so the component password
        # passed in as 'password' is not clobbered before it is written to
        # the remote .pgpass file.
        ssh_password = ""
        if cred_info.get("ssh_passwd"):
            ssh_password = util.decrypt(cred_info.get("ssh_passwd"), enc_key)
        ssh_key = ""
        if cred_info.get("ssh_key"):
            ssh_key = util.decrypt(cred_info.get("ssh_key"), enc_key)
        sudo_pwd = ""
        if cred_info.get("ssh_sudo_pwd"):
            sudo_pwd = util.decrypt(cred_info.get("ssh_sudo_pwd"), enc_key)
        ssh_host = pgc_host_info.get('host')
        from PgcRemote import PgcRemote
        remote = PgcRemote(ssh_host, ssh_username, password=ssh_password,
                           ssh_key=ssh_key)
        remote.connect()
        is_file_added = remote.add_file('/tmp/.pgpass', password)
        remote.disconnect()
        pgcCmd = pgcCmd + " --pwfile /tmp/.pgpass --host \"" + host + "\""

    if util.is_postgres(name) and not host:
        pgpass_file = PGC_HOME + os.sep + name + os.sep + ".pgpass"
        if not os.path.isfile(pgpass_file):
            password_file = open(pgpass_file, 'w')
            password_file.write(password + '\n')
            password_file.close()
            os.chmod(pgpass_file, 0600)

    pgcProcess = subprocess.Popen(pgcCmd, stdout=subprocess.PIPE, shell=True)
    for line in iter(pgcProcess.stdout.readline, ''):
        try:
            ln = line.rstrip('\n')
            if type(eval(ln)) is list:
                yield self.session.publish('com.bigsql.onInit', ln)
        except:
            pass
def __init__(self, config):
    self.config = config
    self.database_path = util.get_value(self.config, "database_json")
    self.reports = None
    self.previous_reports = None
    self.results = None
    self.warnings = []
    self._setup()
def meta_list(p_isJSON, p_isVERBOSE, p_meta, p_instance, return_dict=False):
    repo = util.get_value("GLOBAL", "REPO")
    url = repo + "/" + p_meta + ".txt"
    try:
        response = urllib2.urlopen(url, timeout=15)
        meta_string = response.read()
    except Exception as e:
        util.exit_message("Cannot retrieve METALIST '" + url + "'", 1, p_isJSON)

    rows = []
    kount = 0
    for line in meta_string.splitlines():
        kount = kount + 1
        word = line.split("\t")
        for i in range(len(word)):
            word[i] = word[i].strip()

        ## process header rows
        if kount < 3:
            if kount == 1:
                ## row 1 contains column names
                num_cols = len(word)
                col_names = word
            else:
                ## row 2 contains column titles
                col_titles = word
            continue

        ## process valid detail rows
        if len(word) == num_cols:
            if p_instance:
                if word[0] != p_instance:
                    continue
            d = {}
            for i in range(len(word)):
                d[col_names[i]] = word[i]
            rows.append(d)

    if return_dict:
        return rows

    if p_isJSON:
        print(json.dumps(rows, sort_keys=True, indent=2))
    else:
        print("")
        print(api.format_data_to_table(rows, col_names, col_titles))

    return 0
def get_json_file(p_file, p_isJSON):
    json_file = p_file + ".txt"
    repo = util.get_value("GLOBAL", "REPO")
    repo_file = repo + "/" + json_file
    out_dir = os.getenv("MY_HOME") + os.sep + "conf" + os.sep + "cache"
    if util.http_get_file(False, json_file, repo, out_dir, False, ""):
        out_file = out_dir + os.sep + json_file
        try:
            return json.loads(util.read_file_string(out_file))
        except:
            pass
    util.exit_message("Cannot process json_file '" + p_file + "'", 1, p_isJSON)
def main(args):
    base = util.get_value(args.aws, 'monoUrl')
    if not args.aws:
        url = base + 'api/init/table'
        print('Init database')
        print(f'Method GET endpoint {url}')
        response = requests.get(url)
    #
    url = base + 'api/init/users'
    print('Init users')
    print(f'Method GET endpoint {url}')
    response = requests.get(url)
    util.print_result(response)
def get(self):
    host = request.args.get('hostname')
    cred_name = request.args.get('cred_name')
    import util
    cred_info = util.get_credentials_by_name(cred_name)
    enc_secret = util.get_value("GLOBAL", "SECRET", "")
    enc_key = "{0}{1}".format(enc_secret, cred_info.get("cred_uuid"))
    username = cred_info.get("ssh_user")
    password = ""
    if cred_info.get("ssh_passwd"):
        password = decrypt(cred_info.get("ssh_passwd"), enc_key)
    ssh_key = ""
    if cred_info.get("ssh_key"):
        ssh_key = decrypt(cred_info.get("ssh_key"), enc_key)
    sudo_pwd = ""
    if cred_info.get("ssh_sudo_pwd"):
        sudo_pwd = decrypt(cred_info.get("ssh_sudo_pwd"), enc_key)

    # username = request.args.get('username')
    # password = request.args.get('password')
    # ssh_key = request.args.get('ssh_key')
    # sudo_pwd = request.args.get('sudo_pwd', None)

    from PgcRemote import PgcRemote
    json_dict = {}
    try:
        remote = PgcRemote(host, username, password=password,
                           ssh_key=ssh_key, sudo_pwd=sudo_pwd)
        if not sudo_pwd:
            remote.connect()
        json_dict['state'] = "success"
        try:
            remote_pgc_path = remote.get_exixting_pgc_path()
            for key in remote_pgc_path.keys():
                json_dict[key] = remote_pgc_path[key]
        except Exception as e:
            print(str(e))
        data = json.dumps([json_dict])
        remote.disconnect()
    except Exception as e:
        errmsg = ("ERROR: Cannot connect to " + username + "@" + host +
                  " - " + str(e))
        json_dict['state'] = "error"
        json_dict['msg'] = errmsg
        data = json.dumps([json_dict])
    return data
def get(self, host, comp, pgpasswd, username=None, password=None):
    from PgcRemote import PgcRemote
    json_dict = {}
    if password is None or username is None:
        import util
        pgc_host_info = util.get_pgc_host(host)
        ssh_host = pgc_host_info.get('host')
        ssh_host_name = pgc_host_info.get('host_name')
        ssh_cred_id = pgc_host_info.get('ssh_cred_id')
        cred_info = util.get_credentials_by_uuid(ssh_cred_id)
        enc_secret = util.get_value("GLOBAL", "SECRET", "")
        enc_key = "{0}{1}".format(enc_secret, cred_info.get("cred_uuid"))
        ssh_username = cred_info.get("ssh_user")
        # Initialize ssh_password (not password) so it is always bound
        # before being passed to PgcRemote below.
        ssh_password = ""
        if cred_info.get("ssh_passwd"):
            ssh_password = decrypt(cred_info.get("ssh_passwd"), enc_key)
        ssh_key = ""
        if cred_info.get("ssh_key"):
            ssh_key = decrypt(cred_info.get("ssh_key"), enc_key)
        sudo_pwd = ""
        if cred_info.get("ssh_sudo_pwd"):
            sudo_pwd = decrypt(cred_info.get("ssh_sudo_pwd"), enc_key)
        is_sudo = pgc_host_info.get('is_sudo')
        util.update_cred_used(cred_info.get("cred_uuid"))
        try:
            remote = PgcRemote(ssh_host, ssh_username, password=ssh_password,
                               ssh_key=ssh_key)
            remote.connect()
            is_file_added = remote.add_file('/tmp/.pgpass', pgpasswd)
            remote.disconnect()
            data = pgc.get_data("init", comp, ssh_host_name, '/tmp/.pgpass')
        except Exception as e:
            errmsg = ("ERROR: Cannot connect to " + ssh_username + "@" +
                      ssh_host + " - " + str(e.args[0]))
            json_dict['state'] = "error"
            json_dict['msg'] = errmsg
            data = json.dumps([json_dict])
    return data
def get(self):
    host = request.args.get('hostname')
    check_sudo_password = request.args.get('pwd')
    pgc_host_info = util.get_pgc_host(host)
    pgc_host = pgc_host_info.get('host')
    ssh_cred_id = pgc_host_info.get('ssh_cred_id')
    cred_info = util.get_credentials_by_uuid(ssh_cred_id)
    enc_secret = util.get_value("GLOBAL", "SECRET", "")
    enc_key = "{0}{1}".format(enc_secret, cred_info.get("cred_uuid"))
    pgc_user = cred_info.get("ssh_user")
    pgc_passwd = ""
    if cred_info.get("ssh_passwd"):
        pgc_passwd = decrypt(cred_info.get("ssh_passwd"), enc_key)
    pgc_ssh_key = ""
    if cred_info.get("ssh_key"):
        pgc_ssh_key = decrypt(cred_info.get("ssh_key"), enc_key)
    util.update_cred_used(cred_info.get("cred_uuid"))

    from PgcRemote import PgcRemote
    json_dict = {}
    try:
        remote = PgcRemote(pgc_host, pgc_user, password=pgc_passwd,
                           ssh_key=pgc_ssh_key, sudo_pwd=check_sudo_password)
        remote.connect()
        is_sudo = remote.has_root_access()
        json_dict['state'] = "success"
        json_dict['isSudo'] = is_sudo
        data = json.dumps([json_dict])
        remote.disconnect()
    except Exception as e:
        # 'pgc_user' is the name actually in scope here ('username' was not defined)
        errmsg = ("ERROR: Cannot connect to " + pgc_user + "@" + host +
                  " - " + str(e))
        json_dict['state'] = "error"
        json_dict['msg'] = errmsg
        data = json.dumps([json_dict])
    return data
def check_custom_rules(self):
    """
    Load custom rules from the rules.yaml file and check every rule
    for violations over every resource's report.
    """
    results = []
    rules = util.read_yaml_file(self.rules_file)
    try:
        for rule_name, rule in rules.items():
            if util.get_value(rule, "rule_type") == "key_value":
                # All types of rules go here.
                result = self._do_keyvalue_rule(rule_name, rule)
                if result:
                    results.append(result)
    except AttributeError:
        util.print_to_stdout(
            "Rule file ('{0}') is ill-formatted.".format(self.rules_file))
    return results
def main(args):
    base = util.get_value(args.aws, 'monoUrl')
    if not args.aws:
        gets = ['api/init/table']
        #
        print('Init database and get default')
        for url in gets:
            print(f'Method GET endpoint {url}')
            response = requests.get(base + url)
            util.print_result(response)
    #
    auth, user_id = util.login(base, 'admin', 'demo')
    #
    docs = demo_mono.get_all_documents(base, auth)
    #
    if len(docs) == 0:
        print('No documents. Creating one.')
        doc = demo_mono.add_document(base, auth, "Some demo document")
    else:
        print('There are documents. Selecting first')
        doc = docs[0]
    print('Active document "' + doc['name'] + '"')
    doc_id = doc['id']
    #
    jwt_auth = get_jwt(base, auth, doc_id)
    #
    base_amis = util.get_value(args.aws, 'amisUrl')
    #
    headers(base_amis, jwt_auth)
    #
    decoded(base_amis, jwt_auth)
    #
    if not args.aws:
        # Naturally, in AWS we can't list all the buckets that are there.
        # That would not make any sense. So this is only for minio.
        bucket(base_amis, jwt_auth)
    #
    get_files(base_amis, jwt_auth, doc_id)
    #
    permissions = demo_mono.get_permissions(base, auth)
    area = get_first_area_with_add(permissions)
    print(f'Add files to area {area}')
    files = [{
        'name': 'temp.txt',
        'content_type': 'text/plain; charset=UTF-8'
    }, {
        'name': 'img.jpg',
        'content_type': 'image/jpeg'
    }]
    for f in files:
        presign_data = get_presigned_url(base_amis, jwt_auth, doc_id, area, f)
        upload_file_to_presigned_url(presign_data['url'], presign_data['key'],
                                     f, user_id)
    files = get_files(base_amis, jwt_auth, doc_id)

    if args.nodelete:
        print('Not fetching and deleting files. Look from browser. Exiting.')
        exit()

    get_file(base_amis, jwt_auth, files[0]['path'])
    delete_file(base_amis, jwt_auth, files[0]['path'])
    files = get_files(base_amis, jwt_auth, doc_id)
    get_file(base_amis, jwt_auth, files[0]['path'])
    delete_file(base_amis, jwt_auth, files[0]['path'])
    files = get_files(base_amis, jwt_auth, doc_id)
# NOTE: the opening of this snippet (imports, figure creation, and the first
# three curve definitions) was missing; everything marked "reconstructed" is
# an assumption made to match the five legend names below.
import matplotlib.pyplot as plt

fig, ax = plt.subplots()

# data for the example curves
data = [
    [[x / 100 for x in range(1000)], [x / 100 for x in range(1000)]],         # reconstructed: 'linear'
    [[x / 100 for x in range(1000)], [(x / 100) ** 2 for x in range(1000)]],  # reconstructed: 'square'
    [[x / 100 for x in range(1000)], [-x / 100 for x in range(1000)]],        # reconstructed: 'linear neg'
    [[x / 100 for x in range(1000)], [x ** 0.5 for x in range(1000)]],
    [[x - 10 for x in range(20)], [(x % 2 - 0.5) * 2 for x in range(20)]],
]
# get their names
name_curves = ['linear', 'square', 'linear neg', 'root', 'high and low']

# plot the functions
for d in data:
    plt.plot(d[0], d[1])
plt.xlabel('x')
plt.ylabel('y')
plt.title('example')

# extend window to have some place to write the coordinate
plt.legend(name_curves, bbox_to_anchor=(1.04, 1))
fig.tight_layout()

# add and activate the events (mouse_zoom, grab_move and get_value are
# interactive-event helpers provided by the surrounding project)
zoom_event = mouse_zoom(scale=2, bound=True, ax=ax, fig=fig)
zoom_event.activate()
grab_event = grab_move(ax=ax, fig=fig)
grab_event.activate()
value_event = get_value(data=data, name_curves=name_curves, colors="#0000ff",
                        ax=ax, fig=fig)
value_event.activate()
plt.show()
def main(args):
    base = util.get_value(args.aws, 'authUrl')
    print('Using base url %s' % base)

    print('-- ENCODED JWT')
    response = requests.post(base + 'jwt', json={
        'docId': ID,
        'permissions': [
            {'id': 'image', 'permissions': ['DELETE', 'READ', 'WRITE']}
        ],
        'meta': {
            'userid': 'randomid',
            'givenName': 'Keijo',
            'familyName': 'Test'
        }
    })
    body = json.loads(response.content)
    jwt = body['jwt']
    print(jwt)

    print('-- SHOULD BE OK')
    response = requests.post(base + 'auth', json={
        'docId': ID,
        'area': 'image',
        'permission': 'DELETE',
        'jwt': jwt
    })
    print(response.status_code)
    body = json.loads(response.content)
    print(json.dumps(body, indent=4, sort_keys=True))

    print('-- SHOULD FAIL FOR ID')
    response = requests.post(base + 'auth', json={
        'docId': ID + '1',
        'area': 'image',
        'permission': 'DELETE',
        'jwt': jwt
    })
    print(response.status_code)

    print('-- SHOULD FAIL FOR AREA')
    response = requests.post(base + 'auth', json={
        'docId': ID,
        'area': 'images',
        'permission': 'DELETE',
        'jwt': jwt
    })
    print(response.status_code)

    print('-- SHOULD FAIL FOR PERMISSION')
    # Use the valid 'image' area here so the failure is caused by the bogus
    # permission, not by the area (the original passed 'images' here).
    response = requests.post(base + 'auth', json={
        'docId': ID,
        'area': 'image',
        'permission': 'DELETES',
        'jwt': jwt
    })
    print(response.status_code)

    print('-- SHOULD FAIL FOR JWT')
    response = requests.post(base + 'auth', json={
        'docId': ID,
        'area': 'image',
        'permission': 'DELETE',
        'jwt': jwt + '1'
    })
    print(response.status_code)
def test_valid_dict_input(self):
    x = {'a': 1, 'b': 52, 'd': 6}
    self.assertEqual(util.get_value(x, 'a'), 1)
    self.assertEqual(util.get_value(x, 'b'), 52)
    self.assertEqual(util.get_value(x, 'd'), 6)

def test_dict_missing_key(self):
    x = {'a': 1, 'b': 52, 'd': 6}
    self.assertEqual(util.get_value(x, 'q'), None)

def test_valid_list_input(self):
    y = ['a', 'c', 'd']
    self.assertEqual(util.get_value(y, 'a'), 0)
    self.assertEqual(util.get_value(y, 'c'), 1)
    self.assertEqual(util.get_value(y, 'd'), 2)

def test_list_missing_key(self):
    y = ['a', 'c', 'd']
    self.assertEqual(util.get_value(y, 'b'), None)
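Taken together, the four tests above pin down a contract for this project's util.get_value: a dict lookup returns the mapped value, a list lookup returns the element's index, and a missing key yields None. A minimal sketch satisfying that contract (an assumption for illustration; the real util.get_value may handle more container types):

def get_value(container, key):
    # Dict: return the mapped value, or None when the key is absent.
    if isinstance(container, dict):
        return container.get(key)
    # List: return the element's index, or None when it is absent.
    if isinstance(container, list):
        return container.index(key) if key in container else None
    return None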
import os
os.environ['KERAS_BACKEND'] = 'tensorflow'

from keras.models import load_model
from keras.utils import plot_model
import numpy as np

import util

FILE = '../data/sample_1.csv'
SAVE = '../../model/model_1'

training_data, policy_training_target, value_training_target, \
    testing_data, policy_testing_target, value_testing_target = util.read_csv(FILE)

model = load_model(SAVE)

for i in range(10):
    predictions = model.predict(np.array([testing_data[i]]))
    print("cycle{0}".format(i))
    print("policy is {0}, value is {1}".format(util.get_policy(predictions),
                                               util.get_value(predictions)[1]))
def __init__(self, data_type_vocab_manager, operation_vocab_manager,
             digit_vocab_manager, opts, is_test=False):
    self._data_type_vocab_manager = data_type_vocab_manager
    self._operation_vocab_manager = operation_vocab_manager
    self._digit_vocab_manager = digit_vocab_manager
    self._is_test = is_test

    self._max_memory_size = util.get_value(opts, "max_memory_size")
    self._max_value_size = util.get_value(opts, "max_value_size")
    self._max_argument_num = util.get_value(opts, "max_argument_num")

    # if self._is_test:
    #     self._batch_size = 1
    # else:
    self._batch_size = util.get_value(opts, "batch_size")

    self._case_num = util.get_value(opts, "case_num")
    self._batch_with_case_size = self._batch_size * self._case_num
    self._batch_with_case_and_memory_size = (self._batch_with_case_size *
                                             self._max_memory_size)
    self._dropout = util.get_value(opts, "dropout", 0.25)

    # Embedding sizes
    self._data_type_embedding_dim = util.get_value(opts, "data_type_embedding_dim")
    self._operation_embedding_dim = util.get_value(opts, "operation_embedding_dim")
    self._lambda_embedding_dim = util.get_value(opts, "lambda_embedding_dim")
    self._digit_embedding_dim = util.get_value(opts, "digit_embedding_dim")

    # Memory encoder (DNN) sizes
    self._memory_encoder_layer_1_dim = util.get_value(opts, "memory_encoder_layer_1_dim")
    self._memory_encoder_layer_2_dim = util.get_value(opts, "memory_encoder_layer_2_dim")

    # Output encoder (DNN) sizes
    self._output_encoder_layer_1_dim = util.get_value(opts, "output_encoder_layer_1_dim")
    self._output_encoder_layer_2_dim = util.get_value(opts, "output_encoder_layer_2_dim")

    # Guide hidden layer
    self._guide_hidden_dim = util.get_value(opts, "guide_hidden_dim")

    # Operation selector hidden size
    self._operation_selector_dim = util.get_value(opts, "operation_selector_dim")

    self._gradient_clip = util.get_value(opts, "gradient_clip", 5)

    # Regularization terms
    self._regularization_terms = list()

    if self._is_test:
        self._build_test_graph()
    else:
        self._build_train_graph()
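For illustration, a hypothetical opts dict covering every key the constructor above reads; all values are made-up examples, not settings from the original project ("dropout" and "gradient_clip" may be omitted, since get_value is called with defaults for them):

opts = {
    "max_memory_size": 5,           # hypothetical example values throughout
    "max_value_size": 10,
    "max_argument_num": 3,
    "batch_size": 32,
    "case_num": 5,
    "dropout": 0.25,                # optional; defaults to 0.25
    "data_type_embedding_dim": 16,
    "operation_embedding_dim": 32,
    "lambda_embedding_dim": 16,
    "digit_embedding_dim": 16,
    "memory_encoder_layer_1_dim": 256,
    "memory_encoder_layer_2_dim": 128,
    "output_encoder_layer_1_dim": 256,
    "output_encoder_layer_2_dim": 128,
    "guide_hidden_dim": 128,
    "operation_selector_dim": 128,
    "gradient_clip": 5,             # optional; defaults to 5
}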
def changed(properties, value, input):
    return get_value(properties, value) != input