def test_parse(self):
    '''Feed one record to common.parse() in two chunks: an incomplete
    line must yield no records, and the trailing newline must complete
    exactly one record. (Python 2 module: print statements.)'''
    # Create simple-minded event handler
    evthdl = EventHandler()
    evthdl.buf = ''
    evthdl.offset = 0
    # Create temporary accounting file
    fd, path = tempfile.mkstemp()
    w = os.fdopen(fd, 'w')
    r = open(path, 'r')
    # Write the first bit to it
    l = L[:40]
    print "writing " + l
    w.write(l)
    w.flush()
    os.fsync(fd)
    # No newline yet, so no record may be emitted.
    self.assertEqual(list(common.parse(r, evthdl)), [])
    # Write the remainder to it
    l = L[40:]
    print "writing " + l
    w.write(l + '\n')
    w.flush()
    os.fsync(fd)
    # The newline completes the record: exactly one is parsed.
    self.assertEqual(len(list(common.parse(r, evthdl))), 1)
    # Cleanup
    r.close()
    w.close()
def check_previous_installation(self):
    """Remove leftover Fuel environments and nodes from earlier runs."""
    log('Check previous installation')
    environments = parse(self.ssh.exec_cmd('fuel env list'))
    if environments:
        self.cleanup_fuel_environments(environments)
    nodes = parse(self.ssh.exec_cmd('fuel node list'))
    if nodes:
        self.cleanup_fuel_nodes(nodes)
def listen(self):
    """Receive loop (secondary thread): read packets, parse, execute."""
    while self.connected:
        packet = self.conn.recv(common.packetSize)
        if packet == "":
            continue
        self.execute(common.parse(packet))
def env_exists(self, env_name):
    """Return True (remembering its id) if a 'new' env named env_name exists."""
    for entry in parse(exec_cmd('fuel env --list')):
        if entry[E['name']] == env_name and entry[E['status']] == 'new':
            self.env_id = entry[E['id']]
            return True
    return False
def post(resource):
    """ Adds one or more documents to a resource. Each document is validated
    against the domain schema. If validation passes the document is inserted
    and ID_FIELD, LAST_UPDATED and DATE_CREATED along with a link to the
    document are returned. If validation fails, a list of validation issues
    is returned.

    :param resource: name of the resource involved.

    .. versionchanged:: 0.0.3
       JSON links. Superflous ``response`` container removed.
    """
    # Reject empty form payloads outright.
    if len(request.form) == 0:
        abort(400)
    response = dict()
    # One timestamp shared by every document of this request.
    date_utc = datetime.utcnow()
    schema = app.config['DOMAIN'][resource]['schema']
    validator = app.validator(schema, resource)
    # Each form field is one candidate document, keyed by field name.
    for key, value in request.form.items():
        response_item = dict()
        issues = list()
        try:
            document = parse(value, resource)
            validation = validator.validate(document)
            if validation:
                document[config.LAST_UPDATED] = \
                    document[config.DATE_CREATED] = date_utc
                document[config.ID_FIELD] = app.data.insert(resource,
                                                            document)
                response_item[config.ID_FIELD] = document[config.ID_FIELD]
                response_item[config.LAST_UPDATED] = \
                    document[config.LAST_UPDATED]
                response_item['_links'] = \
                    {'self': document_link(resource,
                                           response_item[config.ID_FIELD])}
            else:
                issues.extend(validator.errors)
        except ValidationError as e:
            # Validation-framework errors propagate to the caller.
            raise e
        except Exception as e:
            # Anything else (e.g. malformed payload) is reported back as a
            # per-document issue rather than failing the whole request.
            issues.append(str(e))
        if len(issues):
            response_item['issues'] = issues
            response_item['status'] = config.STATUS_ERR
        else:
            response_item['status'] = config.STATUS_OK
        response[key] = response_item
    return response, None, None, 200
def transform_lines(url):
    """
    Download an XML file, number its <l> elements and CTS-ize it.

    :param url: A Perseus Github Raw address
    :type url: str
    """
    lang, urn, target, parsed = common.parse(url)
    # URNs that are neither Greek nor Latin are treated as translations.
    if "grc" not in urn and "lat" not in urn:
        type_text = "translation"
    else:
        type_text = "edition"

    # Number every line sequentially; .set() accepts only strings.
    for i, line in enumerate(parsed.xpath("//l"), start=1):
        line.set("n", str(i))

    # Flatten div1/div2 into plain <div> elements. Mind the |// !
    for div in parsed.xpath("//div1|//div2"):
        div.tag = "div"

    # Declare the CTS citation scheme (a single 'line' level).
    citations = [
        MyCapytain.resources.texts.tei.Citation(
            name="line",
            refsDecl="/tei:TEI/tei:text/tei:body/tei:div[@type='"
                     + type_text + "']//tei:l[@n='$1']"
        )
    ]
    try:
        common.write_and_clean(urn, lang, parsed, citations, target)
    except Exception as e:
        # Was a bare `except:` which also swallowed SystemExit and
        # KeyboardInterrupt and hid the actual error.
        print(urn + " failed")
        print(e)
def configure_environment(self):
    """Create and configure a Fuel environment for the wanted release."""
    release_list = parse(exec_cmd('fuel release -l'))
    wanted = None
    for release in release_list:
        if release[R['name']] == self.wanted_release:
            wanted = release
            break
    if wanted is None:
        # Previously the last listed release was used silently when no
        # name matched (or a NameError was raised on an empty list).
        err('Release not found: %s' % self.wanted_release)
    config_env = ConfigureEnvironment(self.dea, YAML_CONF_DIR,
                                      wanted[R['id']],
                                      self.node_roles_dict)
    config_env.configure_environment()
    self.env_id = config_env.env_id
def real(line):
    """Return the room's sector id when its checksum is valid, else 0.

    The checksum is the five most common letters of the encrypted name,
    ties broken alphabetically.
    """
    encrypted, sector, checksum = parse(line)
    counts = Counter(encrypted)
    del counts['-']
    # Most common first; equal counts ordered alphabetically.
    ranked = sorted(counts.items(), key=lambda kv: (-kv[1], kv[0]))
    top_five = [letter for letter, _ in ranked[:5]]
    if top_five == list(checksum):
        return int(sector)
    return 0
def check_supported_release(self):
    """Abort unless this Fuel installation offers the wanted release."""
    log('Check supported release: %s' % self.wanted_release)
    releases = parse(self.ssh.exec_cmd('fuel release -l'))
    if not any(r[R['name']] == self.wanted_release for r in releases):
        err('This Fuel does not contain the following release: %s'
            % self.wanted_release)
def verify_node_status(self):
    """Fail the deployment when any node of the env is not 'ready'."""
    node_list = parse(exec_cmd('fuel --env %s node' % self.env_id))
    failed_nodes = [(n[N['id']], n[N['status']])
                    for n in node_list if n[N['status']] != 'ready']
    if failed_nodes:
        summary = ''.join('[node %s, status %s]\n' % (node, status)
                          for node, status in failed_nodes)
        err('Deployment failed: %s' % summary, self.collect_logs)
def post(resource):
    """ Adds one or more documents to a resource. Each document is validated
    against the domain schema. If validation passes the document is inserted
    and ID_FIELD, LAST_UPDATED and DATE_CREATED along with a link to the
    document are returned. If validation fails, a list of validation issues
    is returned.

    :param resource: name of the resource involved.
    """
    # Reject empty form payloads outright.
    if len(request.form) == 0:
        abort(400)
    response = dict()
    # One timestamp shared by every document of this request.
    date_utc = datetime.utcnow()
    schema = app.config['DOMAIN'][resource]['schema']
    validator = app.validator(schema, resource)
    # Each form field is one candidate document, keyed by field name.
    for key, value in request.form.items():
        response_item = dict()
        issues = list()
        try:
            document = parse(value, resource)
            validation = validator.validate(document)
            if validation:
                document[config.LAST_UPDATED] = \
                    document[config.DATE_CREATED] = date_utc
                document[config.ID_FIELD] = app.data.insert(resource,
                                                            document)
                response_item[config.ID_FIELD] = document[config.ID_FIELD]
                response_item[config.LAST_UPDATED] = \
                    document[config.LAST_UPDATED]
                response_item['link'] = \
                    document_link(resource, response_item[config.ID_FIELD])
            else:
                issues.extend(validator.errors)
        except ValidationError as e:
            # Validation-framework errors propagate to the caller.
            raise e
        except Exception as e:
            # Anything else (e.g. malformed payload) is reported back as a
            # per-document issue rather than failing the whole request.
            issues.append(str(e))
        if len(issues):
            response_item['issues'] = issues
            response_item['status'] = config.STATUS_ERR
        else:
            response_item['status'] = config.STATUS_OK
        response[key] = response_item
    return response, None, None, 200
def verify_node_status(self):
    """Fail when any clustered node is not in the 'ready' state."""
    node_list = parse(exec_cmd('fuel node list'))
    failed_nodes = [(n[N['id']], n[N['status']]) for n in node_list
                    if n[N['status']] != 'ready' and n[N['cluster']] != 'None']
    if failed_nodes:
        summary = ''.join('[node %s, status %s]\n' % (node, status)
                          for node, status in failed_nodes)
        err('Deployment failed: %s' % summary)
def info():
    """Print the account information of the logged-in user."""
    conf_data = common.parse(vars.fileConf)
    if not conf_data:
        print("Bad config file")
        return
    if "jwt" not in conf_data:
        print("You need to login first")
        return
    url = vars.eHost + '/htsp/info'
    headers = {"Authorization": conf_data["jwt"]}
    response = common.sendingGet(url, headers)
    print(json.dumps(response["content"], indent=2))
def info_key(key_name):
    """Print the snippets-service info for the stored API key *key_name*."""
    conf_data = common.parse(vars.fileConf)
    if not conf_data:
        print("Bad config file")
        return
    if key_name not in conf_data["branch"]:
        print("Bad key name")
        return
    payload = {"api_key": conf_data["branch"][key_name]}
    response = common.sendingPost(vars.eHost + '/snippets/info', payload)
    print(json.dumps(response["content"], indent=2))
def discovery_waiting_loop(self, discovered_macs):
    """Poll Fuel until every blade is discovered; return True on success."""
    WAIT_LOOP = 360
    SLEEP_TIME = 10
    for _ in range(WAIT_LOOP):
        node_list = parse(self.ssh.exec_cmd('fuel node list'))
        if node_list:
            self.node_discovery(node_list, discovered_macs)
        if self.all_blades_discovered():
            return True
        time.sleep(SLEEP_TIME)
    return False
def get(file_sign):
    """Fetch and print the cloud record for each hjws file handle."""
    for handle in file_sign:
        data = common.parse(handle.name)
        if not data:
            print("Bad hjws file")
            return
        if "id_hjws" not in data:
            print("Bad id_hjws")
            return
        response = common.sendingGet(vars.eHost + '/htsp/hjws/'
                                     + data["id_hjws"])
        print('\n' + handle.name)
        print(json.dumps(response["content"], indent=2))
def reap_environment_info(self):
    """Write env name/segmentation type and wanted release to the DEA file."""
    network_file = ('%s/network_%s.yaml' % (self.temp_dir, self.env_id))
    network = self.read_yaml(network_file)
    seg_type = network['networking_parameters']['segmentation_type']
    env = {'environment': {'name': self.env[E['name']],
                           'net_segment_type': seg_type}}
    self.write_yaml(self.dea_file, env)
    # Map the environment's release id back to its human-readable name.
    wanted_release = None
    for rel in parse(exec_cmd('fuel release')):
        if rel[R['id']] == self.env[E['release_id']]:
            wanted_release = rel[R['name']]
    self.write_yaml(self.dea_file, {'wanted_release': wanted_release})
def delete(key_name, id_hjws):
    """Delete each given cloud hjws id using the API key *key_name*."""
    conf_data = common.parse(vars.fileConf)
    if not conf_data:
        print("Bad config file")
        return
    if key_name not in conf_data["branch"]:
        print("Bad key name")
        return
    url = vars.eHost + '/htsp/hjws'
    for hjws_id in id_hjws:
        message = {"api_key": conf_data["branch"][key_name],
                   "id_hjws": hjws_id}
        response = common.sendingDel(url, message)
        print('\n' + hjws_id)
        print(json.dumps(response["content"], indent=2))
def reap_nodes_interfaces_transformations(self):
    """Harvest per-node role/interface/transformation data from Fuel and
    write it to the DEA and DHA files.

    Node ids are renumbered so that the lowest real Fuel id becomes 1.
    """
    node_list = parse(exec_cmd('fuel node'))
    real_node_ids = [node[N['id']] for node in node_list]
    real_node_ids.sort()
    # Lowest real id is the base of the 1-indexed renumbering below.
    min_node = real_node_ids[0]
    interfaces = {}
    transformations = {}
    dea_nodes = []
    dha_nodes = []
    for real_node_id in real_node_ids:
        node_id = int(real_node_id) - int(min_node) + 1
        self.last_node = node_id
        node = self.get_node_by_id(node_list, real_node_id)
        roles = commafy(node[N['roles']])
        if not roles:
            err('Fuel Node %s has no role' % real_node_id)
        dea_node = {'id': node_id, 'role': roles}
        dha_node = {'id': node_id}
        if_name, mac = self.reap_interface(real_node_id, interfaces)
        log('reap transformation for node %s' % real_node_id)
        tr_name = self.reap_transformation(real_node_id, roles,
                                           transformations)
        dea_node.update({
            'interfaces': if_name,
            'transformations': tr_name
        })
        # IPMI/libvirt details are unknown at reap time; placeholders are
        # written for later manual completion.
        dha_node.update({
            'pxeMac': mac if mac else None,
            'ipmiIp': None,
            'ipmiUser': None,
            'ipmiPass': None,
            'libvirtName': None,
            'libvirtTemplate': None
        })
        dea_nodes.append(dea_node)
        dha_nodes.append(dha_node)
    self.write_yaml(self.dha_file, {'nodes': dha_nodes}, False)
    self.write_yaml(self.dea_file, {'nodes': dea_nodes})
    self.write_yaml(self.dea_file, interfaces)
    self.write_yaml(self.dea_file, transformations)
    self.reap_fuel_node_info()
    self.write_yaml(self.dha_file, {'disks': DISKS})
def info_key(key_name):
    """Print the API key's info together with its public key."""
    conf_data = common.parse(vars.fileConf)
    if not conf_data:
        print("Bad config file")
        return
    if key_name not in conf_data["branch"]:
        print("Bad key name")
        return
    api_key = conf_data["branch"][key_name]
    info = common.sendingPost(vars.eHost + '/htsp/info/apikey',
                              {"api_key": api_key})
    # The kid is the first dot-separated segment of the API key.
    pubkey = common.sendingGet(vars.eHost + '/htsp/pubkey/'
                               + api_key.split('.')[0])
    info["content"]["pubkey"] = pubkey["content"]
    print(json.dumps(info["content"], indent=2))
def partOneMod(instr: str) -> list:
    """
    Trace the ship through the instructions, recording every position.

    :param instr: raw puzzle input
    :return: list of (lat, long) tuples, one per instruction
             (the previous ``-> int`` annotation was incorrect)
    """
    input_list = common.parse(instr)
    current_direction = "e"
    lat = 0  # north/south, pos/neg
    lon = 0  # west/east, pos/neg ('long' shadowed a Python 2 builtin)
    location_history = []
    for instruction in input_list:
        current_direction, lad, lod = partOne.translate_movement(
            current_direction, instruction)
        lat += lad
        lon += lod
        location_history.append((lat, lon))
    return location_history
def reap_nodes_interfaces_transformations(self):
    """Harvest per-node role/interface/transformation data from Fuel and
    write it to the DEA and DHA files.

    Node ids are renumbered so that the lowest real Fuel id becomes 1.
    """
    node_list = parse(exec_cmd('fuel node'))
    real_node_ids = [node[N['id']] for node in node_list]
    real_node_ids.sort()
    # Lowest real id is the base of the 1-indexed renumbering below.
    min_node = real_node_ids[0]
    interfaces = {}
    transformations = {}
    dea_nodes = []
    dha_nodes = []
    for real_node_id in real_node_ids:
        node_id = int(real_node_id) - int(min_node) + 1
        self.last_node = node_id
        node = self.get_node_by_id(node_list, real_node_id)
        roles = commafy(node[N['roles']])
        if not roles:
            err('Fuel Node %s has no role' % real_node_id)
        dea_node = {'id': node_id, 'role': roles}
        dha_node = {'id': node_id}
        if_name, mac = self.reap_interface(real_node_id, interfaces)
        log('reap transformation for node %s' % real_node_id)
        tr_name = self.reap_transformation(real_node_id, roles,
                                           transformations)
        dea_node.update(
            {'interfaces': if_name,
             'transformations': tr_name})
        # IPMI/libvirt details are unknown at reap time; placeholders are
        # written for later manual completion.
        dha_node.update(
            {'pxeMac': mac if mac else None,
             'ipmiIp': None,
             'ipmiUser': None,
             'ipmiPass': None,
             'libvirtName': None,
             'libvirtTemplate': None})
        dea_nodes.append(dea_node)
        dha_nodes.append(dha_node)
    self.write_yaml(self.dha_file, {'nodes': dha_nodes}, False)
    self.write_yaml(self.dea_file, {'nodes': dea_nodes})
    self.write_yaml(self.dea_file, interfaces)
    self.write_yaml(self.dea_file, transformations)
    self.reap_fuel_node_info()
    self.write_yaml(self.dha_file, {'disks': DISKS})
def cleanup_fuel_environments(self, env_list):
    """Force-delete every environment, then wait until none remain."""
    WAIT_LOOP = 60
    SLEEP_TIME = 10
    for env in env_list:
        log('Deleting environment %s' % env[E['id']])
        self.ssh.exec_cmd('fuel env --env %s --delete --force'
                          % env[E['id']])
    for _ in range(WAIT_LOOP):
        env_list = parse(self.ssh.exec_cmd('fuel env list'))
        if not env_list:
            return
        time.sleep(SLEEP_TIME)
    err('Could not erase these environments %s'
        % [(env[E['id']], env[E['status']]) for env in env_list])
def get(file_sign):
    """Fetch and print the snippet referenced by each given file handle."""
    for handle in file_sign:
        print('\n' + handle.name)
        message = common.parse(handle.name)
        if not message:
            print("Bad json file: ", handle.name)
            return
        if "id_data" not in message:
            print("Bad snpt file: ", handle.name)
            return
        response = common.sendingGet(vars.eHost + '/snippets/data/'
                                     + message["id_data"])
        # The content is printed either way; a non-200 status stops the loop.
        print(json.dumps(response["content"], indent=2))
        if response["status_code"] != 200:
            return
def command_alias(self, line):
    """
    /alias <alias> <command> [args]

    Register <alias> as a new command forwarding to <command>, with the
    optional fixed [args] merged into the forwarded arguments.
    """
    arg = common.shell_split(line)
    if len(arg) < 2:
        self.core.information("Alias: Not enough parameters", "Error")
        return
    alias = arg[0]
    command = arg[1]
    tmp_args = arg[2] if len(arg) > 2 else ""
    # Refuse to shadow an existing core or plugin command.
    if alias in self.core.commands or alias in self.commands:
        self.core.information("Alias: command already exists", "Error")
        return
    self.commands[alias] = \
        lambda arg: self.get_command(command)(parse(arg, tmp_args))
    self.add_command(alias, self.commands[alias],
                     "This command is an alias for /%s %s"
                     % (command, tmp_args))
    # Typo fix: message previously read "successfuly".
    self.core.information("Alias /%s successfully created" % alias, "Info")
def intro(self):
    """Start a fresh reap: recreate the DEA/DHA files and fetch configs."""
    delete(self.dea_file)
    delete(self.dha_file)
    self.temp_dir = tempfile.mkdtemp()
    timestamp = time.strftime('%c')
    self.write(self.dea_file,
               DEA_1.format(date=timestamp, comment=self.comment), False)
    self.write(self.dha_file,
               DHA_1.format(date=timestamp, comment=self.comment))
    self.get_env()
    # The deployment config must be downloaded with explicit node ids.
    nodes = parse(exec_cmd('fuel node'))
    real_node_ids = sorted(node[N['id']] for node in nodes)
    self.download_node_config(','.join(real_node_ids))
    self.download_config('settings')
    self.download_config('network')
def reap_environment_info(self):
    """Record environment name, segmentation type and wanted release."""
    network_file = ('%s/network_%s.yaml' % (self.temp_dir, self.env_id))
    network = self.read_yaml(network_file)
    self.write_yaml(self.dea_file, {
        'environment': {
            'name': self.env[E['name']],
            'net_segment_type':
                network['networking_parameters']['segmentation_type']
        }
    })
    # Map the environment's release id back to its human-readable name.
    wanted_release = None
    for rel in parse(exec_cmd('fuel release')):
        if rel[R['id']] == self.env[E['release_id']]:
            wanted_release = rel[R['name']]
    self.write_yaml(self.dea_file, {'wanted_release': wanted_release})
def account():
    """Delete the logged-in account after interactive confirmation."""
    conf_data = common.parse(vars.fileConf)
    if not conf_data:
        print("Bad config file")
        return
    if "jwt" not in conf_data:
        print("You need to login first")
        return
    click.confirm('Do you want to continue?', abort=True)
    response = common.sendingDel(vars.eHost + '/htsp/account', {},
                                 {"Authorization": conf_data["jwt"]})
    print(json.dumps(response["content"], indent=2))
    # Drop the local config only after the server confirmed the deletion.
    if response["status_code"] == 200:
        os.remove(vars.fileConf)
def partTwoMod(instr: str) -> tuple:
    """
    Trace ship and waypoint through the instructions, recording both paths.

    :param instr: raw puzzle input
    :return: (location_history, waypoint_history) — two lists of
             (lat, long) tuples (the previous ``-> int`` annotation was
             incorrect)
    """
    input_list = common.parse(instr)
    waypoint_delta = (1, 10)
    lat = 0  # north/south, pos/neg
    lon = 0  # west/east, pos/neg ('long' shadowed a Python 2 builtin)
    location_history = []
    waypoint_history = []
    for instruction in input_list:
        waypoint_delta, lad, lod = partTwo.translate_movement(
            waypoint_delta, instruction)
        lat += lad
        lon += lod
        location_history.append((lat, lon))
        waypoint_history.append(
            (lat + waypoint_delta[0], lon + waypoint_delta[1]))
    return location_history, waypoint_history
def branch(email, suite):
    """Create a new branch for *email* and store its API key locally."""
    url = vars.eHost + '/htsp/branch'
    conf_data = common.parse(vars.fileConf)
    # Guard chain: every unmet precondition prints a short message and bails.
    if not conf_data:
        print("Bad config file")
        return
    if (not "jwt" in conf_data):
        print("You need to login first")
        return
    if (not email in conf_data["subject"]):
        print("Bad subject")
        return
    if (not "branch" in conf_data):
        print("You need to init first")
        return
    if (not isinstance(conf_data["branch"], dict)):
        print("Bad branches")
        return
    if (not email in conf_data):
        print("You need to verify your subject")
        return
    if (not isinstance(conf_data[email], list)):
        print("You need to init first")
        return
    # The branch name is read interactively from the user.
    branch = input("Your branch name: ")
    if (branch in conf_data["branch"]):
        print("Branch already exists")
        return
    message = {"id_sec": conf_data["subject"][email], "branch": branch,
               "suite": suite}
    headers = {"Authorization": conf_data["jwt"]}
    response = common.sendingPost(url, message, headers)
    if response["status_code"] != 200:
        print(json.dumps(response["content"], indent=2))
        return
    # Persist the new API key and register the branch under this subject.
    conf_data["branch"][branch] = response["content"]["api_key"]
    conf_data[email].append(branch)
    with open(vars.fileConf, 'w') as outfile:
        json.dump(conf_data, outfile, indent=2)
    print("Branch OK!")
def update(key_name):
    """Rotate the API key credentials for the stored key *key_name*."""
    url = vars.eHost + '/htsp/apikey'
    conf_data = common.parse(vars.fileConf)
    if not conf_data:
        print("Bad config file")
        return
    if key_name not in conf_data["branch"]:
        print("Bad key name")
        return
    if "jwt" not in conf_data:
        print("You need to login first")
        return
    message = {"api_key": conf_data["branch"][key_name]}
    headers = {"Authorization": conf_data["jwt"]}
    response = common.sendingPut(url, message, headers)
    # Previously the response body was indexed unconditionally, raising a
    # KeyError on failure; check the status first, as sibling commands do.
    if response["status_code"] != 200:
        print(json.dumps(response["content"], indent=2))
        return
    conf_data["branch"][key_name] = response["content"]["api_key"]
    with open(vars.fileConf, 'w') as outfile:
        json.dump(conf_data, outfile, indent=2)
    print("API updated OK!")
def login():
    """Authenticate against the 3vidence service and cache the JWT."""
    conf_data = common.parse(vars.fileConf)
    if not conf_data:
        print("Bad config file")
        return
    if "email" not in conf_data or "password" not in conf_data:
        print("Bad email/password")
        return
    credentials = {"email": conf_data["email"],
                   "password": conf_data["password"]}
    response = common.sendingPost(vars.eHost + '/auth/login', credentials)
    if response["status_code"] != 200:
        print(json.dumps(response["content"], indent=2))
        return
    # Cache the session token for later authenticated calls.
    conf_data["jwt"] = response["content"]["jwt"]
    with open(vars.fileConf, 'w') as outfile:
        json.dump(conf_data, outfile, indent=2)
    print("Login OK")
def update():
    """Request a server-generated password and store it in the config."""
    conf_data = common.parse(vars.fileConf)
    if not conf_data:
        print("Bad config file")
        return
    if "jwt" not in conf_data:
        print("Bad key name")
        return
    headers = {"Authorization": conf_data["jwt"]}
    # NOTE(review): the body {"": ""} looks like a placeholder — the server
    # appears to generate the password itself; confirm against the API.
    response = common.sendingPut(vars.eHost + '/auth/password', {"": ""},
                                 headers)
    if response["status_code"] != 200:
        print(json.dumps(response["content"], indent=2))
        return
    conf_data["password"] = response["content"]["password"]
    with open(vars.fileConf, 'w') as outfile:
        json.dump(conf_data, outfile, indent=2)
    print("Password updated")
def branch(subject, key_name):
    """Delete the key *key_name* belonging to *subject* after confirmation."""
    url = vars.eHost + '/htsp/branch'
    conf_data = common.parse(vars.fileConf)
    if not conf_data:
        print("Bad config file")
        return
    if "jwt" not in conf_data:
        print("You need to login first")
        return
    # Check the subject exists before indexing it (previously an unknown
    # subject raised KeyError instead of printing a message).
    if subject not in conf_data or not isinstance(conf_data[subject], list):
        print("Bad file config subject/branch")
        return
    if conf_data[subject].count(key_name) == 0:
        print("Bad subject")
        return
    if "branch" not in conf_data:
        print("You need to init first")
        return
    if not isinstance(conf_data["branch"], dict):
        print("Bad branches")
        return
    if key_name not in conf_data["branch"]:
        print("Bad key name")
        return
    # The kid is the first dot-separated segment of the API key.
    message = {"kid": conf_data["branch"][key_name].split('.')[0]}
    headers = {"Authorization": conf_data["jwt"]}
    click.confirm('Do you want to continue?', abort=True)
    response = common.sendingDel(url, message, headers)
    if response["status_code"] != 200:
        print(json.dumps(response["content"], indent=2))
        return
    print(json.dumps(response["content"], indent=2))
    # Remove the key locally only after the server confirmed the deletion.
    del conf_data["branch"][key_name]
    conf_data[subject].remove(key_name)
    with open(vars.fileConf, 'w') as outfile:
        json.dump(conf_data, outfile, indent=2)
def run_deploy(self):
    """Kick off 'deploy-changes' and poll the environment status until it
    is operational, errored, stopped, or the timeout expires."""
    SLEEP_TIME = 60
    LOG_FILE = 'cloud.log'
    log('Starting deployment of environment %s' % self.env_id)
    # 'strings' filters binary control sequences out of the fuel output.
    p = run_proc('fuel --env %s deploy-changes | strings > %s'
                 % (self.env_id, LOG_FILE))
    ready = False
    for i in range(int(self.deploy_timeout)):
        env = parse(exec_cmd('fuel env --env %s' % self.env_id))
        log('Environment status: %s' % env[0][E['status']])
        # Echo the last meaningful log line as progress feedback.
        r, _ = exec_cmd('tail -2 %s | head -1' % LOG_FILE, False)
        if r:
            log(r)
        if env[0][E['status']] == 'operational':
            ready = True
            break
        elif (env[0][E['status']] == 'error'
              or env[0][E['status']] == 'stopped'):
            break
        else:
            time.sleep(SLEEP_TIME)
    p.poll()
    if p.returncode is None:  # idiom fix: was '== None'
        # Keep the log around while the deploy process is still running.
        log('The process deploying the changes has not yet finished.')
        log('''The file %s won't be deleted''' % LOG_FILE)
    else:
        delete(LOG_FILE)
    if ready:
        log('Environment %s successfully deployed' % self.env_id)
    else:
        self.collect_error_logs()
        err('Deployment failed, environment %s is not operational'
            % self.env_id)
def transform(url):
    """
    Download an XML file, convert div1/div2/div3 into CTS-style <div>
    elements and register the matching refsDecl citations.

    :param url: A Perseus Github Raw address
    :type url: str
    """
    lang, urn, target, parsed = common.parse(url)
    # URNs that are neither Greek nor Latin are treated as translations.
    if "grc" not in urn and "lat" not in urn:
        type_text = "translation"
    else:
        type_text = "edition"

    # The same retagging was previously copy-pasted three times.
    div1_group, div1_subtype = _retag_divs(parsed, "//div1")
    div2_group, div2_subtype = _retag_divs(parsed, "//div2")
    div3_group, div3_subtype = _retag_divs(parsed, "//div3")

    base = "/tei:TEI/tei:text/tei:body/tei:div[@type='" + type_text + "']"
    citations = []
    # Deepest level first; each level is cited only when it occurs.
    if len(div3_group) > 0:
        citations.append(MyCapytain.resources.texts.tei.Citation(
            name=div3_subtype,
            refsDecl=base + "/tei:div[@n='$1']/tei:div[@n='$2']"
                            "/tei:div[@n='$3']"
        ))
    if len(div2_group) > 0:
        citations.append(MyCapytain.resources.texts.tei.Citation(
            name=div2_subtype,
            refsDecl=base + "/tei:div[@n='$1']/tei:div[@n='$2']"
        ))
    if len(div1_group) > 0:
        # Previously this citation was appended unconditionally, raising a
        # NameError when the document contained no div1 at all.
        citations.append(MyCapytain.resources.texts.tei.Citation(
            name=div1_subtype,
            refsDecl=base + "/tei:div[@n='$1']"
        ))
    try:
        common.write_and_clean(urn, lang, parsed, citations, target)
    except Exception as E:
        print(urn + " failed")
        print(E)


def _retag_divs(parsed, xpath):
    """Rename every element matched by *xpath* to <div type="textpart">,
    moving its former @type to @subtype and numbering unnumbered elements
    sequentially.

    Returns (matched elements, last element's original @type or None).
    """
    group = parsed.xpath(xpath)
    subtype = None
    for i, element in enumerate(group, start=1):
        element.tag = "div"
        subtype = element.get("type")
        element.set("subtype", subtype)
        element.set("type", "textpart")
        if "n" not in dict(element.attrib):
            element.set("n", str(i))
    return group, subtype
def patch(resource, **lookup):
    """Perform a document patch/update. Updates are first validated against
    the resource schema. If validation passes, the document is updated and
    an OK status update is returned. If validation fails, a set of
    validation issues is returned.

    :param resource: the name of the resource to which the document belongs.
    :param **lookup: document lookup query.

    .. versionchanged:: 0.0.7
       Support for Rate-Limiting.

    .. versionchanged:: 0.0.6
       ETag is now computed without the need of an additional db lookup

    .. versionchanged:: 0.0.5
       Support for 'aplication/json' Content-Type.

    .. versionchanged:: 0.0.4
       Added the ``requires_auth`` decorator.

    .. versionchanged:: 0.0.3
       JSON links. Superflous ``response`` container removed.
    """
    payload = payload_()
    if len(payload) > 1:
        # only one update-per-document supported
        abort(400)
    original = get_document(resource, **lookup)
    if not original:
        # not found
        abort(404)
    schema = app.config["DOMAIN"][resource]["schema"]
    validator = app.validator(schema, resource)
    object_id = original[config.ID_FIELD]
    last_modified = None
    etag = None
    issues = []
    # Python 2: dict.keys() is a list, so [0] indexing is valid here.
    key = payload.keys()[0]
    value = payload[key]
    response_item = {}
    try:
        updates = parse(value, resource)
        validation = validator.validate_update(updates, object_id)
        if validation:
            # the mongo driver has a different precision than the python
            # datetime. since we don't want to reload the document once it
            # has been updated, and we still have to provide an updated
            # etag, we're going to update the local version of the
            # 'original' document, and we will use it for the etag
            # computation.
            original.update(updates)
            # some datetime precision magic
            updates[config.LAST_UPDATED] = original[config.LAST_UPDATED] = \
                datetime.utcnow().replace(microsecond=0)
            etag = document_etag(original)
            app.data.update(resource, object_id, updates)
            response_item[config.ID_FIELD] = object_id
            last_modified = response_item[config.LAST_UPDATED] = \
                original[config.LAST_UPDATED]
            # metadata
            response_item["etag"] = etag
            response_item["_links"] = \
                {"self": document_link(resource, object_id)}
        else:
            issues.extend(validator.errors)
    except ValidationError, e:
        # TODO should probably log the error and abort 400 instead (when we
        # got logging)
        issues.append(str(e))
def listen(self):
    """Receive loop: parse each packet and broadcast its commands."""
    while self.connected:
        payload = self.socket.recv(common.packetSize)
        for cmd, params in common.parse(payload):
            # Echo the command name for tracing.
            print(cmd)
            self.event.notify("cmdRecv", cmd, *params)
def process_IN_MODIFY(self, event):
    '''
    Handles a file modification event in currently-read files as well as
    in newly-created ones, typically when one or more event records have
    been appended to the accounting file. The new records are sent to the
    database here.
    '''
    # Issue: not sure how to deal with the currently-read file if it gets
    # overwritten. Two options:
    # 1. The file pointer should be reset because it's a new file
    # altogether.
    # 2. The file pointer shouldn't be reset because it's the same data
    # which has been overwritten in the process of being added new lines
    # (e.g. vi).
    # Let's go for the second option, which the following does without even
    # needing any CREATE event.
    try:
        l = latest(self.acctdir)
        if l == self.acctfile:
            # There's no new accounting file: just drain the current one.
            self._read_and_insert(l)
        else:
            # There's a new accounting file (or we weren't reading any).
            # Finish reading the current one if we were reading one.
            if self.acctfile is not None:
                self._read_and_insert(self.acctfile)
            # Switch to the new file and read it from the start.
            self.acctfile = l
            self.offset = 0
            self.logger.info("Will now be watching %s" % self.acctfile)
            self._read_and_insert(self.acctfile)
    except common.AcctError:
        # As raised by latest.
        # No need to fuss if a file we're not interested in gets changed.
        pass

def _read_and_insert(self, name):
    '''Parse new records from acctdir/name and insert them into the
    database, updating the insert/error counters and the heartbeat.

    (This body was previously copy-pasted three times in
    process_IN_MODIFY; the file is now also closed on exceptions.)
    '''
    f = open(self.acctdir + '/' + name)
    try:
        recs = common.parse(f, self)
        insertc, errorc, heartbeat = \
            common.insert(self.logger, common.CETAB, recs,
                          self.connection, self.insertc, self.errorc,
                          self.heartbeat, self.heartbeatdelta)
    finally:
        f.close()  # previously leaked when parse/insert raised
    self.insertc = insertc
    self.errorc = errorc
    self.heartbeat = heartbeat
def post(resource):
    """ Adds one or more documents to a resource. Each document is validated
    against the domain schema. If validation passes the document is inserted
    and ID_FIELD, LAST_UPDATED and DATE_CREATED along with a link to the
    document are returned. If validation fails, a list of validation issues
    is returned.

    :param resource: name of the resource involved.

    .. versionchanged:: 0.0.6
       Support for bulk inserts.

       Please note: validation constraints are checked against the database,
       and not between the payload documents themselves. This causes an
       interesting corner case: in the event of a multiple documents payload
       where two or more documents carry the same value for a field where the
       'unique' constraint is set, the payload will validate successfully, as
       there are no duplicates in the database (yet). If this is an issue,
       the client can always send the documents once at a time for insertion,
       or validate locally before submitting the payload to the API.

    .. versionchanged:: 0.0.5
       Support for 'application/json' Content-Type .
       Support for 'user-restricted resource access'.

    .. versionchanged:: 0.0.4
       Added the ``requires_auth`` decorator.

    .. versionchanged:: 0.0.3
       JSON links. Superflous ``response`` container removed.
    """
    # One truncated timestamp shared by every document of this request
    # (microseconds dropped to match the storage layer's precision).
    date_utc = datetime.utcnow().replace(microsecond=0)
    schema = app.config['DOMAIN'][resource]['schema']
    validator = app.validator(schema, resource)
    documents = []
    issues = []
    # validation, and additional fields
    payl = payload()
    for key, value in payl.items():
        document = []
        doc_issues = []
        try:
            document = parse(value, resource)
            validation = validator.validate(document)
            if validation:
                # validation is successful
                document[config.LAST_UPDATED] = \
                    document[config.DATE_CREATED] = date_utc
                # if 'user-restricted resource access' is enabled and there's
                # an Auth request active, inject the username into the
                # document
                username_field = \
                    app.config['DOMAIN'][resource]['auth_username_field']
                if username_field and request.authorization:
                    document[username_field] = request.authorization.username
            else:
                # validation errors added to list of document issues
                doc_issues.extend(validator.errors)
        except ValidationError as e:
            raise e
        except Exception as e:
            # most likely a problem with the incoming payload, report back
            # to the client as if it was a validation issue
            doc_issues.append(str(e))
        issues.append(doc_issues)
        if len(doc_issues) == 0:
            documents.append(document)
    # bulk insert
    if len(documents):
        ids = app.data.insert(resource, documents)
    # build response payload; issues list is parallel to payl.keys()
    response = {}
    for key, doc_issues in zip(payl.keys(), issues):
        response_item = {}
        if len(doc_issues):
            response_item['status'] = config.STATUS_ERR
            response_item['issues'] = doc_issues
        else:
            response_item['status'] = config.STATUS_OK
            # ids/documents are consumed in insertion order.
            response_item[config.ID_FIELD] = ids.pop(0)
            document = documents.pop(0)
            response_item[config.LAST_UPDATED] = document[config.LAST_UPDATED]
            response_item['etag'] = document_etag(document)
            response_item['_links'] = \
                {'self': document_link(resource,
                                       response_item[config.ID_FIELD])}
        response[key] = response_item
    return response, None, None, 200
def patch(resource, **lookup):
    """Perform a document patch/update. Updates are first validated against
    the resource schema. If validation passes, the document is updated and
    an OK status update is returned. If validation fails, a set of validation
    issues is returned.

    :param resource: the name of the resource to which the document belongs.
    :param **lookup: document lookup query.
    """
    if len(request.form) > 1 or len(request.form) == 0:
        # only one update-per-document supported
        abort(400)

    original = get_document(resource, **lookup)
    if not original:
        # not found
        abort(404)

    schema = app.config['DOMAIN'][resource]['schema']
    validator = app.validator(schema, resource)

    object_id = original[config.ID_FIELD]
    last_modified = None
    etag = None

    issues = list()

    # NOTE(review): .keys()[0] only works on Python 2, where keys() returns
    # a list; Python 3 would need list(request.form)[0].
    key = request.form.keys()[0]
    value = request.form[key]

    response_item = dict()

    try:
        updates = parse(value, resource)
        validation = validator.validate_update(updates, object_id)
        if validation:
            updates[config.LAST_UPDATED] = datetime.utcnow()
            app.data.update(resource, object_id, updates)
            # TODO computing etag without reloading the document
            # would be ideal. However, for reasons that need further
            # investigation, an etag computed on original.update(updates)
            # won't provide the same result as the saved document.
            # this has probably something to do with a) the different
            # precision between the BSON (milliseconds) python datetime and,
            # b), the string representation of the documents (being dicts)
            # not matching.
            #
            # TL;DR: find a way to compute a reliable etag without reloading
            updated = app.data.find_one(resource,
                                        **{config.ID_FIELD: object_id})
            # drop tzinfo so the etag is computed over a naive datetime
            updated[config.LAST_UPDATED] = \
                updated[config.LAST_UPDATED].replace(tzinfo=None)
            etag = document_etag(updated)

            response_item[config.ID_FIELD] = object_id
            last_modified = response_item[config.LAST_UPDATED] = \
                updated[config.LAST_UPDATED]

            # metadata
            response_item['etag'] = etag
            response_item['link'] = document_link(resource, object_id)
        else:
            issues.extend(validator.errors)
    except ValidationError, e:  # Python 2 'except X, e' syntax
        # TODO should probably log the error and abort 400 instead (when we
        # got logging)
        issues.append(str(e))
    # NOTE(review): the visible chunk ends here, inside the function — the
    # response assembly / return statement presumably follows below.
def get_env(self):
    """Fetch the (single) Fuel environment and cache it on the instance.

    Sets ``self.env`` to the environment row and ``self.env_id`` to its id.
    Calls ``err`` when the number of environments is not exactly one.
    """
    env_list = parse(exec_cmd('fuel env'))
    # BUGFIX: the original check was `> 1`, which let an empty list fall
    # through to env_list[0] and raise IndexError. The error message says
    # "Not exactly one environment", so both zero and multiple are errors.
    if len(env_list) != 1:
        err('Not exactly one environment')
    self.env = env_list[0]
    self.env_id = self.env[E['id']]
def post(resource):
    """ Insert one or more documents into a resource.

    Every payload item is validated against the resource's domain schema.
    Items that validate are stamped with LAST_UPDATED / DATE_CREATED,
    stored, and reported back with their new ID_FIELD and a self link;
    items that fail get their validation issues reported instead.

    :param resource: name of the resource involved.

    .. versionchanged:: 0.0.5
       Support for 'application/json' Content-Type.
       Support for 'user-restricted resource access'.

    .. versionchanged:: 0.0.4
       Added the ``requires_auth`` decorator.

    .. versionchanged:: 0.0.3
       JSON links. Superfluous ``response`` container removed.
    """
    now = datetime.utcnow()
    resource_def = app.config['DOMAIN'][resource]
    validator = app.validator(resource_def['schema'], resource)

    result = {}
    for key, value in payload().items():
        item = {}
        item_issues = []
        try:
            doc = parse(value, resource)
            if validator.validate(doc):
                doc[config.LAST_UPDATED] = doc[config.DATE_CREATED] = now

                # if 'user-restricted resource access' is enabled and an
                # Auth request is active, inject the username into the doc
                username_field = resource_def['auth_username_field']
                if username_field and request.authorization:
                    doc[username_field] = request.authorization.username

                doc[config.ID_FIELD] = app.data.insert(resource, doc)
                item[config.ID_FIELD] = doc[config.ID_FIELD]
                item[config.LAST_UPDATED] = doc[config.LAST_UPDATED]
                item['_links'] = {
                    'self': document_link(resource, item[config.ID_FIELD])}
            else:
                item_issues.extend(validator.errors)
        except ValidationError as e:
            raise e
        except Exception as e:
            # most likely a malformed payload; report it back to the client
            # as if it were a validation issue
            item_issues.append(str(e))

        if item_issues:
            item['issues'] = item_issues
            item['status'] = config.STATUS_ERR
        else:
            item['status'] = config.STATUS_OK
        result[key] = item

    return result, None, None, 200
# Path to the configuration file is the single expected CLI argument.
cfgpath = sys.argv[1]
print("Using config file: " + cfgpath)

# Schema handed to common.parse: per-section (name, type) pairs that must be
# present in the config file.
required_fields = {
    "client": [("trustedca", "string")],
    "server": [("interface", "string"), ("port", "int"),
               ("keypath", "string"), ("certpath", "string")]
}
# Per-section (name, type, default) triples filled in when missing.
optional_fields = {
    "server": [("logfile", "string", None),
               ("outputdir", "string", "."),
               ("maxsize", "int", 50)]
}

try:
    config = common.parse(cfgpath, required_fields, optional_fields)
except Exception as e:
    # Any parse/validation failure is fatal for a server start-up.
    print(str(e), file=sys.stderr)
    sys.exit(-1)

# maxsize is configured in MiB; convert to bytes for internal use.
config["maxsize"] *= 1024 * 1024

logger = common.getLogger(logfile=config['logfile'])
# Server-side TLS context: present our cert/key, trust the configured CA.
context = common.getContext(protocol=ssl.PROTOCOL_TLS_SERVER,
                            certpath=config['certpath'],
                            trustedca=config['trustedca'],
                            keypath=config['keypath'])

logger.info("parsed config: ")
for k, v in config.items():
    logger.info("{}: {}".format(k, v))
# ('napi_gro_receive', 1), # ('udp_send_skb', 2), # ('tcp_transmit_skb', 2), # ('br_handle_frame_finish', 0), # ('netif_receive_skb_internal', 0), # ('ovs_vport_receive', 0), # ('ovs_execute_actions', 0), # ('e100_xmit_frame', 3), # ('ixgbe_xmit_frame', 3), # ('ip_rcv_finish', 0), # ('ip_forward', 0), # ('ip_forward_finish', 0), # ('ixgbevf_xmit_frame', 3), # ] SKB = 0 SKB_HEAD = 1 DATA = 2 SADDR = 3 DADDR = 4 PROTO = 5 SPORT = 6 DPORT = 7 FUNC_ID = 8 TSTAMP = 9 if __name__ == '__main__': filename = sys.argv[1] outfile = sys.argv[2] func_table, spec_functable = common.parse('config.json') run(filename, outfile, func_table)
import numpy as np
from math import sqrt
from common import parse, is_corner

# Parsed puzzle input: a mapping of tile id -> tile. The assembled image is
# a square grid, so its side length is the square root of the tile count.
tiles = parse()
width = int(sqrt(len(tiles)))
# Any corner tile can seed the assembly; take the first one found.
top_left = next(t for t in tiles.values() if is_corner(t))


def fliplr(tile):
    """Mirror the tile horizontally and swap neighbour slots 2 and 3
    (presumably left/right — consistent with a left-right flip)."""
    tile.data = np.fliplr(tile.data)
    old_2, old_3 = tile.neighbours[2], tile.neighbours[3]
    tile.neighbours[2] = old_3
    tile.neighbours[3] = old_2


def flipud(tile):
    """Mirror the tile vertically and swap neighbour slots 0 and 1
    (presumably top/bottom — consistent with an up-down flip)."""
    tile.data = np.flipud(tile.data)
    old_0, old_1 = tile.neighbours[0], tile.neighbours[1]
    tile.neighbours[0] = old_1
    tile.neighbours[1] = old_0


def rot90l(tile):
    """Rotate the tile 90 degrees counter-clockwise (np.rot90 default)
    and remap the neighbour slots accordingly."""
    tile.data = np.rot90(tile.data)
    old = tile.neighbours
    tile.neighbours = {0: old[3], 1: old[2], 2: old[0], 3: old[1]}
from common import parse

A = ord('a')
Z = ord('z')


def decrypt(word, iterations):
    """Shift-cipher decrypt: rotate each lowercase letter of *word* forward
    by *iterations* positions (wrapping in a..z); '-' decodes to a space."""
    out = []
    for ch in word:
        if ch == '-':
            out.append(' ')
        else:
            out.append(chr(A + (ord(ch) - A + iterations) % (Z - A + 1)))
    return ''.join(out)


# Scan the puzzle input for the room whose decrypted name identifies the
# storage room, then report its sector id.
with open('input.txt') as data:
    for line in data:
        encrypted, sector, _ = parse(line)
        if decrypt(encrypted, int(sector)) == 'northpole object storage':
            print(sector)
            break