def heartbeat():
    """
    The main function that updates the topology configurations.
    """
    is_modified = False
    ia_list = utils._get_my_asid()
    resp, err = request_server(ia_list)
    if err:
        logging.error("Failed to connect to SCION-COORD server: \n%s" % err)
        exit(1)
    elif resp.headers['content-type'] == 'application/json; charset=utf-8':
        resp_dict = json.loads(resp.content.decode('utf8').replace("'", '"'))
        logging.info("Received answer from Heartbeat function: \n%s" % resp_dict)
        ia_list = resp_dict["IAList"]
        new_br_list = []
        for ia in ia_list:
            connection_dict = ia["Connections"]
            _isd = ia["ISD"]
            _as = ia["AS"]
            ia = ISD_AS.from_values(_isd, _as)
            as_obj, original_topo = utils.load_topology(ia)
            topo = original_topo
            # Check for created, updated or removed neighbors
            for connection in connection_dict:
                if connection["Status"] == CREATE:
                    is_modified = True
                    topo = utils._add_br(connection, topo)
                    new_br_list.append(utils._get_br_id(connection, topo)[0])
                elif connection["Status"] == UPDATE:
                    is_modified = True
                    topo = utils._update_br(connection, topo)
                elif connection["Status"] == REMOVE:
                    is_modified = True
                    topo = utils._remove_br(connection, topo)
        if not is_modified:
            # No change
            logging.info("Nothing changed, not restarting SCION")
        else:
            utils.generate_local_gen(ia, as_obj, topo)
            logging.info("Restarting SCION")
            utils.restart_scion()
    # In case we receive the gen folder from the coordinator
    elif resp.headers['content-type'] == 'application/gzip':
        logging.info("Received gen folder")
        utils.parse_response(resp)
        logging.info("Starting SCION!")
        utils.restart_scion()
    else:
        # Received something else
        # TODO UPDATE BOX ?
        pass
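A minimal sketch of the coordinator payload shape that heartbeat() parses: the key names (IAList, ISD, AS, Connections, Status) come from the parsing code above, while the concrete values and the string form of the CREATE/UPDATE/REMOVE constants are assumptions.

# Hypothetical heartbeat response body (values are illustrative assumptions):
example_resp_dict = {
    "IAList": [{
        "ISD": 1,
        "AS": 1001,
        "Connections": [
            {"Status": "CREATE"},   # handled by utils._add_br
            {"Status": "UPDATE"},   # handled by utils._update_br
            {"Status": "REMOVE"},   # handled by utils._remove_br
        ],
    }],
}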
def parse_domjudge(contest_id):
    response = utils.get(f'contests/{contest_id}/problems')
    problems = utils.parse_response(response)
    config = list()
    for problem in problems:
        pid = problem['id']
        response = utils.get(f'contests/{contest_id}/problems/{pid}')
        problem_info = utils.parse_response(response)
        short_name = problem_info['short_name']
        tl = problem['time_limit']
        print(f'{short_name}, {tl} seconds')
        # DOMjudge does not return the memory limit, so default to 2048 MB
        config.append((tl, 1 << 11))
    return config
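A hypothetical invocation of parse_domjudge; the contest id '2' is an assumption, and each returned pair is (time limit in seconds, assumed 2048 MB memory limit).

if __name__ == '__main__':
    for time_limit, memory_limit in parse_domjudge('2'):  # '2' is illustrative
        print(f'TL={time_limit}s, ML={memory_limit}MB')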
def get_data(self, videos_ids):
    if len(videos_ids) == len(set(videos_ids)):
        logger.info("Got %s videos to process" % len(videos_ids))
    else:
        logger.info("Got %s videos to process. Only %s are unique"
                    % (len(videos_ids), len(set(videos_ids))))
    data = {video_id: [] for video_id in videos_ids}
    for video_id in set(videos_ids):
        try:
            response = self.make_request(video_id)
            if response.status_code in (200, 201):
                picture_links = parse_response(video_id, response)
                data[video_id] = picture_links
                if picture_links:
                    self.links += 1
            else:
                logger.error("Failed to get thumbup's link for video_id=%s - %s"
                             % (video_id, response.text))
        except Exception as e:
            logger.error("Failed to get thumbup's link for video_id=%s - %s"
                         % (video_id, e))
    logger.info("Found thumbups for %s/%s videos" % (self.links, len(set(videos_ids))))
    return data
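A hedged usage sketch for get_data; VideoThumbFetcher is an assumed name for the enclosing class, and the video ids are placeholders. The duplicate id shows that only unique ids are actually requested.

fetcher = VideoThumbFetcher()  # assumed class name, not from the original
links_by_id = fetcher.get_data(['abc123', 'abc123', 'def456'])
# 'abc123' is requested once; the result dict is keyed by video id,
# so duplicate inputs collapse to a single entry.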
def delete(self, url):
    resp = self.s.delete(
        self.config['P2P_API_ROOT'] + url,
        headers=self.http_headers(),
        verify=False)
    self._check_for_errors(resp, url)
    return utils.parse_response(resp.content)
def getRegionChildren(state, county, city, childtype):
    search_params = {
        "city": city,
        "state": state,
        "childtype": childtype,
        "county": county,
        "zws_id": Zillow_API_key,
    }
    region_tags = ('id', 'name', 'zindex', 'latitude', 'longitude')
    region_cols = ['id', 'name', 'zindex', 'latitude', 'longitude']
    # Get starting home data
    r = utils.get_response(api='regionChildren', params=search_params)
    home = utils.parse_response(response=r, api='regionChildren',
                                tags=region_tags, cols=region_cols)
    return home
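A hypothetical call; the state/county/city values and the 'neighborhood' childtype are illustrative, not taken from the original script.

neighborhoods = getRegionChildren(state='WA', county='King', city='Seattle',
                                  childtype='neighborhood')
print(neighborhoods)  # shape depends on what utils.parse_response returns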
def init_box():
    """
    Calls the init_box API from the SCION-coord.
    Receives the gen folder and starts SCION.
    """
    # Find MAC and IP address
    ip_address = ni.ifaddresses(INTERFACE)[ni.AF_INET][0]['addr']
    mac_address = ni.ifaddresses(INTERFACE)[ni.AF_LINK][0]['addr']
    conn_results = utils.test_connections()
    logging.info("Connection test results: %s \n", str(conn_results))
    start_port, free_ports = utils.connection_results_2_free_ports(conn_results)
    resp, err = call_init(mac_address, ip_address, start_port, free_ports)
    if err:
        logging.error("Failed to connect to SCION-COORD server: \n %s \n", err)
        exit(1)
    elif resp.status_code == 200:
        if resp.headers['content-type'] == 'application/json; charset=utf-8':
            # We have received the list of potential neighbors
            resp_dict = json.loads(resp.content.decode('utf8').replace("'", '"'))
            utils.save_credentials(resp_dict)
            logging.info("Received list of potential neighbors and credentials "
                         "from SCION-COORD: %s", str(resp_dict))
            if not resp_dict["PotentialNeighbors"]:
                logging.info("No potential neighbors!")
                exit(1)
            connection_results = test_links(resp_dict["PotentialNeighbors"])
            resp_dict["PotentialNeighbors"] = connection_results
            connect_box(resp_dict)
        elif resp.headers['content-type'] == 'application/gzip':
            logging.info("Received gen folder")
            utils.parse_response(resp)
            logging.info("Starting SCION!")
            utils.start_scion()
        else:
            # Received something else
            # TODO UPDATE ?
            pass
    else:
        logging.error("Wrong status code %s", resp.status_code)
        exit(1)
def connect_box(dictionary):
    """
    Calls the connect_box API, extracts the gen folder and starts SCION.
    :param dictionary: Dictionary with the connection results + credentials
    """
    resp, err = call_connect(dictionary)
    if err:
        logging.error("Failed to connect to SCION-COORD server: %s" % err)
        exit(1)
    elif resp.headers['content-type'] == 'application/gzip':
        logging.info("Received gen folder")
        utils.parse_response(resp)
        logging.info("Starting SCION!")
        utils.start_scion()
        exit(0)
    else:
        logging.error("Did not receive gen folder %s", resp.headers['content-type'])
        exit(1)
def main_scenario(sides, expected_result):
    try:
        response = check_triangle(sides)
    # A bare except also swallows KeyboardInterrupt; catch Exception instead
    except Exception:
        pytest.fail(msg="Could not connect to service. Check connectivity.",
                    pytrace=False)
    if not valid_xml(response):
        pytest.fail(msg="Invalid XML response", pytrace=False)
    actual_result = parse_response(response)
    assert actual_result == expected_result
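One way main_scenario might be driven from pytest; the triangle sides and expected labels below are illustrative assumptions, not values from the real suite.

@pytest.mark.parametrize('sides, expected_result', [
    ((3, 4, 5), 'scalene'),        # assumed label returned by the service
    ((2, 2, 2), 'equilateral'),    # assumed label returned by the service
])
def test_triangle(sides, expected_result):
    main_scenario(sides, expected_result)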
def put_json(self, url, data):
    payload = json.dumps(utils.parse_request(data))
    resp = self.s.put(
        self.config['P2P_API_ROOT'] + url,
        data=payload,
        headers=self.http_headers('application/json'),
        verify=False)
    resp_log = self._check_for_errors(resp, url)
    try:
        return utils.parse_response(resp.json())
    except ValueError:
        log.error('JSON VALUE ERROR ON SUCCESSFUL RESPONSE %s' % resp_log)
        raise
def put_json(self, url, data):
    resp = requests.put(
        self.config['P2P_API_ROOT'] + url,
        data=json.dumps(data),
        headers=self.http_headers('application/json'),
        verify=False)
    if self.debug:
        log.debug('URL: %s' % url)
        log.debug('HEADERS: %s' % self.http_headers())
        log.debug('PAYLOAD: %s' % json.dumps(data))
        log.debug('STATUS: %s' % resp.status_code)
        log.debug('RESPONSE_BODY: %s' % resp.content)
    if resp.status_code >= 500:
        resp.raise_for_status()
    elif resp.status_code >= 400:
        raise P2PException(resp.content)
    return utils.parse_response(resp.json())
def get(self, url, query=None, if_modified_since=None):
    if query is not None:
        url += '?' + utils.dict_to_qs(query)
    resp = self.s.get(
        self.config['P2P_API_ROOT'] + url,
        headers=self.http_headers(if_modified_since=if_modified_since),
        verify=False)
    resp_log = self._check_for_errors(resp, url)
    try:
        ret = utils.parse_response(resp.json())
        if 'ETag' in resp.headers:
            ret['etag'] = resp.headers['ETag']
        return ret
    except ValueError:
        log.error('JSON VALUE ERROR ON SUCCESSFUL RESPONSE %s' % resp_log)
        raise
def put_json(self, url, data):
    payload = json.dumps(utils.parse_request(data))
    resp = self.s.put(
        self.config['P2P_API_ROOT'] + url,
        data=payload,
        headers=self.http_headers('application/json'),
        verify=False)
    resp_log = self._check_for_errors(resp, url)
    # resp.content is bytes, so test for emptiness rather than comparing to ""
    if not resp.content and resp.status_code < 400:
        return {}
    else:
        try:
            return utils.parse_response(resp.json())
        except Exception:
            log.error('THERE WAS AN EXCEPTION WHILE TRYING TO PARSE YOUR JSON: %s'
                      % resp_log)
            raise
def get(self, url, query=None, if_modified_since=None):
    if query is not None:
        url += '?' + utils.dict_to_qs(query)
    log.debug("GET: %s" % url)
    resp = self.s.get(
        self.config['P2P_API_ROOT'] + url,
        headers=self.http_headers(if_modified_since=if_modified_since),
        verify=False)
    resp_log = self._check_for_errors(resp, url)
    try:
        ret = utils.parse_response(resp.json())
        if 'ETag' in resp.headers:
            ret['etag'] = resp.headers['ETag']
        return ret
    except ValueError:
        log.error('JSON VALUE ERROR ON SUCCESSFUL RESPONSE %s' % resp_log)
        raise
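A hedged usage sketch for the get() wrapper above; the client instance and the '/content_items.json' path are assumptions. The query dict is serialized by utils.dict_to_qs, and the returned dict carries an 'etag' key whenever the server sent one.

items = client.get('/content_items.json', query={'limit': 10})  # hypothetical path
if 'etag' in items:
    log.debug('ETag for cache validation: %s' % items['etag'])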
def post_json(self, url, data):
    payload = json.dumps(utils.parse_request(data))
    log.debug("POST: %s" % url)
    resp = self.s.post(
        self.config['P2P_API_ROOT'] + url,
        data=payload,
        headers=self.http_headers('application/json'),
        verify=False)
    resp_log = self._check_for_errors(resp, url)
    # resp.content is bytes, so test for emptiness rather than comparing to ""
    if not resp.content and resp.status_code < 400:
        return {}
    else:
        try:
            return utils.parse_response(resp.json())
        except Exception:
            log.error('EXCEPTION IN JSON PARSE: %s' % resp_log)
            raise
def get(self, url, query=None):
    if query is not None:
        url += '?' + utils.dict_to_qs(query)
    resp = requests.get(
        self.config['P2P_API_ROOT'] + url,
        headers=self.http_headers(),
        verify=False)
    if self.debug:
        log.debug('URL: %s' % url)
        log.debug('HEADERS: %s' % self.http_headers())
        log.debug('STATUS: %s' % resp.status_code)
        log.debug('RESPONSE_BODY: %s' % resp.content)
    if resp.status_code >= 500:
        resp.raise_for_status()
    elif resp.status_code >= 400:
        try:
            data = resp.json()
        except ValueError:
            data = resp.text
        raise P2PException(resp.content, data)
    return utils.parse_response(resp.json())
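This requests-based variant raises P2PException on 4xx responses instead of returning; a caller might handle that as sketched below (the '/some/endpoint' path is illustrative).

try:
    data = client.get('/some/endpoint')  # illustrative path
except P2PException as exc:
    # exc carries the raw body and, when parseable, the decoded JSON
    log.error('P2P request failed: %s' % exc)
    data = None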
home_cols = ['street', 'zipcode', 'city', 'state', 'latitude', 'longitude',
             'currency1', 'valuation_high', 'currency2', 'valuation_low',
             'currency3', 'zestimate', 'zpid']
comp_cols = ['city', 'latitude', 'longitude', 'state', 'street', 'zipcode',
             'valuation_high', 'currency1', 'valuation_low', 'currency2',
             'zestimate', 'currency3', 'zpid']

# Get starting home data
r = utils.get_response(api='search', params=search_params)
home = utils.parse_response(response=r, api='search',
                            tags=search_tags, cols=home_cols)

# Get comps for the original property
comp_response = utils.get_response(api='comp', params=search_params)
comps = utils.parse_response(response=comp_response, api='comp',
                             tags=comp_tags, cols=comp_cols)

# Combine data and write to csv
home = home[comp_cols]
data_list = [home, comps]
for participant in to_process:
    teams.append({
        'id': participant['Team ID'],
        'group_ids': [args.group_id],
        'name': participant['队伍名'],          # CSV column: team name
        'display_name': participant['队伍名'],
        'organization_id': 'BUAA',
        'location': participant['座位'],        # CSV column: seat
    })
    accounts.append([
        'team',
        participant['姓名'],    # CSV column: name
        participant['用户名'],  # CSV column: username
        participant['密码'],    # CSV column: password
    ])

response = utils.post(
    'users/teams',
    files={'json': ('teams.json', json.dumps(teams))})
print(utils.parse_response(response))

str_out = io.StringIO()
csv.register_dialect('tsv_dialect', delimiter='\t')
writer = csv.writer(str_out, dialect='tsv_dialect')
writer.writerows(accounts)
response = utils.post('users/accounts', files={
    'tsv': ('accounts.tsv', str_out.getvalue()),
})
print(utils.parse_response(response))
with open('contest.xml', 'r') as fpin:
    root = ET.parse(fpin).getroot()
problems_node = root.find('problems').findall('problem')
problems = list()
for problem in problems_node:
    short_name = problem.attrib['url'].split('/')[-1]
    problems.append(short_name)

if args.contest_id is None:
    response = utils.post('contests', files={
        'yaml': ('contest.yaml', yaml.dump(contest_config)),
    })
    contest_id = utils.parse_response(response)
    exist_problems = set()
else:
    contest_id = args.contest_id
    response = utils.get(f'contests/{contest_id}/problems')
    exist_problems = utils.parse_response(response)
    exist_problems = {problem['externalid'] for problem in exist_problems}

os.chdir('domjudge')
for id, problem in enumerate(problems):
    with open(f'{problem}.zip', 'rb') as fpin:
        problem_index = chr(id + ord('A'))
        name = f'{problem_index}-{problem}'
        # if response.status_code == 400 and 'externalid' in response.json()['message']:
        #     print(f'Problem {id} {problem} already exists, ignored...')
        if name in exist_problems:
def parse_response(self, xml, url):
    return utils.parse_response(xml, self.secret, url)
import csv
import os
import random
import shutil
import subprocess
import sys
import xml.etree.ElementTree as ET

from config import *
import utils

students = list()
with open('../output/participant_info.csv', 'r') as fpin:
    info = list(csv.reader(fpin))[1:]
    for student in info:
        students.append((student[2], student[3]))
random.shuffle(students)

response = utils.get(f'contests/{locust_contest_id}/problems')
domjudge_problems = utils.parse_response(response)
problems_id = [p['id'] for p in domjudge_problems]

with open(os.path.join(contest_path, 'contest.xml'), 'r') as fpin:
    root = ET.parse(fpin).getroot()

codes = list()
problems_node = root.find('problems').findall('problem')
for problem in problems_node:
    short_name = problem.attrib['url'].split('/')[-1]
    code = list()
    for root, dirs, files in os.walk(
            os.path.join(contest_path, f'domjudge/{short_name}/submissions')):
        for file in files:
            with open(os.path.join(root, file)) as fpin:
                ext = os.path.splitext(file)[-1]
                language = {
                    '.c': 'c',