def main():
    """Collect EC2 instance-profile policies for every region.

    Writes the aggregate to output/instance_profile_policies.json and
    prints it to stdout.
    """
    session = get_session()
    iam_client = session.client('iam')
    collected = {}
    for region in get_all_regions(session):
        collected[region] = {}
        ec2_client = session.client('ec2', region_name=region)
        print('Processing region: %s' % region)
        profiles = yield_handling_errors(
            get_instance_profiles, ec2_client, iam_client)
        for index, profile_policy in enumerate(profiles):
            collected[region][index] = profile_policy
            # One dot per profile as a lightweight progress indicator.
            sys.stdout.write('.')
            sys.stdout.flush()
    os.makedirs('output', exist_ok=True)
    json_writer('output/instance_profile_policies.json', collected)
    json_printer(collected)
def main():
    """Collect API Gateway REST APIs and their authorizers per region.

    Writes the aggregate to output/api-gateways.json and prints it.
    Per-API authorizer failures are logged and recorded as empty dicts
    so one bad API does not abort the whole dump.
    """
    all_data = {}
    session = get_session()
    for region in get_all_regions(session):
        all_data[region] = {}
        client = session.client('apigateway', region_name=region)
        iterator = yield_handling_errors(get_api_gateways_for_region, client)
        for rest_api in iterator:
            api_id = rest_api['id']
            print('Region: %s / API ID: %s' % (region, api_id))
            try:
                authorizers = get_authorizers(client, api_id)
            except Exception as e:
                msg = 'Failed to retrieve authorizers for %s @ %s. Error: "%s"'
                args = (api_id, region, e)
                print(msg % args)
                authorizers = {}
            all_data[region][api_id] = {}
            all_data[region][api_id]['main'] = rest_api
            all_data[region][api_id]['authorizers'] = authorizers
        # BUG FIX: the original used for/else; with no `break` in the loop
        # the else clause runs unconditionally, so "No API gateways" was
        # printed even for regions that had gateways. Only report regions
        # that really yielded nothing (same pattern as the Lambda dumper).
        if not all_data[region]:
            print('Region: %s / No API gateways' % region)
    os.makedirs('output', exist_ok=True)
    json_writer('output/api-gateways.json', all_data)
    json_printer(all_data)
def main():
    """Collect RDS DB snapshots and their attributes per region.

    Writes the aggregate to output/rds-snapshots.json and prints it.
    Per-snapshot attribute failures are logged and recorded as empty
    dicts so one bad snapshot does not abort the whole dump.
    """
    session = get_session()
    all_data = {}
    for region in get_all_regions(session):
        all_data[region] = {}
        client = session.client('rds', region_name=region)
        for snapshot in get_shapshots_for_region(client):
            snapshot_id = snapshot['DBSnapshotIdentifier']
            print('Region: %s / Snapshot: %s' % (region, snapshot_id))
            try:
                attributes = get_snapshot_attributes(client, snapshot_id)
            except Exception as e:
                msg = 'Failed to retrieve attributes for %s @ %s. Error: "%s"'
                args = (snapshot_id, region, e)
                print(msg % args)
                attributes = {}
            all_data[region][snapshot_id] = {}
            all_data[region][snapshot_id]['main'] = snapshot
            all_data[region][snapshot_id]['attributes'] = attributes
        # BUG FIX: for/else without `break` runs the else branch every
        # time, so "No snapshots found" was printed even for regions with
        # snapshots. Report only genuinely empty regions.
        if not all_data[region]:
            print('Region: %s / No snapshots found' % (region, ))
    os.makedirs('output', exist_ok=True)
    json_writer('output/rds-snapshots.json', all_data)
    json_printer(all_data)
def main():
    """Collect Lambda functions (details + resource policy) per region.

    Writes the aggregate to output/lambda-functions.json and prints it.
    """
    session = get_session()
    report = {}
    for region in get_all_regions(session):
        report[region] = {}
        client = session.client('lambda', region_name=region)
        functions = yield_handling_errors(
            get_lambda_functions_for_region, client)
        for fn in functions:
            name = fn['FunctionName']
            print('Region: %s / Lambda function: %s' % (region, name))
            report[region][name] = {
                'main': fn,
                'details': get_function(client, name),
                'policy': get_policy(client, name),
            }
        if not report[region]:
            print('Region %s / No Lambda functions' % region)
            continue
    os.makedirs('output', exist_ok=True)
    json_writer('output/lambda-functions.json', report)
    json_printer(report)
def login(username, password):
    """Log in to the library system, reusing a Redis-cached cookie.

    Args:
        username (str): webap username
        password (str): webap password

    Returns:
        int: LIBRARY_LOGIN_SUCCESS(710), LIBRARY_LOGIN_FAIL(712),
            LIBRARY_ERROR(713), CACHE_LIBRARY_ERROR(714) or
            CACHE_LIBRARY_LOGIN_SUCCESS(715).
    """
    # A cached cookie means this user is already logged in.
    if red_bin.exists('library_cookie_%s' % username):
        return error_code.CACHE_LIBRARY_LOGIN_SUCCESS
    session = get_session()
    status = library_crawler.login(
        session=session, username=username, password=password)
    if not isinstance(status, int):
        return error_code.CACHE_LIBRARY_ERROR
    if status != error_code.LIBRARY_LOGIN_SUCCESS:
        # Pass crawler error codes straight through.
        return status
    # Persist the cookie jar so later calls can skip the crawler.
    red_bin.set(name='library_cookie_%s' % username,
                value=pickle.dumps(session.cookies),
                ex=config.CACHE_LIBRARY_EXPIRE_TIME)
    return error_code.CACHE_LIBRARY_LOGIN_SUCCESS
def userinfo(username):
    """Return library user info as a JSON string, cached in Redis.

    Args:
        username (str): NKUST ap-system username (student id).

    Returns:
        str: JSON payload on success.
        int: CACHE_LIBRARY_ERROR or LIBRARY_ERROR.
    """
    if not red_bin.exists('library_cookie_%s' % username):
        return error_code.CACHE_LIBRARY_ERROR
    redis_name = "library_user_info_{username}".format(username=username)
    # Serve from cache when available.
    if red_string.exists(redis_name):
        return red_string.get(redis_name)
    # Rebuild the session from the pickled cookie jar.
    session = get_session()
    session.cookies = pickle.loads(red_bin.get('library_cookie_%s' % username))
    user_info = library_crawler.user_info(session=session)
    if not isinstance(user_info, dict):
        return error_code.LIBRARY_ERROR
    dumped = json.dumps(user_info, ensure_ascii=False)
    red_string.set(name=redis_name, value=dumped,
                   ex=config.CACHE_LIBRARY_USER_INFO_EXPIRE_TIME)
    return dumped
def bus_violation(username):
    """Return the user's bus violation records as JSON, cached in Redis.

    Args:
        username (str): webap username

    Returns:
        str: JSON result.
        int: CACHE_BUS_COOKIE_ERROR(612), BUS_TIMEOUT_ERROR(604)
            or BUS_ERROR(605).
    """
    if not red_bin.exists('bus_cookie_%s' % username):
        return error_code.CACHE_BUS_COOKIE_ERROR
    redis_name = "bus_violation-records_{username}".format(username=username)
    if red_string.exists(redis_name):
        return red_string.get(redis_name)
    session = get_session()
    session.cookies = pickle.loads(red_bin.get('bus_cookie_%s' % username))
    result = bus_crawler.get_violation_records(session=session)
    if not isinstance(result, list):
        # Crawler returned an error code; pass it through.
        return result
    payload = json.dumps({"reservation": result}, ensure_ascii=False)
    red_string.set(name=redis_name, value=payload,
                   ex=config.CACHE_BUS_VIOLATION_RECORD_EXPIRE_TIME)
    return payload
def main():
    """Dump EC2 snapshots for every region to output/ec2_snapshots.json."""
    session = get_session()
    snapshots_by_region = {}
    for region in get_all_regions(session):
        ec2_client = session.client('ec2', region)
        snapshots_by_region[region] = {}
        print('Processing region: %s' % region)
        source = enumerate(yield_handling_errors(get_snapshots, ec2_client))
        for index, snap in source:
            snapshots_by_region[region][index] = snap
            # Dots double as a progress bar.
            sys.stdout.write('.')
            sys.stdout.flush()
        if snapshots_by_region[region]:
            print('\n')
    os.makedirs('output', exist_ok=True)
    json_writer('output/ec2_snapshots.json', snapshots_by_region)
    json_printer(snapshots_by_region)
def login(username, password):
    """Log in to the leave system, reusing a Redis-cached cookie.

    Args:
        username (str): webap username
        password (str): webap password

    Returns:
        int: LEAVE_LOGIN_TIMEOUT(801), LEAVE_LOGIN_FAIL(803),
            CACHE_LEAVE_LOGIN_SUCCESS(804) or CACHE_LEAVE_ERROR(805).
    """
    # A cached cookie means this user is already logged in.
    if red_bin.exists('leave_cookie_%s' % username):
        return error_code.CACHE_LEAVE_LOGIN_SUCCESS
    session = get_session()
    status = leave_crawler.login(
        session=session, username=username, password=password)
    if not isinstance(status, int):
        return error_code.CACHE_LEAVE_ERROR
    if status != error_code.LEAVE_LOGIN_SUCCESS:
        # Pass crawler error codes straight through.
        return status
    # Persist the cookie jar for subsequent calls.
    red_bin.set(name='leave_cookie_%s' % username,
                value=pickle.dumps(session.cookies),
                ex=config.CACHE_LEAVE_COOKIE_EXPIRE_TIME)
    return error_code.CACHE_LEAVE_LOGIN_SUCCESS
def main():
    """Report which IAM principals are allowed to call sts:AssumeRole."""
    session = get_session()
    actions = [
        'sts:AssumeRole'
    ]
    iam_client = session.client('iam')
    principals = itertools.chain(
        get_users(iam_client),
        get_groups(iam_client),
        get_roles(iam_client),
    )
    allowed = []
    for arn in principals:
        simulation = iam_client.simulate_principal_policy(
            PolicySourceArn=arn,
            ActionNames=actions
        )
        decision = simulation['EvaluationResults'][0]['EvalDecision']
        # 'A' marks an allowed principal, '.' a denied one.
        if decision == 'allowed':
            allowed.append(arn)
            sys.stdout.write('A')
        else:
            sys.stdout.write('.')
        sys.stdout.flush()
    print('\n')
    print('These principals are allowed to %s:' % actions)
    print('\n'.join([' - %s' % ap for ap in allowed]))
def main():
    """Dump Cognito identity pools (description + roles) per region."""
    session = get_session()
    all_data = {}
    for region in get_all_regions(session):
        all_data[region] = {}
        client = session.client('cognito-identity', region_name=region)
        print('Processing region: %s' % region)
        for pool in get_id_pools(client):
            pool_id = pool['IdentityPoolId']
            description = client.describe_identity_pool(
                IdentityPoolId=pool_id)
            pool_roles = client.get_identity_pool_roles(
                IdentityPoolId=pool_id)
            all_data[region][pool_id] = {
                'describe': description,
                'roles': pool_roles,
            }
            # Dots double as a progress bar.
            sys.stdout.write('.')
            sys.stdout.flush()
    os.makedirs('output', exist_ok=True)
    json_writer('output/cognito-id-pools.json', all_data)
    json_printer(all_data)
def query_empty_room(room_id, year, semester):
    """/user/empty-room/info — course table for a single room.

    Queries webap directly (no cache_ap_query) using the GUEST account.

    Args:
        room_id (str): room id obtained from room_list.
        year (str): academic year, e.g. 107, 108.
        semester (str): semester, 1, 2, ...

    Returns:
        str: JSON result on success, otherwise an error code (int).
    """
    cache_redis_name = "room_coursetable_{room_id}_{year}_{semester}".format(
        room_id=room_id, year=year, semester=semester)
    if red_string.exists(cache_redis_name):
        return red_string.get(cache_redis_name)
    login_status = login(username=config.AP_GUEST_ACCOUNT,
                         password=config.AP_GUEST_PASSWORD)
    if login_status != error_code.CACHE_WENAP_LOGIN_SUCCESS:
        return error_code.CACHE_WEBAP_ERROR
    # Reuse the guest account's cached cookie jar.
    session = get_session()
    session.cookies = pickle.loads(
        red_bin.get('webap_cookie_%s' % config.AP_GUEST_ACCOUNT))
    yms_data = "{year}#{semester}".format(year=year, semester=semester)
    query_res = webap_crawler.query(session=session,
                                    qid='ag302_02',
                                    room_id=room_id,
                                    yms_yms=yms_data)
    if query_res == False:
        return error_code.QUERY_EMPTY_ROOM_ERROR
    if not isinstance(query_res, requests.models.Response):
        return error_code.CACHE_WEBAP_ERROR
    room_coursetable_data = json.dumps(parse.query_room(query_res.text))
    # avoid null data save in redis.
    if len(room_coursetable_data) < 160:
        return error_code.CACHE_WEBAP_ERROR
    red_string.set(name=cache_redis_name,
                   value=room_coursetable_data,
                   ex=config.CACHE_SEMESTERS_EXPIRE_TIME)
    return room_coursetable_data
def login(username, password): """login to webap If user was logged in before JWT_EXPIRE_TIME, redis will save SHA-256(username+password) to check user in JWT_EXPIRE_TIME. Avoid multiple login in to NKUST server in short time. Args: username ([str]): webap username password ([str]): webap password Returns: [int]: login status. utils/error_code.py CACHE_WENAP_LOGIN_SUCCESS (110) CACHE_WEBAP_LOGIN_FAIL (111) CACHE_WEBAP_SERVER_ERROR (112) CACHE_WEBAP_ERROR (113) """ # check username and password without use NKUST server if red_string.exists('api_login_%s' % username): s = hashlib.sha256() s.update((username + password).encode('utf-8')) user_hash = s.hexdigest() if red_string.get('api_login_%s' % username) == user_hash: # check webap cookie exist if red_bin.exists('webap_cookie_%s' % username): return error_code.CACHE_WENAP_LOGIN_SUCCESS session = get_session() login_status = webap_crawler.login(session=session, username=username, password=password) if login_status == error_code.WENAP_LOGIN_SUCCESS: # save user hash to redis s = hashlib.sha256() s.update((username + password).encode('utf-8')) user_hash = s.hexdigest() red_string.set(name='api_login_%s' % username, value=user_hash, ex=config.CACHE_USER_HASH_EXPIRE_TIME) # save cookie to redis red_bin.set(name='webap_cookie_%s' % username, value=pickle.dumps(session.cookies), ex=config.CACHE_WEBAP_COOKIE_EXPIRE_TIME) return error_code.CACHE_WENAP_LOGIN_SUCCESS elif login_status == error_code.WEBAP_LOGIN_FAIL: return error_code.CACHE_WEBAP_LOGIN_FAIL elif login_status == error_code.WEBAP_SERVER_ERROR: return error_code.CACHE_WEBAP_SERVER_ERROR elif login_status == error_code.WEBAP_ERROR: return error_code.CACHE_WEBAP_ERROR return error_code.CACHE_WEBAP_ERROR
def main():
    """Collect GuardDuty findings for every region into one report."""
    session = get_session()
    findings = {region: get_findings(session, region)
                for region in get_all_regions(session)}
    os.makedirs('output', exist_ok=True)
    json_writer('output/guardduty.json', findings)
    json_printer(findings)
def bus_reserve_book(username, kid, action):
    """Book or cancel a bus reservation and refresh related caches.

    Args:
        username (str): webap username
        kid: passed through to bus_crawler.book — presumably the bus /
            timetable id; confirm against the crawler.
        action: passed through to bus_crawler.book (book vs. cancel);
            confirm the exact values against the crawler.

    Returns:
        dict: crawler result (success flag and details).
        int: CACHE_BUS_COOKIE_ERROR(612), CACHE_BUS_USER_ERROR(613),
            BUS_TIMEOUT_ERROR(604) or BUS_ERROR(605).
    """
    if not red_bin.exists('bus_cookie_%s' % username):
        return error_code.CACHE_BUS_COOKIE_ERROR
    session = get_session()
    session.cookies = pickle.loads(red_bin.get('bus_cookie_%s' % username))
    result = bus_crawler.book(session=session, kid=kid, action=action)
    if isinstance(result, dict):
        if result['success']:
            # Reservation data changed: drop every cached reservation
            # list for this user.
            for key in red_string.scan_iter(
                    'bus_reservations_{username}*'.format(username=username)):
                red_string.delete(key)
            # Rebuild the user's reservation cache in the background.
            pool.apply_async(func=bus_reservations_record, args=(username, ))
            # busTime (epoch milliseconds, divided by 1000 below) tells
            # us which day's timetable cache to refresh in the background.
            if result.get("busTime"):
                book_time = datetime.fromtimestamp(
                    int(result.get("busTime")) / 1000)
                pool.apply_async(func=get_and_update_timetable_cache,
                                 args=(
                                     session,
                                     book_time.year,
                                     book_time.month,
                                     book_time.day,
                                 ))
            return result
        else:
            # Crawler reported an unsuccessful booking; hand the dict back.
            return result
    elif result == error_code.BUS_USER_WRONG_CAMPUS_OR_NOT_FOUND_USER:
        # Cookie belongs to an invalid user; force a fresh login next time.
        red_bin.delete('bus_cookie_%s' % username)
        return error_code.CACHE_BUS_USER_ERROR
    # Pass any other crawler error code straight through.
    return result
def coursetable(username, year, semester):
    """Return the user's course table as JSON, cached in Redis.

    Requires webap_login_cache_required to have run first. The parsed
    JSON string (not the raw HTML) is what gets cached, since parsing
    the course table is expensive.

    Args:
        username (str): NKUST webap username
        year (str): term year, e.g. 107, 108.
        semester (str): semester

    Returns:
        str: course table JSON on success.
        int: COURSETABLE_PARSE_ERROR, COURSETABLE_QUERY_ERROR or
            WEBAP_ERROR.
    """
    cache_key = 'coursetable_%s_%s_%s' % (username, year, semester)
    if red_string.exists(cache_key):
        return red_string.get(cache_key)
    session = get_session()
    # Restore the webap session cookie from Redis.
    session.cookies = pickle.loads(red_bin.get('webap_cookie_%s' % username))
    query_res = webap_crawler.query(session=session,
                                    qid='ag222',
                                    arg01=year,
                                    arg02=semester)
    if not query_res:
        return error_code.COURSETABLE_QUERY_ERROR
    if isinstance(query_res, requests.models.Response):
        parsed = parse.coursetable(query_res.text)
        if parsed is False:
            return error_code.COURSETABLE_PARSE_ERROR
        coursetable_data = json.dumps(parsed)
        red_string.set(name=cache_key,
                       value=coursetable_data,
                       ex=config.CACHE_COURSETABLE_EXPIRE_TIME)
        return coursetable_data
    return error_code.WEBAP_ERROR
def cache_ap_query(username, qid,
                   expire_time=config.CACHE_WEBAP_QUERY_DEFAULT_EXPIRE_TIME,
                   **kwargs):
    """Run a webap query, caching the response HTML in Redis.

    Args:
        username (str): key for the cached cookie/cache entries.
        qid (str): NKUST query url arg.
        expire_time (int): cache TTL, defaults to
            config.CACHE_WEBAP_QUERY_DEFAULT_EXPIRE_TIME.
        kwargs: POST data, e.g.
            cache_ap_query(username, qid='ag008', yms='107,2',
                           arg01='107', arg02='2')
            post data will = {
                'yms': '107,2',
                'arg01': '107',
                'arg02': '2'
            }

    Returns:
        str: the response HTML on success.
        int: CACHE_AP_QUERY_COOKIE_ERROR when no cookie is cached.
        bool: False on any other error.
    """
    if not red_bin.exists('webap_cookie_%s' % username):
        return error_code.CACHE_AP_QUERY_COOKIE_ERROR
    # Cache key, e.g. webap_query_1105133333_ag008_107,2_...
    # BUG FIX: the original concatenated the kwarg values directly onto
    # the qid with no separator ("...ag008107,2"), contradicting the key
    # format documented above and allowing distinct qid/value
    # combinations to collide. Join everything with '_'.
    redis_name = '_'.join(
        ["webap_query_{username}_{qid}".format(username=username, qid=qid)]
        + [str(v) for v in kwargs.values()])
    # Return the cached HTML when present.
    if red_string.exists(redis_name):
        return red_string.get(redis_name)
    # Rebuild the session from the pickled cookie jar.
    session = get_session()
    session.cookies = pickle.loads(red_bin.get('webap_cookie_%s' % username))
    res = webap_crawler.query(session=session, qid=qid, **kwargs)
    if res is not False and res.status_code == 200:
        red_string.set(name=redis_name, value=res.text, ex=expire_time)
        return res.text
    return False
def main(security_group_id='sg-0ea...', region_name='us-west-2'):
    """Print the network interfaces attached to a security group.

    GENERALIZED: the security group and region were hard-coded locals
    behind a TODO; they are now defaulted parameters (same values), so
    callers/CLI wrappers can supply their own without editing the code.

    Args:
        security_group_id (str): EC2 security group to look up.
        region_name (str): region the group lives in.
    """
    session = get_session()
    ec2_client = session.client('ec2', region_name=region_name)
    filters = [{'Name': 'group-id', 'Values': [security_group_id]}]
    result = ec2_client.describe_network_interfaces(Filters=filters)
    # Strip the boto3 response metadata; only the ENI data is interesting.
    result.pop('ResponseMetadata')
    json_printer(result)
def room_list(campus):
    """/user/room/list — room list for one campus.

    campus 1=建工/2=燕巢/3=第一/4=楠梓/5=旗津

    Queries webap directly (no cache_ap_query) using the GUEST account.

    Returns:
        str: JSON result.
        int: ROOM_LIST_ERROR, CACHE_WEBAP_LOGIN_FAIL (111),
            CACHE_WEBAP_SERVER_ERROR (112) or CACHE_WEBAP_ERROR (113).
    """
    if red_string.exists('campus_%s' % campus):
        return red_string.get('campus_%s' % campus)
    login_status = login(username=config.AP_GUEST_ACCOUNT,
                         password=config.AP_GUEST_PASSWORD)
    if login_status != error_code.CACHE_WENAP_LOGIN_SUCCESS:
        return error_code.CACHE_WEBAP_ERROR
    # Reuse the guest account's cached cookie jar.
    session = get_session()
    session.cookies = pickle.loads(
        red_bin.get('webap_cookie_%s' % config.AP_GUEST_ACCOUNT))
    query_res = webap_crawler.query(session=session,
                                    qid='ag302_01',
                                    cmp_area_id=campus)
    if query_res == False:
        return error_code.ROOM_LIST_ERROR
    if not isinstance(query_res, requests.models.Response):
        return error_code.CACHE_WEBAP_ERROR
    room_list_data = json.dumps(parse.room_list(query_res.text))
    red_string.set(name='campus_%s' % campus,
                   value=room_list_data,
                   ex=config.CACHE_SEMESTERS_EXPIRE_TIME)
    return room_list_data
def cache_graduation_threshold(username, password):
    """Return the graduation threshold as JSON, cached in Redis.

    **NKUST maybe abandon this function**

    Args:
        username (str): NKUST webap username
        password (str): NKUST webap password

    Returns:
        str: JSON on success.
        int: CACHE_WEBAP_LOGIN_FAIL, CACHE_WEBAP_SERVER_ERROR,
            CACHE_WEBAP_ERROR or GRADUATION_ERROR.
    """
    # Cached as graduation_<username>.
    if red_string.exists('graduation_%s' % username):
        return red_string.get(('graduation_%s' % username))
    login_status = login(username=username, password=password)
    if login_status != error_code.CACHE_WENAP_LOGIN_SUCCESS:
        # Surface the login error to the caller directly.
        return login_status
    # Restore the user's webap cookie.
    session = get_session()
    session.cookies = pickle.loads(
        red_bin.get('webap_cookie_%s' % username))
    graduation_req = webap_crawler.graduation_threshold(session=session)
    if graduation_req != False and isinstance(graduation_req.text, str):
        res = parse.graduation(graduation_req.text)
        if res != False:
            dump = json.dumps(res, ensure_ascii=False)
            red_string.set(name='graduation_%s' % username,
                           value=dump,
                           ex=config.CACHE_GRADUTION_EXPIRE_TIME)
            return dump
    return error_code.GRADUATION_ERROR
def main():
    """Dump all Route53 hosted-zone records to output/route53_dump.json.

    Returns:
        dict: the collected record data.
    """
    session = get_session()
    route53_client = session.client('route53')
    zone_info = route53_client.list_hosted_zones()
    all_data = dict()
    for zone in zone_info.get('HostedZones'):
        zone_name = zone['Name']
        zone_id = zone['Id']
        # remove trailing dot
        zone_name = zone_name[:-1]
        dump_route53_records(route53_client, zone_name, zone_id, all_data)
    # CONSISTENCY FIX: every other dumper creates output/ before writing;
    # without this, writing output/route53_dump.json fails when the
    # directory does not exist yet.
    os.makedirs('output', exist_ok=True)
    json_writer('output/route53_dump.json', all_data)
    return all_data
def semesters():
    """/user/semesters — list available semesters.

    Queries webap directly (no cache_ap_query) using the GUEST account.
    The cached value is a JSON string (Redis cannot store a dict).

    Returns:
        str: JSON result.
        int: SEMESTERS_QUERY_ERROR, CACHE_WEBAP_LOGIN_FAIL (111),
            CACHE_WEBAP_SERVER_ERROR (112) or CACHE_WEBAP_ERROR (113).
    """
    if red_string.exists('semesters'):
        return red_string.get('semesters')
    login_status = login(username=config.AP_GUEST_ACCOUNT,
                         password=config.AP_GUEST_PASSWORD)
    if login_status != error_code.CACHE_WENAP_LOGIN_SUCCESS:
        return error_code.CACHE_WEBAP_ERROR
    # Reuse the guest account's cached cookie jar.
    session = get_session()
    session.cookies = pickle.loads(
        red_bin.get('webap_cookie_%s' % config.AP_GUEST_ACCOUNT))
    query_res = webap_crawler.query(session=session, qid='ag304_01')
    if query_res == False:
        return error_code.SEMESTERS_QUERY_ERROR
    if not isinstance(query_res, requests.models.Response):
        return error_code.CACHE_WEBAP_ERROR
    semesters_data = json.dumps(parse.semesters(query_res.text))
    red_string.set(name='semesters',
                   value=semesters_data,
                   ex=config.CACHE_SEMESTERS_EXPIRE_TIME)
    return semesters_data
def main():
    """Dump KMS key grants and policies for every region."""
    session = get_session()
    report = {}
    for region in get_all_regions(session):
        report[region] = {}
        client = session.client('kms', region_name=region)
        keys_for_region = get_keys_for_region(client)
        if not keys_for_region:
            print('Region: %s / No KMS keys' % region)
            continue
        for key in keys_for_region:
            print('Region: %s / KeyId: %s' % (region, key))
            # Either lookup may fail for an individual key; fall back to
            # empty lists so one bad key does not abort the whole dump.
            grants = []
            policies = []
            try:
                grants = get_key_grants(client, key)
            except Exception as e:
                msg = 'Failed to retrieve grants for %s @ %s. Error: "%s"'
                args = (key, region, e)
                print(msg % args)
            try:
                policies = get_key_policies(client, key)
            except Exception as e:
                msg = 'Failed to retrieve policies for %s @ %s. Error: "%s"'
                args = (key, region, e)
                print(msg % args)
            report[region][key] = {'grants': grants, 'policies': policies}
    os.makedirs('output', exist_ok=True)
    json_writer('output/key-grants.json', report)
    json_printer(report)
def get_leave_list(username, year, semester):
    """Return the user's leave list as JSON, cached in Redis.

    Args:
        username (str): NKUST webap username
        year (str): term year, e.g. 107, 108.
        semester (str): semester

    Returns:
        str: JSON result.
        int: CACHE_LEAVE_ERROR.
    """
    redis_name = "leave_list_{username}_{year}_{semester}".format(
        username=username, year=year, semester=semester)
    if red_string.exists(redis_name):
        return red_string.get(redis_name)
    session = get_session()
    session.cookies = pickle.loads(red_bin.get('leave_cookie_%s' % username))
    list_data = leave_crawler.get_leave_list(
        session=session, year=year, semester=semester)
    # On success the crawler returns a two-element list:
    # [records, time_codes].
    if isinstance(list_data, list) and len(list_data) == 2:
        payload = json.dumps(
            {"data": list_data[0], "timeCodes": list_data[1]},
            ensure_ascii=False)
        red_string.set(name=redis_name,
                       value=payload,
                       ex=config.CACHE_LEAVE_LIST_EXPIRE_TIME)
        return payload
    return error_code.CACHE_LEAVE_ERROR
def bus_reservations_record(username):
    """Return the user's bus reservation records as JSON, cached in Redis.

    Args:
        username (str): webap username

    Returns:
        str: JSON result.
        int: CACHE_BUS_COOKIE_ERROR(612), CACHE_BUS_USER_ERROR(613),
            BUS_TIMEOUT_ERROR(604) or BUS_ERROR(605).
    """
    if not red_bin.exists('bus_cookie_%s' % username):
        return error_code.CACHE_BUS_COOKIE_ERROR
    redis_name = "bus_reservations_{username}".format(username=username)
    if red_string.exists(redis_name):
        return red_string.get(redis_name)
    session = get_session()
    session.cookies = pickle.loads(red_bin.get('bus_cookie_%s' % username))
    result = bus_crawler.reserve(session=session)
    if isinstance(result, list):
        payload = json.dumps({"data": result}, ensure_ascii=False)
        red_string.set(name=redis_name,
                       value=payload,
                       ex=config.CACHE_BUS_USER_RESERVATIONS)
        return payload
    if result == error_code.BUS_USER_WRONG_CAMPUS_OR_NOT_FOUND_USER:
        # Cookie belongs to an invalid user; drop it so the next call
        # re-logins.
        red_bin.delete('bus_cookie_%s' % username)
        return error_code.CACHE_BUS_USER_ERROR
    # Pass any other crawler error code straight through.
    return result
def graduate_user_info(username):
    """Return graduate user info parsed from the webap header, as JSON.

    The JSON string is cached in Redis.

    Args:
        username (str): NKUST webap username.

    Returns:
        str: JSON result.
        int: CACHE_AP_QUERY_COOKIE_ERROR or USER_INFO_ERROR.
    """
    if not red_bin.exists('webap_cookie_%s' % username):
        return error_code.CACHE_AP_QUERY_COOKIE_ERROR
    redis_name = "graduate_user_info_{username}".format(username=username)
    if red_string.exists(redis_name):
        return red_string.get(redis_name)
    # Rebuild the session from the cached cookie jar.
    session = get_session()
    session.cookies = pickle.loads(red_bin.get('webap_cookie_%s' % username))
    html = webap_crawler.graduate_user_info(session=session)
    if html is not False and isinstance(html.text, str):
        res = parse.graduate_user_info(html=html.text)
        if isinstance(res, dict):
            # Attach the student id before caching.
            res['id'] = username
            dumped = json.dumps(res, ensure_ascii=False)
            red_string.set(name=redis_name,
                           value=dumped,
                           ex=config.CACHE_GRADUATE_USER_INFO_EXPIRE_TIME)
            return dumped
    return error_code.USER_INFO_ERROR
def login(username, password):
    """Log in to the bus system, reusing a Redis-cached cookie.

    Args:
        username (str): webap username
        password (str): webap password

    Returns:
        int: BUS_JS_ERROR(601),
            BUS_USER_WRONG_CAMPUS_OR_NOT_FOUND_USER(602),
            BUS_WRONG_PASSWORD(603), BUS_TIMEOUT_ERROR(604),
            BUS_ERROR(605), CACHE_BUS_LOGIN_SUCCESS(610) or
            CACHE_BUS_ERROR(611).
    """
    # A cached cookie means this user is already logged in.
    if red_bin.exists('bus_cookie_%s' % username):
        return error_code.CACHE_BUS_LOGIN_SUCCESS
    session = get_session()
    status = bus_crawler.login(
        session=session, username=username, password=password)
    if isinstance(status, dict):
        # Successful login: persist the cookie jar for reuse.
        red_bin.set(name='bus_cookie_%s' % username,
                    value=pickle.dumps(session.cookies),
                    ex=config.CACHE_BUS_COOKIE_EXPIRE_TIME)
        return error_code.CACHE_BUS_LOGIN_SUCCESS
    if isinstance(status, int):
        # Pass crawler error codes straight through.
        return status
    return error_code.CACHE_BUS_ERROR
def get_submit_info(username):
    """Return leave-submission info as JSON, cached in Redis.

    Args:
        username (str): NKUST webap username

    Returns:
        str: JSON result.
        int: CACHE_LEAVE_ERROR or LEAVE_SUBMIT_INFO_GRADUATE_ERROR.
    """
    redis_name = "leave_list_{username}_submit_info".format(
        username=username)
    if red_string.exists(redis_name):
        return red_string.get(redis_name)
    session = get_session()
    session.cookies = pickle.loads(red_bin.get('leave_cookie_%s' % username))
    data = leave_crawler.get_submit_info(session=session)
    if isinstance(data, dict):
        payload = json.dumps(data, ensure_ascii=False)
        red_string.set(name=redis_name,
                       value=payload,
                       ex=config.CACHE_LEAVE_SUBMIT_EXPIRE_TIME)
        return payload
    if isinstance(data, int) and \
            data == error_code.LEAVE_SUBMIT_INFO_GRADUATE_ERROR:
        # Graduate students have no submit info; surface that directly.
        return error_code.LEAVE_SUBMIT_INFO_GRADUATE_ERROR
    return error_code.CACHE_LEAVE_ERROR
def main():
    """Dump details for every IAM role to output/role-details.json.

    Per-role failures are logged and recorded as empty dicts so one bad
    role does not abort the whole dump.
    """
    session = get_session()
    all_data = {}
    client = session.client('iam')
    for role_name in get_role_names(client):
        print('RoleName: %s' % (role_name, ))
        # BUG FIX: role_details was unbound when the very first lookup
        # raised (NameError at the assignment below) and silently reused
        # the previous role's data on later failures. Default to an empty
        # dict instead. The unused `roles` list was removed.
        role_details = {}
        try:
            role_details = get_role_details(client, role_name)
        except Exception as e:
            msg = 'Failed to retrieve role for %s. Error: "%s"'
            args = (role_name, e)
            print(msg % args)
        all_data[role_name] = role_details
    os.makedirs('output', exist_ok=True)
    json_writer('output/role-details.json', all_data)
    json_printer(all_data)
def bus_query(username, year, month, day):
    """Return one day's bus timetable merged with the user's reservations.

    The shared timetable is cached under bus_timetable_<y>_<m>_<d>
    (see config.CACHE_BUS_TIMETABLE_EXPIRE_TIME); the user's reservation
    list is fetched concurrently via the worker pool.

    Args:
        username ([str]): webap username
        year ([int]): year, common era.
        month ([int]): month.
        day ([int]): day.

    Returns:
        [str]: result type is json.
        [int]: CACHE_BUS_COOKIE_ERROR(612)
               CACHE_BUS_USER_ERROR(613)
               BUS_TIMEOUT_ERROR(604)
               BUS_ERROR(605)
    """
    if not red_bin.exists('bus_cookie_%s' % username):
        return error_code.CACHE_BUS_COOKIE_ERROR
    redis_name = "bus_timetable_{year}_{month}_{day}".format(year=year,
                                                             month=month,
                                                             day=day)
    session = get_session()
    session.cookies = pickle.loads(red_bin.get('bus_cookie_%s' % username))
    # Fetch the user's reservations in parallel with the timetable work.
    user_book_data = pool.apply_async(bus_reservations_record, (username, ))
    if red_string.exists(redis_name):
        main_timetable = json.loads(red_string.get(redis_name))
    else:
        main_timetable = get_and_update_timetable_cache(
            session, year, month, day)
    if isinstance(main_timetable, list):
        # Block until the background reservation lookup finishes.
        user_book_data = user_book_data.get()
        if not isinstance(user_book_data, str):
            return error_code.BUS_ERROR
        # mix cancelKey and add 'isReserve' in timetable
        user_reservation = json.loads(user_book_data)
        for bus_data in main_timetable:
            bus_data['cancelKey'] = ''
            bus_data['isReserve'] = False
            for reservation_data in user_reservation['data']:
                if reservation_data['dateTime'] == bus_data['departureTime'] and \
                        reservation_data['start'] == bus_data['startStation']:
                    bus_data['isReserve'] = True
                    bus_data['cancelKey'] = reservation_data['cancelKey']
        return_data = {
            "date": datetime.utcnow().isoformat(timespec='seconds') + "Z",
            "data": main_timetable
        }
        return json.dumps(return_data, ensure_ascii=False)
    elif main_timetable == error_code.BUS_USER_WRONG_CAMPUS_OR_NOT_FOUND_USER:
        # clear user cache cookie so the next call re-logins
        red_bin.delete('bus_cookie_%s' % username)
        # NOTE(review): redis_name is a red_string key (read above via
        # red_string.get), yet it is deleted from red_bin here — possibly
        # intended red_string.delete(redis_name); confirm before changing.
        red_bin.delete(redis_name)
        return error_code.CACHE_BUS_USER_ERROR
    # return error code
    return error_code.BUS_ERROR