def create(self, parent_file_id):
    create_data = {
        "auto_rename": True,
        "content_hash": self.hash,
        "content_hash_name": 'sha1',
        "drive_id": self.drive_id,
        "hidden": False,
        "name": self.filename,
        "parent_file_id": parent_file_id,
        "type": "file",
        "size": self.filesize
    }
    create_post = requests.post(
        'https://api.aliyundrive.com/v2/file/create',
        data=json.dumps(create_data),
        headers=self.headers,
        verify=False
    )
    create_post_json = create_post.json()
    if create_post_json.get('code') == 'AccessTokenInvalid':
        print_info('AccessToken has expired; trying to refresh it')
        if self.token_refresh():
            print_info('AccessToken refreshed; retrying the upload-creation request')
            return self.create(parent_file_id)
        print_error('Unable to refresh AccessToken; exiting')
        exit()
    return create_post_json
def main():
    url: str = ''
    try:
        if sys.argv[1]:
            url = sys.argv[1]
    except IndexError:
        print_error(
            "first command line argument must be DCSO Portal API endpoint")
        sys.exit(1)

    try:
        # token will be set automatically later
        if os.environ[ENV_PORTAL_TOKEN] == "":
            raise KeyError
    except KeyError:
        print_error(
            f"please set environment variable {ENV_PORTAL_TOKEN} using valid DCSO Portal token"
        )
        sys.exit(1)

    try:
        client = APIClient(api_url=url)
    except PortalConfiguration as exc:
        print_error(str(exc))
        sys.exit(1)

    check_client(client)

    try:
        response = client.execute_graphql(
            query='{ tdh_allIssues { id reference } }')
    except PortalAPIError as exc:
        print_error(str(exc))
        sys.exit(1)

    if len(response.tdh_allIssues) > 0:
        print("All TDH Issues:")
        for issue in response.tdh_allIssues:
            print(f"{issue.id} {issue.reference}")
    else:
        print("No issues available.")

    issue_id = '3be13d5f-1556-4edf-a243-e7ea4db67f2e'
    query = """query { issue: tdh_issue(filter: {id: "%s"}) { title affectedAssets {ip} } }""" % (issue_id, )

    print("Getting issue ", issue_id)
    try:
        result = client.execute_graphql(query)
    except PortalAPIError as exc:
        print_error(str(exc))
        sys.exit(1)
    else:
        print(f"\n{result.issue.title} ({issue_id})")
        for asset in result.issue.affectedAssets:
            print(f"Assets:\n\t{asset.ip}")
def get_yes_no(obj, var_name):
    if isinstance(obj, bool):
        return obj
    elif isinstance(obj, str) and obj.lower() in ["yes", "no"]:
        return obj.lower() == "yes"
    print_error("'{}' should have value of 'yes' or 'no': {}".format(
        var_name, obj))
def main():
    common.args.process(sys.argv)
    service = common.get_service("gmail")

    # TODO: Add a switch for result count, or at least rethink current value.
    results = service.users().messages().list(
        userId='me', maxResults=common.args[TITLE_MAX], labelIds=["INBOX"]).execute()
    messages = results.get('messages', [])

    if not messages:
        common.print_error('No e-mail found.')
    else:
        for message in messages:
            # Need to receive full message specifically.
            # Listing returned just a list of IDs and threads.
            fullMessage = service.users().messages().get(
                userId='me', id=message['id']).execute()
            headers = {}
            # Translate list of dictionaries to a proper dictionary.
            for header in fullMessage['payload']['headers']:
                headers[header['name']] = header
            # TODO: Re-work formatting.
            if common.args[TITLE_CSV_FORMAT]:
                print("\"%s\",\"%s\",\"%s\"" % (headers['Date']['value'],
                                                headers['From']['value'],
                                                headers['Subject']['value']))
            else:
                print(headers['From']['value'], ":", headers['Subject']['value'])
def system_profiler_fetch_serial():
    '''Calls System Profiler to get the computer's hardware serial number.
    Returns an empty string if something bad happened.'''
    # Default so there is always something to return:
    serial_number = ''
    # Run command:
    try:
        output = subprocess.check_output(['/usr/sbin/system_profiler', 'SPHardwareDataType', '-xml'])
    except subprocess.CalledProcessError:
        output = None
    # Try to get serial_number key:
    if output:
        try:
            output_dict = plistlib.readPlistFromString(output)
        except xml.parsers.expat.ExpatError:
            output_dict = {}
        if output_dict:
            try:
                serial_number = output_dict[0]['_items'][0]['serial_number']
            except KeyError:
                serial_number = ''
    # Log bad serial:
    if not serial_number:
        common.print_error("Failed to get the computer's hardware serial number.")
    # Return:
    return serial_number
def thread(task):
    drive = client.upload_file(task)
    drive.finish_time = get_timestamp()
    drive.spend_time = drive.finish_time - drive.start_time
    if drive.status != 1:
        print_error(os.path.basename(drive.filepath) + ' upload failed')
    client.save_task(drive)
def test_connections():
    '''Attempts a connection to each enrollment server.
    Returns true if it can connect to at least one, false otherwise.'''
    # Default:
    test_result = False
    # Retry counts:
    retry_count = 10
    # Iterate over members in ENROLLMENT_SERVER_URIS_ARRAY:
    for server_uri in config_site.ENROLLMENT_SERVER_URIS_ARRAY:
        if test_result:
            break
        common.print_info("Testing connection to server %s..." % server_uri)
        for i in range(0, retry_count):
            # Assume unverified context:
            server_response = None
            try:
                ssl_context = ssl.create_default_context()
                ssl_context.check_hostname = False
                ssl_context.verify_mode = ssl.CERT_NONE
                server_response = urllib.urlopen(server_uri, None, context=ssl_context)
                if server_response:
                    test_result = True
                    break
            except IOError:
                pass
            # Delay for next try:
            common.print_error("Could not contact %(server)s. Delaying 10 seconds (try %(attempt)s of %(retries)s)." %
                               {"server": server_uri, "attempt": str(i + 1), "retries": str(retry_count)})
            time.sleep(10)
    # Return:
    return test_result
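# Illustrative sketch of the config_site values test_connections() relies on.
# The attribute name comes from the function above; the URIs are placeholder
# assumptions, not real enrollment endpoints.
#
#   ENROLLMENT_SERVER_URIS_ARRAY = [
#       'https://mdm-primary.example.org/enroll',
#       'https://mdm-backup.example.org/enroll',
#   ]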
def send_request_expecting_xml(given_post_vars_dict, given_expected_attributes, retry_count):
    '''Wrapper for send_request(...) and process_response_as_xml(...) with a retry loop.
    Returns a server response dict with at least minimal attributes.'''
    # Catch invalid retry counts:
    if retry_count < 1:
        retry_count = 1
    # Request/response parsing loop:
    for i in range(0, retry_count):
        response = send_request(given_post_vars_dict)
        response_dict = process_response_as_xml(response)
        # Verify keys exist:
        keys_exist = True
        for key in given_expected_attributes:
            try:
                test = response_dict[key]
            except KeyError:
                response_dict[key] = False
                keys_exist = False
        # Break loop:
        if keys_exist:
            break
        # Next try:
        common.print_error("Waiting for XML from server. Delaying 15 seconds (try %(attempt)s of %(retries)s)." %
                           {"attempt": str(i + 1), "retries": str(retry_count)})
        time.sleep(15)
    # Return:
    return response_dict
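# Illustrative usage sketch (not part of the original module): a caller might
# post check-in data and insist on a few attributes before proceeding. The
# POST variable names, expected attribute names, and retry count below are
# assumptions made for the example, not the real enrollment protocol.
def example_checkin():
    response = send_request_expecting_xml(
        {'action': 'checkin', 'serial': 'C02EXAMPLE0001'},  # placeholder serial
        ['status', 'message'],
        4)
    if response['status'] and response['message']:
        common.print_info("Server said: %s" % response['message'])
    else:
        common.print_error("Server response was missing expected attributes.")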
def get_local_file(input, output):
    black_list = ["package.yaml", ".stage", "release", "control"]
    valid_components = []
    for path in input.split("/"):
        if path == ".":
            pass
        elif path == "..":
            raise RuntimeError("'..' is not allowed when specifying a local path")
        else:
            valid_components.append(path)
    if len(valid_components) > 0 and valid_components[0] in black_list:
        raise RuntimeError(
            "'./{}' is reserved for system use and cannot be used to specify a local file"
            .format(valid_components[0]))
    if len(valid_components) == 0:
        url = "*"
    else:
        url = os.path.join(*valid_components)
    file_list = glob.glob(url)
    if len(file_list) == 0:
        print_error("Cannot find any file in '{}'".format(url))
    create_dir(output)
    for file in file_list:
        if file in black_list:
            continue
        dest_path = os.path.join(output, file)
        # print "{} -> {}".format(file, dest_path)
        if os.path.isdir(file):
            shutil.copytree(file, dest_path)
        else:
            shutil.copy(file, dest_path)
def complete(self, file_id, upload_id):
    complete_data = {
        "drive_id": self.drive_id,
        "file_id": file_id,
        "upload_id": upload_id
    }
    complete_post = requests.post(
        'https://api.aliyundrive.com/v2/file/complete',
        json.dumps(complete_data),
        headers=self.headers,
        verify=False
    )
    complete_post_json = complete_post.json()
    if complete_post_json.get('code') == 'AccessTokenInvalid':
        print_info('AccessToken has expired; trying to refresh it')
        if self.token_refresh():
            print_info('AccessToken refreshed; retrying the complete request')
            return self.complete(file_id, upload_id)
        print_error('Unable to refresh AccessToken; exiting')
        exit()
    s = time.time() - self.start_time
    if 'file_id' in complete_post_json:
        print_success('[{filename}] uploaded successfully in {s} seconds'.format(filename=self.filename, s=s))
        return True
    else:
        print_warn('[{filename}] upload failed after {s} seconds'.format(filename=self.filename, s=s))
        return False
def main():
    try:
        if sys.argv[1]:
            url = sys.argv[1]
    except IndexError:
        print_error(
            "first command line argument must be DCSO Portal API endpoint")
        sys.exit(1)

    client = APIClient(api_url=url)

    if not client.is_alive():
        print_error(f"failed using API {url}", exit=1)
    else:
        print_info(f"API {url} ready.")

    try:
        username = prompt_input(
            prompter("Username", symbol='\N{BUST IN SILHOUETTE}'))
        password = prompt_input(prompter("Password", symbol='\N{KEY}'),
                                echo=False)
    except KeyboardInterrupt:
        print()
        print_info("Authentication cancelled")
        sys.exit(0)

    if not (username and password):
        print_error("need both username and password", exit=1)
        return

    try:
        auth = sign_on(client, username, password)
    except PortalException as exc:
        print_error(str(exc), exit=1)
    else:
        print(
            f"Your User Token expires {auth.token.expires}:\n{auth.token.token}"
        )
        if auth.totp_activated:
            print(f"TOTP Activated on: {auth.totp_activated.strftime('%c')}")

    client.token = auth.token.token

    try:
        perms = client.auth.user_service_permissions()
    except PortalException as exc:
        print_error(str(exc))
    else:
        print("\nYour Permissions")
        print("-------------------")
        for perm in iter(perms):
            print(f"{perm.service}: {perm.slug}")
        print(f"\nAccess as TDH Coordinator: {perms.have('tdh-access-admin')}")

    # graphql_execute returns named tuples
    response = client.execute_graphql(
        query='{ auth_user { id organization { shortCode } } }')
    print(
        f"Organization ShortCode: {response.auth_user.organization.shortCode}")
def set_build_state(self, state):
    path = self.build_state_path()
    try:
        with open(path, "w") as fp:
            fp.write(state + "\n")
    except Exception as e:
        print_error("Failed to write file:", e)
def check_auth(self, response_json, func):
    if response_json.get('code') == 'AccessTokenInvalid':
        print_info('AccessToken has expired; trying to refresh it')
        if self.token_refresh():
            print_info('AccessToken refreshed; retrying the request')
            return func()
        print_error('Unable to refresh AccessToken; exiting')
        sys.exit()
def load_build_module(part):
    try:
        package_name = ".".join(__name__.split(".")[:-1])
        module_str = ".".join([package_name, "build_module", part.build])
        part.build_module = importlib.import_module(module_str)
    except Exception as e:
        print_error("Failed to import '{}' for building part '{}': {}".format(
            part.build, part.name, e))
def thread(task):
    # Wait for any in-progress token refresh to finish before uploading.
    LOCK_TOKEN_REFRESH.acquire()
    LOCK_TOKEN_REFRESH.release()
    drive = client.upload_file(task)
    drive.finish_time = get_timestamp()
    drive.spend_time = drive.finish_time - drive.start_time
    if drive.status != 1:
        print_error(os.path.basename(drive.filepath) + ' upload failed')
    client.save_task(drive)
def load_module(gconfig):
    try:
        package_name = ".".join(__name__.split(".")[:-1])
        module_str = ".".join(
            [package_name, "package_module", gconfig.package])
        module = importlib.import_module(module_str)
    except Exception as e:
        print_error("Failed to import package module '{}': {}".format(
            gconfig.package, e))
    return module
def reboot_system():
    '''Reboots the Mac by running /sbin/shutdown -r now.'''
    try:
        # Popen raises OSError (not CalledProcessError) if the command cannot start.
        subprocess.Popen(['/sbin/shutdown', '-r', 'now'])
        return True
    except (subprocess.CalledProcessError, OSError):
        common.print_error("Could not run: /sbin/shutdown -r now")
        return False
def certificate_get_pem(given_cert):
    '''Given a certificate object, get its contents as a PEM string.
    Returns the PEM or a blank string if something bad happened.'''
    try:
        cert_pem = crypto.dump_certificate(crypto.FILETYPE_PEM, given_cert)
    except crypto.Error:
        common.print_error(
            "Could not produce PEM string from given certificate.")
        cert_pem = ''
    return cert_pem
def load_config():
    new_text = ""
    try:
        with open(config_filename, "r") as fp:
            for line in fp:
                new_text += process_raw_text(line)
        # print new_text
    except Exception as e:
        print_error("Cannot open configuration file:", e)
    try:
        doc = yaml.load(new_text)
    except Exception as e:
        print_error("Cannot parse configuration file:", e)
    addjust_keys(doc)

    # Check the parameters in the YAML file.
    if "name" not in doc:
        config_error("'name' is not specified")
    elif check_name_syntax(doc["name"]) is False:
        config_error("illegal syntax of 'name'")
    if "description" not in doc:
        config_error("'description' is not specified")
    if "architecture" not in doc:
        config_error("'architecture' is not specified")
    if "version" not in doc:
        config_error("'version' is not specified")
    check_version_syntax(doc["version"])
    if "part" not in doc or isinstance(doc["part"], dict) is False:
        config_error("'part' is not specified or specified in wrong syntax")
    if "depends" in doc and doc["depends"] is not None:
        if not is_list_of_string(doc["depends"]):
            config_error("'depends' must be a list of strings")
        doc["depends"] = ", ".join(doc["depends"])
    if "package" not in doc:
        doc["package"] = "debian"
    for name, part in doc["part"].iteritems():
        check_part_config(name, part)
    preserved = [
        "name", "version", "architecture", "description", "maintainer",
        "depends"
    ]
    for p in preserved:
        if p not in doc or doc[p] is None:
            doc[p] = ""
    del doc["part"]
    return doc
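# Illustrative package.yaml that would pass the checks in load_config() above.
# The field values and part contents are made-up examples, not taken from a
# real project; per-part fields are validated separately by check_part_config().
#
#   name: hello-pack
#   version: 1.0.0
#   architecture: amd64
#   description: example package
#   depends:
#     - libc6
#   part:
#     main:
#       ...fields checked by check_part_config()...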
def networksetup_detect_network_hardware():
    '''Detects network hardware via /usr/sbin/networksetup.'''
    try:
        subprocess.check_call(['/usr/sbin/networksetup', '-detectnewhardware'])
        common.print_info("Ran networksetup to detect network hardware.")
        time.sleep(10)  # Let the system get IP addresses...
        return True
    except subprocess.CalledProcessError:
        common.print_error("Error while running networksetup to detect network hardware.")
        return False
def systemsetup_set_time_zone(given_locale):
    '''Sets the time zone to the given locale using /usr/sbin/systemsetup.'''
    try:
        subprocess.check_call(['/usr/sbin/systemsetup', '-settimezone', given_locale])
        common.print_info("Ran systemsetup to set the time zone: %s" % given_locale)
        return True
    except subprocess.CalledProcessError:
        common.print_error("Error while running systemsetup to set the time zone.")
        return False
def ntpdate(given_server):
    '''Updates clock via NTP.'''
    try:
        subprocess.check_call(['/usr/sbin/ntpdate', '-u', given_server])
        common.print_info("Ran ntpdate to update the clock: %s" % given_server)
        return True
    except subprocess.CalledProcessError:
        common.print_error("Error while running ntpdate to update the clock.")
        return False
def profiles_install(given_mobileconfig_path):
    '''Calls profiles to install a given config profile in the system scope.
    Returns true if profiles exited 0, false otherwise.'''
    try:
        subprocess.check_call(['/usr/bin/profiles', '-I', '-F', given_mobileconfig_path])
        return True
    except subprocess.CalledProcessError:
        common.print_error("Could not install the configuration profile at %s." % given_mobileconfig_path)
        return False
def profiles_remove(given_mobileconfig_identifier):
    '''Calls profiles to remove a given config profile in the system scope.
    Returns true if profiles exited 0, false otherwise.'''
    try:
        subprocess.check_call(['/usr/bin/profiles', '-R', '-p', given_mobileconfig_identifier])
        return True
    except subprocess.CalledProcessError:
        common.print_error("Could not remove the configuration profile with identifier %s." % given_mobileconfig_identifier)
        return False
def defaults_delete(given_key, given_plist):
    '''Deletes given key from given plist by calling defaults.
    Useful for binary plists.'''
    try:
        subprocess.check_call(['/usr/bin/defaults', 'delete', given_plist, given_key])
        common.print_info("Deleted %(k)s from %(p)s." % {"k": given_key, "p": given_plist})
        return True
    except subprocess.CalledProcessError:
        common.print_error("Error clearing key %(k)s from %(p)s." % {"k": given_key, "p": given_plist})
        return False
def process_response_as_xml(given_server_response):
    '''Retrieves the main dictionary from given response. If not possible, it
    uses a blank dictionary. Ensures that some essential keys are set in all cases.'''
    # Default:
    response_dict = {}
    # If response is not None:
    if given_server_response:
        common.print_info("Processing response for XML content.")
        try:
            response_dict = plistlib.readPlistFromString(given_server_response.read())
            common.print_info("Response is a valid XML property list.")
        except (xml.parsers.expat.ExpatError, NameError):
            common.print_error("Response is not an XML property list!")
    # Return:
    return response_dict
def caffeinate_system():
    '''Prohibits system sleep by starting /usr/bin/caffeinate.'''
    try:
        # Popen raises OSError (not CalledProcessError) if the command cannot start.
        subprocess.Popen(['/usr/bin/caffeinate', '-d', '-i', '-m', '-s'])
        common.print_info("Started caffeination.")
        return True
    except (subprocess.CalledProcessError, OSError):
        common.print_error("Could not start caffeination.")
        return False
def get_username(uid):
    try:
        requests_get = requests.get('https://photo.weibo.com/%s/albums?rd=1' % uid, headers=headers)
    except requests.RequestException:
        # Retry on network errors.
        return get_username(uid)
    try:
        get_text = requests_get.text
        # The album page title looks like "<username>的专辑 - 微相册"; capture the username from it.
        username = re.findall(r"<title>(.+?)的专辑\s-\s微相册<\/title>", get_text)[0]
        return username
    except Exception as e:
        print_error('Login information is invalid; please update it!')
        raise Exception(e)
def main():
    args = parse_args()
    if not os.path.isfile(args.video_file):
        common.print_error("{!r} file does not exist.".format(args.video_file))
        return 11
    if args.start_time is None or args.end_time is None:
        common.print_error("Both start and end time are required.")
        return 12
    if args.start_time > args.end_time:
        common.print_error("Start time must come before end time.")
        return 13
    if args.end_time - args.start_time > MAX_RANGE:
        common.print_error("Range must not exceed {} ms.".format(MAX_RANGE))
        return 14
    vsource = common.get_video_source(args.video_file)
    sc_time = sublib.SceneChangeFile.find(vsource, args.start_time, args.end_time)
    print(int(sc_time))
def system_profiler_get_ethernets():
    '''Calls System Profiler to make an array of Ethernet interfaces with IPs.'''
    # Default so later steps always have a list to iterate:
    network_interfaces_list = []
    # Run command:
    try:
        output = subprocess.check_output(['/usr/sbin/system_profiler', 'SPNetworkDataType', '-xml'])
    except subprocess.CalledProcessError:
        output = None
    # Try to get keys:
    if output:
        try:
            output_dict = plistlib.readPlistFromString(output)
        except xml.parsers.expat.ExpatError:
            output_dict = {}
        if output_dict:
            try:
                network_interfaces_list = output_dict[0]['_items']
            except KeyError:
                network_interfaces_list = []
    # Make list of Ethernet interfaces with IPs:
    ethernet_interfaces_with_ip_addresses = []
    for interface_dict in network_interfaces_list:
        try:
            interface_identifier = interface_dict['interface'].lower()
        except KeyError:
            interface_identifier = ''
        try:
            interface_type = interface_dict['type'].lower()
        except KeyError:
            interface_type = ''
        try:
            interface_ip_addresses = interface_dict['ip_address']
        except KeyError:
            interface_ip_addresses = []
        try:
            interface_media = interface_dict['Ethernet']['MediaSubType'].lower()
        except KeyError:
            interface_media = ''
        if interface_identifier and (interface_type == 'ethernet') and (len(interface_ip_addresses) > 0):
            ethernet_dict = {}
            ethernet_dict['identifier'] = interface_identifier
            ethernet_dict['media'] = interface_media
            ethernet_dict['ip_addresses'] = interface_ip_addresses
            ethernet_interfaces_with_ip_addresses.append(ethernet_dict)
    # Log empty Ethernet interfaces:
    if not ethernet_interfaces_with_ip_addresses:
        common.print_error("No Ethernet interfaces appear to be active.")
    # Return:
    return ethernet_interfaces_with_ip_addresses
def build_prepare(part, env):
    state = part.get_build_state()
    if state in ["PREPARED", "BUILT", "INSTALLED"]:
        return
    if "build-prepare" in part.doc and part.doc["build-prepare"] is not None:
        prepare = part.doc["build-prepare"]
        if not isinstance(prepare, str):
            print_error("Part '{}': 'build-prepare' must be string".format(
                part.name))
        err = execute_script(prepare.split("\n"), env)
        if err:
            print_error(err)
    part.set_build_state("PREPARED")
def main(log_level="info", meta_model_name="power-daps/python3", actions_to_run=["default"]):
    common.set_log_level(log_level)
    meta_model = MetaModel(meta_model_name)
    common.set_meta_model(meta_model_name)
    valid_actions = meta_model.actions()

    common.print_verbose('Actions to run ' + str(actions_to_run))
    common.print_verbose('Valid actions ' + str([va.name for va in valid_actions]))

    for action_to_run in actions_to_run:
        if action_to_run not in [va.name for va in valid_actions]:
            common.print_error("Action '" + action_to_run + "' not found.")
            continue
        for valid_action in valid_actions:
            if valid_action.name == action_to_run:
                common.stop_if_failed(*valid_action.run())
def check_for_version_conflicts():
    global dependency_versions

    (exit_code, output) = common.run_command("cut -d, -f1-2 " + common.dependency_versions_csv_path + " | sort | uniq -d")
    dups_found = False
    if output != "":
        dups_found = True

    version_conflict_found = False
    (exit_code, output) = common.run_command("sort " + common.dependency_versions_csv_path + " | uniq | cut -d, -f1-2 | uniq -d")
    if output != "":
        version_conflict_found = True

    if version_conflict_found:
        common.print_error("Duplicate dependency definitions with different version numbers found")
        return 1
    elif dups_found:
        common.print_warning("Duplicate dependency definitions found")

    return 0
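# Illustrative sketch of the CSV layout the shell pipelines above assume,
# inferred from the cut/sort/uniq commands rather than taken from the original
# project: the first two comma-separated fields identify a dependency and the
# remaining field carries its version. Names and versions below are made up.
#
#   requests,pip,2.25.1
#   requests,pip,2.25.1    <- exact duplicate definition: warning only
#   pyyaml,pip,5.4
#   pyyaml,pip,6.0         <- same dependency, different versions: conflict (error)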
def run(self):
    error_message = "Action '" + self.action_name + "' not found"
    common.print_error(error_message)
    return 1, error_message