def add_to_result_queue(self, result_operation, retry=True):
    """Queue an operation result for delivery to the server.

    Arguments:
        result_operation: An operation exposing raw_result, urn_response,
            and request_method attributes. Values that are not already a
            ResultOperation are wrapped in one first.
        retry: Whether the result queue should keep attempting delivery
            on a non-200 server response.
    """
    try:
        if isinstance(result_operation, ResultOperation):
            queued_op = result_operation
        else:
            queued_op = ResultOperation(result_operation, retry)
        return self._result_queue.put(queued_op)
    except Exception as e:
        logger.error("Failed to add result to queue.")
        logger.exception(e)
def _is_boot_up(self):
    """Checks whether the agent is coming up because of a reboot.

    Compares the current uptime to the uptime recorded in
    self._uptime_file; a smaller current uptime implies the machine
    rebooted since that file was written.

    Returns:
        (str) 'yes' if system boot up detected, 'no' otherwise.
        NOTE(review): despite the original doc claiming a bool, this
        returns the strings 'yes'/'no' — callers compare strings.
    """
    current_uptime = systeminfo.uptime()
    boot_up = 'no'
    try:
        if os.path.exists(self._uptime_file):
            with open(self._uptime_file, 'r') as f:
                file_uptime = f.read()
                if current_uptime < long(file_uptime):
                    boot_up = 'yes'
    except Exception as e:
        logger.error("Could not verify system bootup.")
        logger.exception(e)
    return boot_up
def result_queue_file_dump(self):
    """Persist the in-memory result queue to disk via queuesave.

    Failures are logged and swallowed (best-effort persistence).
    """
    try:
        queuesave.save_result_queue(self._result_queue)
    except Exception as e:
        logger.error("Failed to save result queue to file.")
        logger.exception(e)
def load_queue(file_path):
    """Rebuild an OperationQueue from a pickled file.

    Returns an empty queue when the file is missing or unreadable.
    """
    queue = OperationQueue()
    if not os.path.exists(file_path):
        return queue
    operations = []
    try:
        with open(file_path, 'r') as pickle_file:
            operations = cPickle.load(pickle_file)
    except Exception as e:
        logger.error("Failed to load operations from: {0}".format(file_path))
        logger.exception(e)
        operations = []
    logger.debug("Loaded operations: {0}".format(operations))
    for operation in operations:
        queue.put(operation)
    return queue
def _move_pkgs(self, install_data, app_plist_data):
    """ Move all pkgs in src to dest. """
    try:
        product_key = app_plist_data["productKey"]
        src_dir = os.path.join(settings.UpdatesDirectory, install_data.id)
        dest_dir = os.path.join("/Library/Updates", product_key)
        if not os.path.exists(dest_dir):
            self._make_dir(dest_dir)
            time.sleep(3)
        for entry in os.listdir(src_dir):
            if not entry.endswith(".pkg"):
                continue
            # Replace any stale copy already sitting in /Library/Updates.
            existing_pkg = os.path.join(dest_dir, entry)
            if os.path.exists(existing_pkg):
                os.remove(existing_pkg)
                logger.debug("Removed existing pkg from /Library/Updates: %s " % existing_pkg)
            shutil.move(os.path.join(src_dir, entry), dest_dir)
            logger.debug("Moved " + entry + " to: " + dest_dir)
    except Exception as e:
        logger.error("Failed moving pkgs to /Library/Updates.")
        logger.exception(e)
        raise
def _vine_set_password(pwd=None):
    """Store the vine VNC password via the bundled storepasswd helper.

    Returns:
        (True, '') on success, (False, message) otherwise.
    """
    pwd_cmd = os.path.join(settings.BinDirectory, "storepasswd")
    msg = ''
    try:
        if os.path.exists(_vine_pwd_file):
            os.remove(_vine_pwd_file)
        proc = subprocess.Popen(
            [pwd_cmd, pwd, _vine_pwd_file],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        )
        output, error = proc.communicate()
        # The helper reports success on stdout.
        if "storing password succeeded." in output:
            return True, ""
        msg += output
        if error:
            msg += error
    except Exception as e:
        error = "Unable to save vine password."
        msg += error
        logger.error(error)
        logger.exception(e)
    return False, msg
def _apt_install(self, package_name):
    """Install a package with apt-get.

    Returns:
        (success, error, restart) tuple of strings: 'true'/'false',
        error message (empty on success), and restart-needed flag.
    """
    logger.debug('Installing {0}'.format(package_name))
    install_command = [self.APT_GET_EXE, 'install', '-y', package_name]
    # TODO: figure out if restart needed
    restart = 'false'
    # TODO: parse out the error, if any
    try:
        result, err = self.utilcmds.run_command(install_command)
        if err:
            # apt-get writes informational text to stderr; ignore
            # known non-error messages.
            if 'reading changelogs' in err.lower():
                pass
            else:
                raise Exception(err)
    except Exception as e:
        # Fix: corrected 'Faled' typo in the error log message.
        logger.error('Failed to install {0}'.format(package_name))
        logger.exception(e)
        return 'false', str(e), restart
    logger.debug('Done installing {0}'.format(package_name))
    return 'true', '', restart
def complete_softwareupdate(self, install_data):
    """ Removes the product key directory if it exists, and lets softwareupdate download and install on its own. """
    success = "false"
    error = "Failed to install: " + install_data.name
    try:
        app_plist_data = self._get_app_plist_data(install_data)
        # Retry removal with increasing back-off (5s, then 10s).
        for attempt in range(1, 3):
            if self._remove_productkey_dir(app_plist_data):
                break
            time.sleep(5 * attempt)
        update_name = self._get_softwareupdate_name(app_plist_data)
        success, error = self.softwareupdate(
            update_name, install_data.proc_niceness)
    except Exception as e:
        logger.error("Failed to download/install pkg with softwareupdate: %s" % install_data.name)
        logger.exception(e)
    return success, error
def initial_data(self, operation_type):
    """ Any initial data the server should have on first run.

    Args:
        operation_type - The type of operation determines what the
            plugin should return. Currently ignored for RAPlugin.

    Returns:
        (dict) Dictionary with initial RA plugin data (the tunnel
        public key, or an empty string when unavailable).
    """
    logger.debug("Sending initial ra data.")
    data = {"public_key": ""}
    try:
        if os.path.exists(tunnels.tunnel_pub_key):
            with open(tunnels.tunnel_pub_key, "r") as pub_key:
                data = {"public_key": pub_key.read()}
    except Exception as e:
        # Fix: corrected 'verfiy' typo in the error log message.
        logger.error("Could not verify tunnel key. Not good.")
        logger.exception(e)
    logger.debug("Done with initial ra data.")
    return data
def _apt_purge(self, package_name):
    """Purge (uninstall and remove configs for) a package with apt-get.

    Returns:
        ('true', '') when at least one package was removed,
        ('false', message) otherwise.
    """
    purged = 0
    purge_command = [self.APT_GET_EXE, 'purge', '-y', package_name]
    try:
        result, err = self.utilcmds.run_command(purge_command)
        if err:
            raise Exception(err)
        # apt-get reports e.g. "1 to remove"; extract the count.
        found = re.search(r'\d+ to remove', result)
        if found:
            amount_purged = found.group().split(' ')[0]
            purged = int(amount_purged)
        if purged > 0:
            # Fix: corrected 'Successfuly' typo in the log message.
            logger.debug(
                'Successfully removed {0} packages.'.format(purged)
            )
            return 'true', ''
        else:
            logger.info('No packages were removed.')
            return 'false', 'No packages removed.'
    except Exception as e:
        logger.error('Problem while uninstalling package: ' + package_name)
        logger.exception(e)
        return 'false', str(e)
def edit_operation(self, operation, result_sent=False, sent_time=None):
    """Update an existing operation row, matched by operation id.

    Args:
        operation: Operation exposing type, id, raw_operation and
            raw_result attributes.
        result_sent: Whether the result was delivered to the server.
        sent_time: Delivery timestamp; defaults to settings.EmptyValue.

    NOTE(review): values are interpolated into the SQL string with
    manual quote doubling rather than bound '?' parameters; consider
    parameterizing to avoid malformed SQL on unexpected input.
    """
    if sent_time is None:
        sent_time = settings.EmptyValue
    try:
        with self._connection:
            cursor = self._connection.cursor()
            # Escape any single-quotes. Sqlite use two single quotes.
            values = ",".join([
                "%s = '%s'" % (OperationColumn.OperationType, operation.type),
                "%s = '%s'" % (OperationColumn.OperationId, operation.id),
                "%s = '%s'" % (OperationColumn.RawOperation,
                               operation.raw_operation.replace(r"'", r"''")),
                "%s = '%s'" % (OperationColumn.RawResult,
                               operation.raw_result.replace(r"'", r"''")),
                "%s = '%s'" % (OperationColumn.DateTimeSent, sent_time),
                "%s = %s" % (OperationColumn.ResultsSent, int(result_sent))
            ])
            logger.debug("Editing operation %s." % operation.id)
            cursor.execute("UPDATE %s SET %s WHERE %s = '%s'" % (
                self._operations_table, values,
                OperationColumn.OperationId, operation.id))
    except Exception as e:
        logger.error("Could not edit operation %s." % operation.id)
        logger.exception(e)
def _get_file_data(self, data_dictionary, repo):
    """ Create a dictionary that contains name, uri, hash, size, and pkg_type.

    file_data format:
        "file_data": [
            {
                "file_name": name of package
                "file_uri": uri here
                "file_hash": sha256
                "file_size": size in kb's
            }
        ]
    """
    # TODO: implement a for loop for the possibility of multiple uri's
    uri = data_dictionary.get(PkgDictValues.uri, '')
    if uri:
        # Repo string starts with the host; prepend it to the path.
        hostname = repo.split(' ')[0]
        uri = hostname + data_dictionary[PkgDictValues.uri]
    # Package name is the last path component of the uri.
    name = uri.split('/')[-1]
    # Prefer the strongest available digest: sha256, then sha1, then md5.
    pkg_hash = (data_dictionary.get(PkgDictValues.sha256, '')
                or data_dictionary.get(PkgDictValues.sha1, '')
                or data_dictionary.get(PkgDictValues.md5, ''))
    size = data_dictionary.get('Size', '')
    try:
        size = int(size)
    except Exception as e:
        logger.error("Failed to cast file_size to int.")
        logger.exception(e)
        # If for whatever reason it fails to convert
        size = ''
    return [{FileDataKeys.name: name,
             FileDataKeys.uri: uri,
             FileDataKeys.hash: pkg_hash,
             FileDataKeys.size: size}]
def get_file_data(self, app_name):
    """ Returns urls and other info corresponding to the app with given app_name. """
    file_data = []
    try:
        app_data = self.get_app_data(app_name)
        for pkg in app_data.get('Packages', []):
            uri = pkg.get('URL', '')
            file_data.append({
                'file_name': uri.split('/')[-1],
                'file_uri': uri,
                'file_size': pkg.get('Size', ''),
                'file_hash': '',
            })
    except Exception as e:
        logger.error('Could not get file_data/release date.')
        logger.exception(e)
    return file_data
def _osxvnc_set_password(pwd=None):
    """Write the OSXvnc password file with owner-only (600) permissions.

    Returns:
        (True, '') on success, (False, message) on failure.
    """
    msg = ''
    try:
        if os.path.exists(_osx_pwd_file):
            os.remove(_osx_pwd_file)
        with os.fdopen(
            os.open(
                _osx_pwd_file,
                os.O_WRONLY | os.O_CREAT,
                stat.S_IRUSR | stat.S_IWUSR  # "600" permissions
            ),
            'w'
        ) as handle:
            handle.write(pwd + '\n')
        # Fix: the original fell through to `return False, msg` even when
        # the password file was written successfully; report success.
        return True, ''
    except Exception as e:
        error = "Unable to save vine password."
        msg += error
        logger.error(error)
        logger.exception(e)
    return False, msg
def stop():
    """Stops the currently running vine server.

    Returns:
        - Returns True if successful. False otherwise.
    """
    msg = "Vine might not be running."
    # No tracked process: nothing to stop.
    if not _process:
        return False, msg
    try:
        _process.terminate()
        return True, ''
    except Exception as e:
        error = 'Issue trying to stop the vine server.'
        msg = error
        logger.error(error)
        logger.exception(e)
    return False, msg
def _parse_packages_to_install(self, data): lines = data.split('\n') install_list = [] for line in lines: if 'Inst' in line: try: #package name comes right after Inst #Example: "Inst {pkg-name} .....other data....." update_package_name = line.split(' ')[1] # New version comes right after old version #Example: "Inst {name} [current-version] (new-version ....) get_version_regex = r'.*\((\S+).*\)' update_package_version = \ re.match(get_version_regex, line).group(1) install_pkg = (update_package_name, update_package_version) install_list.append(install_pkg) except AttributeError as e: logger.error( 'Failed retrieving version for: ' + update_package_name ) logger.exception(e) return install_list
def current_filesystem_data(self):
    """Return a list of dicts describing each mounted filesystem's usage."""
    fs_list = []
    try:
        # filesystem_usage() rows: (name, used, %used, free, %free, mount)
        for fs in self.filesystem_usage():
            fs_list.append({
                MonitKey.Name: fs[0],
                MonitKey.Used: fs[1],
                MonitKey.PercentUsed: fs[2],
                MonitKey.Free: fs[3],
                MonitKey.PercentFree: fs[4],
                MonitKey.Mount: fs[5],
            })
    except Exception as e:
        logger.error("Could not retrieve mac file system data.")
        logger.exception(e)
    return fs_list
def get_agent_app(self):
    """Build an Application instance describing this agent itself.

    Returns:
        The Application created by CreateApplication.create, or {} on
        failure. NOTE(review): the {} fallback is not an Application;
        callers must tolerate both shapes.
    """
    try:
        # Positional call — argument order must match
        # CreateApplication.create exactly.
        agent_app = CreateApplication.create(
            settings.AgentName,
            settings.AgentVersion,
            settings.AgentDescription,  # description
            [],  # file_data
            [],  # dependencies
            '',  # support_url
            '',  # vendor_severity
            '',  # file_size
            '',  # vendor_id,
            '',  # vendor_name
            settings.AgentInstallDate,  # install_date
            None,  # release_date
            True,  # installed
            "",  # repo
            "no",  # reboot_required
            "no"  # uninstallable
        )
        return agent_app
    except Exception as e:
        logger.error("Failed to create agent application instance.")
        logger.exception(e)
        return {}
def _untar_files(self, directory):
    """ Scans a directory for any tar files and 'untars' them. Scans
    recursively just in case there's tars within tars. Deletes tar
    files when done.

    @param directory: Directory to be scanned.
    @return: Nothing
    """
    tars = glob.glob(os.path.join(directory, '*.tar*'))
    if not tars:
        return
    import tarfile
    try:
        for tar_file in tars:
            tar = tarfile.open(tar_file)
            try:
                tar.extractall(path=directory)
            finally:
                # Ensure the archive handle is closed even when
                # extraction fails.
                tar.close()
            os.remove(tar_file)
        # Recurse in case an archive contained further tarballs.
        self._untar_files(directory)
    except (OSError, tarfile.TarError) as e:
        # Fix: tarfile raises tarfile.TarError (e.g. ReadError) for
        # corrupt archives, which the OSError-only handler let propagate.
        logger.info("Could not extract tarball.")
        logger.exception(e)
def _yum_update(self, package_name, proc_niceness):
    """Update a package with yum at the given CPU niceness.

    Returns:
        (success, error, restart) strings: 'true'/'false', error
        message (empty on success), and restart-needed flag.
    """
    logger.debug('Updating: {0}'.format(package_name))
    # TODO: figure out if restart needed after update
    restart = 'false'
    try:
        cmd = [
            'nice', '-n',
            CpuPriority.niceness_to_string(proc_niceness),
            yum.yum_cmd, 'update', '-y', package_name
        ]
        result, err = self.utilcmds.run_command(cmd)
        if err:
            # yum writes warnings to stderr; only real errors abort.
            if 'warning:' in err:
                pass
            else:
                raise Exception(err)
    except Exception as e:
        # Fix: corrected 'Faled' typo in the error log message.
        logger.error('Failed to update {0}'.format(package_name))
        logger.exception(e)
        return 'false', str(e), restart
    logger.debug('Done updating {0}'.format(package_name))
    return 'true', '', restart
def _load_uninstall_data(self):
    """Parses the 'data' key to get the application info for uninstall.

    Returns:
        A list of UninstallData types.
    """
    uninstall_data_list = []
    try:
        data_list = self.json_message.get(RvOperationKey.FileData, [])
        for entry in data_list:
            uninstall_data = UninstallData()
            uninstall_data.name = entry[RvOperationKey.Name]
            uninstall_data.id = entry[RvOperationKey.AppId]
            uninstall_data_list.append(uninstall_data)
    except Exception as e:
        logger.error("Could not load uninstall data.")
        logger.exception(e)
    return uninstall_data_list
def main():
    """Drive the release check: set up a test branch, merge, run the
    tag/doc check and build, then clean up and restore the starting
    directory. With --merge-only, stops after the merge/tag checks.
    """
    rel_info = ReleaseInfo()
    start_directory = os.getcwd()
    parse_args(rel_info)
    try:
        setup_test_branch(rel_info)
        merge_branches(rel_info)
        test_tag_doc(rel_info)
        if rel_info.args.merge_only:
            # Early return skips the build AND the `else` success log;
            # `finally` still runs cleanup.
            return
        build_platform(rel_info)
    except KeyboardInterrupt:
        # Ctrl-C: fall through silently to cleanup.
        pass
    except:
        logger.exception("[FAILED] Please check your changes. "\
            "You can not pass this checker unless your branch "\
            "can be merged without conflicts.")
        logger.info("Note: all repos are in test branch %s"
                    % rel_info.test_branch)
    else:
        # Runs only when the try body completed without returning.
        logger.info(
            "[PASS] Your changes can be successfully merged and build.")
    finally:
        cleanup(rel_info)
        os.chdir(start_directory)
def add_operation(self, operation, received_time):
    """Insert a new operation row into the operations table.

    Args:
        operation: Operation providing type, id and raw_operation.
        received_time: Timestamp when the operation was received.
    """
    try:
        with self._connection:
            cursor = self._connection.cursor()
            # Fix: values are bound via '?' placeholders, so sqlite
            # escapes them itself. The previous manual replace of '
            # with '' double-escaped quotes in the stored raw_operation
            # (that escaping is only needed for string-built SQL, as in
            # edit_operation).
            values = (operation.type,
                      operation.id,
                      operation.raw_operation,
                      settings.EmptyValue,  # raw message value
                      received_time,
                      settings.EmptyValue,  # date time sent
                      False)
            logger.debug("Adding operation %s." % operation.id)
            cursor.execute("INSERT INTO %s (%s) "
                           "VALUES (?, ?, ?, ?, ?, ?, ?)"
                           % (self._operations_table,
                              OperationColumn.AllColumns),
                           values)
    except Exception as e:
        logger.error("Could not add operation %s." % operation.id)
        logger.exception(e)
def _download_file(self, download_dir, file_uris, file_size):
    """ Loops through all the file_uris provided and terminates when
    downloaded successfully or exhausts the file_uris list.

    Returns:
        (bool) - Success download.
    """
    # Try each mirror in turn until one yields a verified download.
    for file_uri in file_uris:
        logger.debug("Downloading from: {0}".format(file_uri))
        download_path = os.path.join(
            download_dir, os.path.basename(file_uri))
        try:
            urllib.urlretrieve(file_uri, download_path)
            if self._check_if_downloaded(download_path, file_size):
                logger.debug("Downloaded successfully.")
                return True
            logger.error(
                "Failed to download from: {0}".format(file_uri)
            )
        except Exception as dlerr:
            logger.error("Failed to download from: {0}".format(file_uri))
            logger.exception(dlerr)
    logger.debug("Failed to download.")
    return False
def get_cache_dir():
    """Gets yum's cache directory. Cache directory is used for repo metadata.

    Returns:
        - Cache directory full path. None otherwise.
    """
    try:
        if yum_vars:
            # Expand yum's $basearch/$releasever variables in cachedir.
            _dir = _etc_yum.get(_etc_main_section, 'cachedir').replace(
                '$basearch', yum_vars['basearch'])
            _dir = _dir.replace('$releasever', yum_vars['releasever'])
            return _dir
        # Fix: corrected the garbled quoting in the error message
        # (was "'yum'vars'").
        raise Exception("Oops. 'yum_vars' not found.")
    except Exception as e:
        logger.error("Could not find yum's cache directory.")
        logger.exception(e)
    return None
def login():
    """Log in with the username and password from config.

    Caches the session id on Azkaban.sid and retries up to
    config["retry"] times.

    Returns:
        bool: True on success, False otherwise.
    """
    logger.info("login")
    # Already have a session id: nothing to do.
    if Azkaban.sid:
        return True
    ok = False
    for i in range(config["retry"]):
        try:
            params = {
                'action': "login",
                'username': config["username"],
                'password': config["password"]
            }
            r = requests.post(config["manager"], params)
            content = r.text
            debug_print(content)
            Azkaban.sid = str(json.loads(content)["session.id"])
            ok = True
            break
        except Exception as e:
            logger.exception("login exception: {e}".format(e=e))
    logger.info("login update sid {0}".format(Azkaban.sid))
    return ok
def fetch_project_flows(self, project):
    """Fetch the flow ids of the given project.

    Returns:
        list: the project's flow ids; [] on request or parse failure.
    """
    method = "GET"
    url = config["manager"]
    params = {
        "ajax": "fetchprojectflows",
        "project": project
    }
    content, ok = self.get_ajax(method, url, params)
    flowids = []
    if ok:
        try:
            res = json.loads(content)
            flowids = map(lambda e: e["flowId"], res["flows"])
        except Exception as e:
            logger.exception(e)
            return []
        logger.info(
            "fetch_project_flows {0} : {1} SUCCESS".format(
                project, flowids))
        return flowids
    else:
        logger.info("fetch_project_flows {0} FAILED".format(project))
        return flowids
def _yum_local_update(self, package_name, packages_dir, proc_niceness):
    """Install a package from locally downloaded rpms.

    Runs `yum localupdate` over every .rpm in packages_dir at the
    given niceness.

    Returns:
        (success, error, restart) strings.
    """
    logger.debug('Installing {0}'.format(package_name))
    # TODO: figure out if restart needed
    restart = 'false'
    rpms = glob.glob(os.path.join(packages_dir, '*.rpm'))
    cmd = [
        'nice', '-n',
        CpuPriority.niceness_to_string(proc_niceness),
        yum.yum_cmd, '--nogpgcheck', 'localupdate', '-y'
    ]
    cmd.extend(rpms)
    try:
        output, error = self.utilcmds.run_command(cmd)
        if error:
            raise Exception(error)
    except Exception as e:
        # Fix: corrected 'Faled' typo in the error log message.
        logger.error('Failed to install {0}'.format(package_name))
        logger.exception(e)
        return 'false', str(e), restart
    logger.debug('Done installing {0}'.format(package_name))
    return 'true', '', restart
def get_application(self, vendor_id):
    """ Returns an Application instance based on the 'vendor_id'.

    @param vendor_id: Vendor ID of the application.
    @return: An Application instance, or None on failure.
    """
    app = None
    try:
        with self._connection:
            cursor = self._connection.cursor()
            query = "SELECT * FROM %s WHERE %s='%s'" % (
                self._application_table,
                ApplicationColumn.VendorId,
                vendor_id)
            cursor.execute(query)
            app = self._get_application_from_row(cursor.fetchone())
    except Exception as e:
        logger.error("Could not get application id %s." % vendor_id)
        logger.exception(e)
        app = None
    return app
def create_from_file(cls, proj, src_file):
    """Extract PhoneLab tag docs embedded in comments of a source file.

    Returns a list of TagDoc, one per valid embedded JSON doc comment;
    [] for unsupported file extensions.
    """
    _, extension = os.path.splitext(src_file)
    if extension not in cls.SRC_EXTENSIONS:
        return []
    tag_docs = []
    with open(src_file, 'r') as f:
        contents = f.read()
    for comment in COMMENT_PATTERN.finditer(contents):
        match = PHONELAB_DOC_PATTERN.search(comment.group('body'))
        if match is None:
            continue
        try:
            # Strip comment decoration ('*') and collapse to one line
            # before parsing the JSON payload.
            text = ' '.join(
                l.strip()
                for l in match.group('json').replace('*', '').splitlines())
            doc = json.loads(text)
            line_no = contents.count('\n', 0, comment.start()) + 1
            tag_docs.append(TagDoc(doc, proj, src_file, line_no))
        except:
            logger.exception("Invalid doc string in file %s: %s"
                             % (src_file, match.group('json')))
            logger.info("JSON Text: %s" % (text))
            continue
    return tag_docs
def read_plist_string(self, string):
    """ Reads data from a string. Keys in the plist files are used for the dict keys.

    @param string: A plist-format str type.
    @return: a python dict. {} if error is encountered.
        NOTE(review): the original doc said None on error, but the
        code returns an empty dict.
    """
    try:
        plist = plistlib.readPlistFromString(string)
        return plist
    except Exception as e:
        logger.error("Failed to read plist from string.")
        logger.exception(e)
        return {}
def _download_catalogs(self):
    """Download Apple's software update catalogs into the catalog dir."""
    catalog_urls = [
        'http://swscan.apple.com/content/catalogs/index.sucatalog',
        'http://swscan.apple.com/content/catalogs/index-1.sucatalog',
        'http://swscan.apple.com/content/catalogs/others/index-leopard.merged-1.sucatalog',
        'http://swscan.apple.com/content/catalogs/others/index-leopard-snowleopard.merged-1.sucatalog',
        'http://swscan.apple.com/content/catalogs/others/index-lion-snowleopard-leopard.merged-1.sucatalog',
        'http://swscan.apple.com/content/catalogs/others/index-mountainlion-lion-snowleopard-leopard.merged-1.sucatalog',
        'http://swscan.apple.com/content/catalogs/others/index-10.9-mountainlion-lion-snowleopard-leopard.merged-1.sucatalog'
    ]
    for url in catalog_urls:
        # Keep the remote file name (with extension) locally.
        filename = url.rsplit('/', 1)[-1]
        destination = os.path.join(self._catalog_directory, filename)
        try:
            urllib.urlretrieve(url, destination)
        except Exception as e:
            logger.error("Could not download sucatalog %s." % filename)
            logger.exception(e)
def _request(self, data):
    """POST heartbeat data to the backend and validate the response.

    Raises:
        JudgeServiceError: when the request fails, the response is not
        valid JSON, or the backend reports an error.
    """
    try:
        resp = requests.post(self.backend_url, json=data, headers={
            "X-JUDGE-SERVER-TOKEN": token,
            "Content-Type": "application/json"
        }, timeout=5).text
    except Exception as e:
        logger.exception(e)
        raise JudgeServiceError("Heartbeat request failed")
    try:
        # json.loads(): JSON string --> dict
        r = json.loads(resp)
        if r["error"]:
            raise JudgeServiceError(r["data"])
    except JudgeServiceError:
        # Fix: previously the server-reported error raised above was
        # swallowed by the generic handler below and replaced with a
        # useless "Invalid heartbeat response"; let it propagate.
        raise
    except Exception as e:
        logger.exception("Heartbeat failed, response is {}".format(resp))
        raise JudgeServiceError("Invalid heartbeat response")
def __get_platform_access(self) -> 'PlatformAccess | None':
    """Returns PlatformAccess with valid agent's credentials (environ),
    None otherwise.

    Credentials are read from OWN_<NAME>_AGENT_LOGIN / _PASSWORD; the
    instance is cached on self.platform_access after the first
    successful call.
    """
    if not self.platform_access:
        try:
            login = os.environ[f'OWN_{self.name.upper()}_AGENT_LOGIN']
            password = os.environ[
                f'OWN_{self.name.upper()}_AGENT_PASSWORD']
            self.platform_access = PlatformAccess(login, password)
        except KeyError as key_error:
            # Missing environment variable(s).
            exception(
                self.name,
                f'Failed get credentials for {self.name}-agent. Error message: {str(key_error)}'
            )
        except Exception as err:
            error(
                self.name,
                f'Some error occurred while establishing connection to the platform: {err}'
            )
    return self.platform_access
def start_worker(func: Callable, return_list: List, tasks_queue: queue.Queue,
                 time_limit_sec: float,
                 start_time: 'float | None' = None) -> None:
    """
    Temporary (time limited) runs a start_worker: execute the given function on the given queue.

    :param func: function to execute
    :param return_list: sharable Manager.list for returned data
    :param tasks_queue: queue.Queue for task storing
    :param time_limit_sec: limitation of time (in seconds)
    :param start_time: UNIX time; defaults to the current time at call.
        (Fix: the original default `= time()` was evaluated once at
        import, so every later call shared a stale start time.)
    :return: Nothing
    """
    if start_time is None:
        start_time = time()
    while True:
        if time() - start_time < time_limit_sec:
            try:
                item = tasks_queue.get()
                if item is None:
                    break
                return_list.append(func(*item))
                tasks_queue.task_done()
            except Exception as e:
                # Fix: use traceback.format_exc() — print_exc() prints to
                # stderr and returns None, so the log line recorded 'None'.
                logger.exception(AGENT_UTILS_NAME,
                                 f'Error in start_worker: {e}\n{traceback.format_exc()}')
                tasks_queue.task_done()
                continue
        else:
            # Time exceeded: drain the queue so a join() doesn't hang.
            logger.info(AGENT_UTILS_NAME, 'Time for start_worker was exceed')
            try:
                while not tasks_queue.empty():
                    tasks_queue.get(block=False, timeout=0)
                    tasks_queue.task_done()
                    logger.debug(AGENT_UTILS_NAME, f'{tasks_queue.qsize()} tasks remain.')
                logger.debug(AGENT_UTILS_NAME, f'Tasks queue is empty for function {func.__name__}.')
            except queue.Empty:
                logger.info(AGENT_UTILS_NAME, 'Tasks queue is empty.')
                break
            except Exception as e:
                logger.exception(AGENT_UTILS_NAME, f'Error in start_worker. Message: {e}')
                break
            break
def create_twitter_credentials_doc(credentials_file_path: str):
    """
    Adds a document with credentials (if it doesn't exist), adds the
    twitter credentials collection (if it doesn't exist) and adds the
    credentials to the twitter collection.

    :param credentials_file_path: file with the credentials to load;
        place all values in this order, one per line:
            # first twitter credentials kit
            access_token (1)
            access_token_secret (1)
            customer_id (1)
            secret_api (1)
            # second twitter credentials kit
            access_token (2)
            access_token_secret (2)
            customer_id (2)
            secret_api (2)
            ...
    :return: nothing
    """
    credentials_key = os.environ['CREDENTIALS_DB_KEY'] + '_test'
    credentials_dict = copy.deepcopy(credentials_doc)
    if not os.path.isfile(credentials_file_path):
        logger.exception(
            UTILS, f'Wrong credentials file path: {credentials_file_path}')
        return None
    with open(credentials_file_path) as file:
        credentials_str = file.read()
    credentails_list = credentials_str.split('\n')
    # Consume the file in fixed-size kits of consecutive lines.
    for i in range(0, len(credentails_list), SIZE_OF_TWITTER_CREDENTIALS_KIT):
        credentials_kit = credentails_list[i:i + SIZE_OF_TWITTER_CREDENTIALS_KIT]
        credentials_value = {
            ACCESS_TOKEN_KEY: credentials_kit[0],
            ACCESS_TOKEN_SECRET_KEY: credentials_kit[1],
            CUSTOMER_ID_KEY: credentials_kit[2],
            SECRET_API_KEY: credentials_kit[3],
            TIME_BETWEEN_REQUESTS_KEY: TWITTER_TIME_BETWEEN_REQUESTS
        }
        credentials_dict[VALUE_KEY] = credentials_value
        db.common_db.document(credentials_key).collection(TWITTER_KEY).add(
            credentials_dict)
def remove(self, app_name):
    """Delete '<app_name>.app' from the applications directory.

    Returns:
        (success, error): 'true'/'false' string plus an error message
        (empty on success).
    """
    bundle_name = '%s.app' % app_name
    app_dir = os.path.join(self.apps_dir, bundle_name)
    try:
        if os.path.exists(app_dir):
            shutil.rmtree(app_dir)
            return 'true', ''
        return 'false', 'Application %s not found.' % app_name
    except Exception as e:
        logger.error("Could not uninstall %s." % app_name)
        logger.exception(e)
        return 'false', str(e)
def _get_dependencies(self, name, version, release, arch):
    """Resolve the dependencies yum would install for an update.

    @param name/version/release/arch: identify the update; arch is
        currently passed as '' (see TODO below).
    @return: list of {'name', 'version', 'app_id'} dicts; [] on failure.
    """
    # TODO: architecture keeps giving problems. Duplicate
    # packages in available updates with different architectures, but
    # when checking yum update {package} you HAVE to have the right
    # architecture.
    yum_update = yum.YumUpdate(name, version, release, '', '')
    yum_deps = []
    try:
        yum_deps = yum.get_needed_dependencies(yum_update)
    except Exception as e:
        # Fix: corrected 'depenencies' typo in the error log message.
        logger.error("Failed to get dependencies for: {0}".format(name))
        logger.exception(e)
    # When an error occurs in get_needed_dependencies (not an exception)
    # it might set yum_deps to None. Therefore an error occurs when
    # trying to iterate over it.
    if not yum_deps:
        return []
    dep_list = []
    for dep in yum_deps:
        version = dep.version
        if dep.release:
            version = '{0}-{1}'.format(version, dep.release)
        dep_dict = {}
        dep_dict['name'] = dep.name
        dep_dict['version'] = version
        # app_id mirrors how applications are identified elsewhere.
        dep_dict['app_id'] = hashlib.sha256("{0}{1}".format(
            dep.name, dep.version)).hexdigest()
        # TODO: find a solution. Getting duplicates of package
        # just different architecture. Should we only be listing one?
        # or both?
        if dep_dict not in dep_list:
            dep_list.append(dep_dict)
    return dep_list
def current_cpu_data(self):
    """Return user/system/idle CPU figures parsed from iostat output.

    Example output /usr/sbin/iostat:
              disk0       cpu     load average
        KB/t tps  MB/s  us sy id   1m   5m   15m
        23.39   3  0.08  7  4 89  0.81 0.69 0.53
    Following logic is based on the above output: no other stat named
    with two words (such as "load average") appears before "cpu".

    Returns:
        dict keyed by MonitKey.User/System/Idle; zeros on failure.
    """
    try:
        cpu_index = None
        cmd = ['/usr/sbin/iostat']
        io = subprocess.Popen(cmd, stdout=subprocess.PIPE).communicate()[0]
        # Header line holds the device/section names ("disk0", "cpu", ...).
        top_line = io.splitlines()[0]
        top_line = [l for l in top_line.split(' ') if l != '']
        for i in range(len(top_line)):
            if top_line[i] == 'cpu':
                cpu_index = i * 3  # 3 because of the output in iostat
                break  # uses 3 units per device.
        # Third line carries the actual values.
        stats_line = io.splitlines()[2]
        stats_line = [l for l in stats_line.split(' ') if l != '']
        stats = {
            MonitKey.User: stats_line[cpu_index],
            MonitKey.System: stats_line[cpu_index + 1],
            MonitKey.Idle: stats_line[cpu_index + 2]
        }
    except Exception as e:
        logger.error("Could not get mac cpu data.")
        logger.exception(e)
        stats = {MonitKey.User: 0, MonitKey.System: 0, MonitKey.Idle: 0}
    return stats
def edit_update_data(self, name, needs_restart=False):
    """Update the needs-restart flag for the named update row.

    Args:
        name: Update name used as the row key.
        needs_restart: Whether the update requires a restart.
    """
    try:
        with self._connection:
            cursor = self._connection.cursor()
            values = ",".join([
                "%s = '%s'" % (UpdateDataColumn.Name, name),
                "%s = '%s'" % (UpdateDataColumn.NeedsRestart, needs_restart)
            ])
            logger.debug("Editing update %s." % name)
            cursor.execute("UPDATE %s SET %s WHERE %s = '%s'" % (
                self._table, values, UpdateDataColumn.Name, name))
    except Exception as e:
        # Fix: log failures at error level, consistent with the other
        # DB edit/add methods, instead of info.
        logger.error("Could not edit update %s." % name)
        logger.exception(e)
def get_files(self) -> List[File]:
    """
    Sends the request to the back-end, retrieving element's files

    :return: Files remotely retrieved for an element
    """
    http_method = 'GET'
    detail = 'board'
    url = self.__url + '/files'
    values = {}
    try:
        headers = self.__platform_access.get_headers(
            http_method, url, values, detail)
        response_data = request(
            method=http_method, url=url, headers=headers).json()
    except HTTPError as error:
        logger.exception(OWN_ADAPTER_NAME,
                         f'Couldn\'t get the element\'s files: {error}')
        return []
    return self.__create_files(response_data)
def update_problem(db, info):
    """Insert a new problem row, allocating max(problem_id)+1 as its id.

    Rolls back on failure.

    NOTE(review): the id allocation (SELECT max + 1, then INSERT) is
    racy under concurrent inserts, and the values are interpolated
    directly into the SQL string — parameterized queries would avoid
    both SQL injection and quoting bugs in the problem text.
    """
    cursor = db.cursor()
    sql = 'select max(problem_id) from problem;'
    cursor.execute(sql)
    result = cursor.fetchone()
    new_id = result[0] + 1
    # Columns: problem_id title description input output sample_input
    # sample_output source time_limit memory_limit
    sql = "insert into problem (problem_id, title, description, input, output," \
          "sample_input, sample_output, source, time_limit, memory_limit)" \
          "VALUES ({problem_id}, '{title}', '{description}', '{input}', '{output}', " \
          "'{sample_input}', '{sample_output}', '{source}', {time_limit}, {memory_limit});"\
        .format(problem_id=new_id, title=info['title'],
                description=info['description'], input=info['input'],
                output=info['output'], sample_input=info['sample_input'],
                sample_output=info['sample_output'], source=info['source'],
                time_limit=info['time_limit'],
                memory_limit=info['memory_limit'])
    try:
        cursor.execute(sql)
        db.commit()
    except Exception as e:
        logger.exception(e)
        db.rollback()
def ftp_upload(self, path):
    """Recursively upload a file or directory tree over FTP.

    For a file, uploads it directly. For a directory, mirrors it on
    the remote side, descending with cwd on both the FTP connection
    and the local working directory.

    NOTE(review): this changes the process-wide working directory via
    os.chdir and leaves it inside `path` after the top-level call;
    callers relying on the previous cwd must restore it themselves.
    """
    try:
        if os.path.isfile(path):
            with open(path, "rb") as infile:
                self.myFTP.storbinary('STOR %s' % path, infile)
            return
        files = os.listdir(path)
        os.chdir(path)
        for f in files:
            print(f)
            if os.path.isfile(f):
                with open(f, "rb") as infile:
                    self.myFTP.storbinary('STOR %s' % f, infile)
            elif os.path.isdir(f):
                # Create and enter the remote dir, recurse, then back out
                # on both the FTP side and the local side.
                self.myFTP.mkd(f)
                self.myFTP.cwd(f)
                self.ftp_upload(f)
                self.myFTP.cwd('..')
                os.chdir('..')
    except Exception as e:
        logger.exception(e)
def find_jokes(self) -> Tuple:
    """
    Uses http://webknox.com/api for retrieving jokes.

    Without a query parameter a random joke is returned; otherwise a
    search is performed for the given term.

    :return: A Flask response with the retrieved joke and status code
        (200 on success, 500 when an exception was handled).
    """
    query = ''
    result = {}
    try:
        # === Get the input parameters ===
        query = flask.request.form.get(REQ_QUERY_KEY, None)
        if not query:
            result = get_random_joke()
            return flask.make_response(jsonify(result), HTTPStatus.OK)
        result = get_search_joke(term=query)
        return flask.make_response(jsonify(result), HTTPStatus.OK)
    except ConnectionError as conn_err:
        logger.exception(
            JOKES_AGENT_NAME,
            f'Connection error while retrieving the results: {conn_err}')
    except TimeoutError as timeout_err:
        logger.exception(
            JOKES_AGENT_NAME,
            f'Timeout while retrieving the results: {timeout_err}')
    except Exception as excpt:
        logger.exception(
            JOKES_AGENT_NAME,
            f'API error for \'{query}\'. Exception message: {excpt}')
    # Reached only after one of the handlers above ran.
    # Fix: removed the unreachable trailing 200 return that followed
    # this statement in the original.
    return flask.make_response(jsonify(result),
                               HTTPStatus.INTERNAL_SERVER_ERROR)
def load_plugins(self, plugin_dir):
    """Discover and instantiate AgentPlugin implementations.

    Treats each immediate subdirectory of plugin_dir as a package,
    imports every .py module inside it, and collects any class (outside
    the 'agentplugin' module itself) whose instance is an AgentPlugin.

    @param plugin_dir: Directory containing plugin packages.
    @return: list of AgentPlugin instances.
    """
    sys.path.append(plugin_dir)
    plugins = []
    packages = {}
    # Map package path -> package name for every subdirectory.
    for name in os.listdir(plugin_dir):
        if os.path.isdir(os.path.join(plugin_dir, name)):
            packages[os.path.join(plugin_dir, name)] = name
    modules = []
    for package in packages:
        for _file in os.listdir(package):
            if _file[-3:] == '.py':
                modules.append("%s.%s" % (packages[package], _file[:-3]))
    # Import only modules not already loaded.
    imported_packages = set(
        [__import__(name) for name in modules if name not in sys.modules])
    for package in imported_packages:
        for module in package.__dict__.values():
            if inspect.ismodule(module):
                for _class in module.__dict__.values():
                    if inspect.isclass(_class) and \
                            _class.__module__ != 'agentplugin':
                        try:
                            # Instantiate speculatively; keep only real
                            # AgentPlugin implementations.
                            plug = _class()
                            if isinstance(plug, AgentPlugin):
                                plugins.append(plug)
                        except Exception as e:
                            logger.debug(
                                'Unable to import module %s. Skipping.'
                                % _class.__module__)
                            logger.exception(e)
    return plugins
def getMessageBody(self, inviteCode):
    """Build the JSON invite/registration message body.

    Identifies the device (Raspberry Pi serial or MAC address) and,
    best-effort, attaches hardware make/model, agent version and
    memory/disk capacity info under 'properties.sysinfo'.

    Returns:
        str: JSON-encoded message body.
    """
    body = {'id': inviteCode}
    hardware = Hardware()
    if hardware.Serial and hardware.isRaspberryPi():
        body['type'] = 'rpi'
        body['hardware_id'] = hardware.Serial
    else:
        hardware_id = hardware.getMac()
        if hardware_id:
            body['type'] = 'mac'
            body['hardware_id'] = hardware_id
    try:
        system_data = []
        cayennemqtt.DataChannel.add(system_data, cayennemqtt.SYS_HARDWARE_MAKE, value=hardware.getManufacturer(), type='string', unit='utf8')
        cayennemqtt.DataChannel.add(system_data, cayennemqtt.SYS_HARDWARE_MODEL, value=hardware.getModel(), type='string', unit='utf8')
        config = Config(APP_SETTINGS)
        cayennemqtt.DataChannel.add(system_data, cayennemqtt.AGENT_VERSION, value=config.get(
            'Agent', 'Version', __version__))
        system_info = SystemInfo()
        capacity_data = system_info.getMemoryInfo((cayennemqtt.CAPACITY, ))
        capacity_data += system_info.getDiskInfo((cayennemqtt.CAPACITY, ))
        for item in capacity_data:
            system_data.append(item)
        body['properties'] = {}
        # body['properties']['pinmap'] = NativeGPIO().MAPPING
        if system_data:
            body['properties']['sysinfo'] = system_data
    except:
        # Best-effort: registration proceeds without system info.
        exception('Error getting system info')
    return json.dumps(body)
def _load_install_data(self):
    """Parse the operation's FileData entries into InstallData records.

    Returns:
        A list of InstallData objects, one per application entry; empty
        when the FileData key is absent or an entry fails to parse (the
        entries parsed before the failure are still returned).
    """
    entries = self.json_message.get(RvOperationKey.FileData, [])
    results = []
    try:
        for entry in entries:
            item = InstallData()
            item.name = entry[RvOperationKey.Name]
            item.id = entry[RvOperationKey.AppId]
            item.cli_options = entry.get(RvOperationKey.CliOptions, '')
            item.proc_niceness = CpuPriority.get_niceness(
                self._get_cpu_priority())
            if RvOperationKey.Uris in entry:
                item.uris = entry[RvOperationKey.Uris]
            results.append(item)
    except Exception as e:
        logger.error("Could not load install data.")
        logger.exception(e)
    return results
def process_operation(self, operation):
    """Dispatch an incoming operation to its handler.

    Wraps raw input in a SofOperation, records it in the local sqlite
    store, then dispatches by operation type (built-in handlers) or by
    plugin name. Unknown operations raise and are routed, like any other
    failure, to self._major_failure.

    Arguments:
        operation  A SofOperation, or the raw message to build one from.
    """
    try:
        if not isinstance(operation, SofOperation):
            operation = SofOperation(operation)

        logger.info("Process the following operation: {0}".format(
            operation.__dict__))

        self._sqlite.add_operation(operation, datetime.datetime.now())

        # Dispatch table: operation type -> bound handler.
        operation_methods = {
            OperationValue.SystemInfo: self.system_info_op,
            OperationValue.NewAgent: self.new_agent_op,
            OperationValue.Startup: self.startup_op,
            OperationValue.NewAgentId: self.new_agent_id_op,
            OperationValue.Reboot: self.reboot_op,
            OperationValue.Shutdown: self.shutdown_op
        }

        if operation.type in operation_methods:
            # Call method
            operation_methods[operation.type](operation)
        elif operation.plugin in self._plugins:
            self.plugin_op(operation)
        else:
            raise Exception('Operation/Plugin {0} was not found.'.format(
                operation.__dict__))
    except Exception as e:
        # Fix: if SofOperation(operation) itself raised, `operation` is still
        # the raw message and has no __dict__; the old handler then raised
        # AttributeError and skipped _major_failure entirely.
        logger.error("Error while processing operation: {0}".format(
            getattr(operation, '__dict__', operation)))
        logger.exception(e)
        self._major_failure(operation, e)
def post(self):
    """Handle a judge request and map outcomes onto (code, result) payloads.

    Response 'code' values:
        0 -- success, or a CompileError (reported inside 'result')
        1 -- JudgeServerError / SandboxError
        2 -- any other unexpected error (wrapped as JudgeClientError)

    NOTE: Python 2 code (`print` statement, `e.message`).
    """
    args = self.reqparse.parse_args()
    ip = request.remote_addr
    try:
        result = self.judge(args, ip)
        # NOTE(review): debug print left in the success path; consider
        # replacing with logger.debug.
        print result
        return {'code': 0, 'result': result}
    except CompileError as e:
        # Compile errors are an expected outcome: still code 0, but the
        # result carries status/info describing the failure.
        logger.exception(e)
        ret = dict()
        ret["err"] = e.__class__.__name__
        ret["data"] = e.message
        result = {
            "status": "compile error",
            "info": ret,
        }
        return {'code': 0, 'result': result}
    except (JudgeServerError, SandboxError) as e:
        # Infrastructure failures (judge server / sandbox): code 1.
        logger.exception(e)
        ret = dict()
        ret["err"] = e.__class__.__name__
        ret["data"] = e.message
        return {'code': 1, 'result': ret}
    except Exception as e:
        # Anything else is reported as a generic client error: code 2.
        logger.exception(e)
        ret = dict()
        ret["err"] = "JudgeClientError"
        ret["data"] = e.__class__.__name__ + ":" + e.message
        return {'code': 2, 'result': ret}
def login(self):
    """Log in with the stored user id and password.

    Opens the login page, submits the enter form, then verifies the
    post-login page shows this user's handle.

    Returns:
        True when login is verified, False on any failure (each failure
        path is logged before returning).
    """
    try:
        self.browser.open(CF.URL_LOGIN)
        form = self.browser.get_form('enterForm')
    except Exception as err:
        logger.exception(err)
        logger.error("Open url failed.")
        return False

    form['handle'] = self.user_id
    form['password'] = self.password

    try:
        self.browser.submit_form(form)
    except Exception as err:
        logger.exception(err)
        logger.error("Submit login form failed.")
        return False

    try:
        # The titled caption blocks on the landing page include the
        # logged-in handle; absence means authentication failed.
        captions = [node.getText()[1:].strip()
                    for node in self.browser.select('div.caption.titled')]
        if self.user_id not in captions:
            logger.warning("Login failed, probably incorrect password.")
            return False
    except Exception as err:
        logger.exception(err)
        logger.error("Login status check failed.")
        return False

    return True
def get_data_from_file(self, detail: str, additional_headers: Dict) -> Optional[Dict]:
    """
    Returns File data from board

    :param detail: Content-Type part in headers. Can be: chart, htmlReference...
    :param additional_headers: Dict with additional headers parameters
        For chart and htmlReference:
        { 'Content-Type': 'application/json' }
    :return: Dict with requested data or None if request broken
    """
    result = None
    # '0' (or empty) identifier means the file has no fetchable content.
    if self.__identifier and self.__identifier != '0':
        response = None
        try:
            http_method = 'GET'
            values = {}
            headers = self.__platform_access.get_headers(
                http_method, self.get_url(), values, detail,
                additional_headers=additional_headers)
            response = requests.get(self.get_url(), headers=headers)
            response.raise_for_status()
            result = json.loads(response.content)
        except requests.HTTPError as e:
            # Fix: log messages said "form file" instead of "from file".
            logger.exception(
                OWN_ADAPTER_NAME,
                f'Error: couldn\'t get {detail} from file: {self.get_url()}. Error type: {e}.'
                f' Response: {response.status_code}',
                response=response)
            return None
        except Exception as e:
            logger.exception(
                OWN_ADAPTER_NAME,
                f'Error: couldn\'t get {detail} from file: {self.get_url()}. Error type: {e}',
                response=response)
            return None
    return result
def get_cpu_temp():
    """Get CPU temperature.

    Scans /sys/class/thermal/thermal_zone* (sorted) and returns the first
    non-GPU zone's temperature in degrees Celsius. Zones that cannot be
    read or parsed are skipped.

    Returns:
        Temperature as a float in Celsius, or 0.0 if no zone was readable.
    """
    thermal_dirs = glob('/sys/class/thermal/thermal_zone*')
    thermal_dirs.sort()
    temp = 0.0
    try:
        for thermal_dir in thermal_dirs:
            try:
                thermal_type = ''
                with open(thermal_dir + '/type', 'r') as type_file:
                    thermal_type = type_file.read().strip()
                if thermal_type != 'gpu_thermal':
                    # Kernel reports millidegrees Celsius.
                    with open(thermal_dir + '/temp', 'r') as temp_file:
                        content = int(temp_file.read().strip())
                    temp = content / 1000.0
                    break
            except (OSError, ValueError):
                # Fix: was a bare `except: pass`, which also swallowed
                # SystemExit/KeyboardInterrupt. Unreadable or malformed
                # zones are deliberately skipped.
                pass
    except Exception:
        exception('Error getting CPU temperature')
    return temp
def fetch_running_execution(self, project, flow):
    """Query the executor for the running execution of a project flow.

    Arguments:
        project  Project name.
        flow     Flow name.

    Returns:
        (ret, ok) where ret is the decoded JSON response (or [] when the
        request or decoding failed) and ok is the transport-level success
        flag from get_ajax.
    """
    method = "GET"
    url = config["executor"]
    params = {"ajax": "getRunning", "project": project, "flow": flow}
    content, ok = self.get_ajax(method, url, params)
    ret = []
    if ok:
        debug_print(content)
        logger.info(
            "fetch running execution from {} {} success: {}.".format(
                project, flow, content))
        try:
            ret = json.loads(content)
        except Exception as e:
            logger.exception(e)
    else:
        # Fix: the failure message had three placeholders but only two
        # arguments, which raised IndexError instead of logging.
        logger.info(
            "fetch running execution from {} {} failed: {}.".format(
                project, flow, content))
    return ret, ok
def getCpuInfo(self):
    """Get CPU information as a list formatted for Cayenne MQTT

    Returned list example::

        [{
            'channel': 'sys:cpu;load',
            'value': 12.8,
            'type': 'cpuload',
            'unit': 'p'
        }, {
            'channel': 'sys:cpu;temp',
            'value': 50.843,
            'type': 'temp',
            'unit': 'c'
        }]
    """
    cpu_info = []
    try:
        # NOTE: psutil.cpu_percent(1) blocks for a 1-second sampling window.
        cayennemqtt.DataChannel.add(cpu_info, cayennemqtt.SYS_CPU, suffix=cayennemqtt.LOAD, value=psutil.cpu_percent(1), type='cpuload', unit='p')
        # cayennemqtt.DataChannel.add(cpu_info, cayennemqtt.SYS_CPU, suffix=cayennemqtt.TEMPERATURE, value=CpuInfo.get_cpu_temp(), type='temp', unit='c')
    except Exception:
        # Fix: was a bare `except:` which also swallowed SystemExit and
        # KeyboardInterrupt; CPU info is best-effort, so log and continue.
        exception('Error getting CPU info')
    return cpu_info
def save_queue(queue, file_path, protocol=-1):
    """Persist a queue's savable operations to file_path as a pickled list.

    Known operation types (SofOperation / ResultOperation) are kept only
    when they report themselves savable; any other object in the queue is
    saved as-is. On a failed dump the partial file is removed.

    Arguments:
        queue      Object exposing queue_dump() -> list of operations.
        file_path  Destination file; chmod'd to owner read/write only.
        protocol   Pickle protocol (default -1 = highest available).
    """
    queue_dump = queue.queue_dump()

    savable_dump = []
    for op in queue_dump:
        if isinstance(op, (SofOperation, ResultOperation)):
            if not op._is_savable():
                try:
                    logger.debug("{0} / {1} will not be saved.".format(
                        op.id, op.type))
                except Exception:
                    # Operation may lack id/type attributes; fall back to repr.
                    logger.debug("{0} will not be saved.".format(op))
                continue
        # Fix: the original appended savable known-type operations twice
        # (once inside the isinstance branch, then again after it), so the
        # saved file contained duplicates.
        savable_dump.append(op)

    try:
        with open(file_path, 'w') as _file:
            cPickle.dump(savable_dump, _file, protocol)
        # 0o600: queue contents may be sensitive; owner-only access.
        os.chmod(file_path, 0o600)
    except Exception as e:
        logger.error("Failed to dump queue to file: {0}".format(file_path))
        logger.exception(e)
        # Remove a partial/corrupt file so it is not loaded later.
        if os.path.exists(file_path):
            os.remove(file_path)
def kek_add(message):
    """Append a new kek from an admin message to the data files.

    Text content goes into the text file; stickers, audio, voice and photos
    are stored in the ids file as a tagged file_id. Unsupported content gets
    a reply explaining what is accepted.
    """
    new_text = ''
    new_id = ''
    # Tries to write new data to either a file with text or a file with ids
    try:
        with open(kek_text_name, 'a+', encoding='utf-8') as text_file, open(
                kek_ids_name, 'a+', encoding='utf-8') as ids_file:
            if getattr(message, 'text'):
                new_text = message.text
                text_file.write('\n' + new_text)
            elif getattr(message, 'sticker'):
                new_id = '<sticker>{}'.format(message.sticker.file_id)
            elif getattr(message, 'audio'):
                new_id = '<audio>{}'.format(message.audio.file_id)
            elif getattr(message, 'voice'):
                new_id = '<voice>{}'.format(message.voice.file_id)
            elif getattr(message, 'photo'):
                new_id = '<photo>{}'.format(message.photo[0].file_id)

            if not (new_text or new_id):
                bot.reply_to(message, 'Sorry, couldn\'t add your kek. Only text, photos, stickers, audio and voice files '\
                             'are supported by now.')
                logger.info('Couldn\'t add a kek')
                return

            ids_file.write('\n' + new_id)
            logger.info('Admin {0} added this kek: {1}{2}'.format(
                get_user(message.from_user), new_text, new_id))
    # If the files are nonexistent
    except OSError as e:
        bot.reply_to(
            message,
            'Can\'t access the kek files. Please, make sure that they are in \'data\' directory and they are non-empty.'
        )
        logger.info(e)
    except Exception as e:
        logger.exception(e)
def accept_connections(self):
    """ Accept connections from multiple clients and save to list """
    # Drop and forget any previously accepted clients before listening again.
    for c in self.all_connections:
        c.close()
    self.all_connections = []
    self.all_addresses = []
    while 1:
        try:
            conn, address = self.socket.accept()
            conn.setblocking(1)
            # First payload from the client is expected to be its hostname;
            # it is appended to the (host, port) address tuple.
            # NOTE(review): a single recv(1024) assumes the hostname arrives
            # in one segment — confirm against the client protocol.
            client_hostname = conn.recv(1024).decode("utf-8")
            address = address + (client_hostname,)
        except Exception as e:
            logger.exception(e)
            print('Error accepting connections: %s' % str(e))
            # Loop indefinitely
            continue
        self.all_connections.append(conn)
        self.all_addresses.append(address)
        logger.info('Connection has been established: {0} ({1})'.format(address[-1], address[0]))
        print('\nConnection has been established: {0} ({1})'.format(address[-1], address[0]))
    # NOTE(review): unreachable — the loop above never breaks; presumably
    # this method runs in a dedicated thread. Confirm intended placement.
    return
def run(self):
    """Send messages to the server until the thread is stopped.

    Loop: wait one scheduling interval (exit immediately if the client is
    shutting down), skip the cycle when MQTT is disconnected, otherwise
    dequeue one (topic, message) packet, publish it, and mark the write
    queue task done. Publish failures and unexpected errors are logged
    without stopping the thread.
    """
    debug('WriterThread run')
    while self.Continue:
        try:
            # Doubles as the loop's sleep; True means the client is exiting.
            if self.cloudClient.exiting.wait(GENERAL_SLEEP_THREAD):
                return
            if self.cloudClient.mqttClient.connected == False:
                info('WriterThread mqttClient not connected')
                continue
            got_packet = False
            topic, message = self.cloudClient.DequeuePacket()
            if topic or message:
                got_packet = True
            try:
                # An empty message is still published for the jobs topic.
                if message or topic == cayennemqtt.JOBS_TOPIC:
                    if not isinstance(message, str):
                        message = dumps(message)
                    self.cloudClient.mqttClient.publish_packet(topic, message)
                    message = None
            except Exception:
                # Fix: was a bare `except:`; keep the thread alive on
                # publish errors but don't swallow SystemExit and friends.
                exception("WriterThread publish packet error")
            finally:
                # Balance the queue's task counter even when publish failed.
                if got_packet:
                    self.cloudClient.writeQueue.task_done()
        except Exception:
            exception("WriterThread unexpected error")
    return
def get_my_ip() -> str:
    """
    Get an ip address of the machine

    When the USE_LOCAL_IP environment variable is set (non-empty), the
    local address is returned; otherwise the public address is fetched
    from api.ipify.org.

    :return: an ip, or '' when the public lookup fails
    """
    # Fix: replaces os.environ[f'USE_LOCAL_IP'] guarded by a swallowed
    # KeyError — a pointless f-string and an EAFP anti-pattern.
    if os.environ.get('USE_LOCAL_IP'):
        return get_my_local_ip()

    response = None
    try:
        response = requests.get('https://api.ipify.org')
        response.raise_for_status()
        return response.text
    except Exception as e:
        logger.exception(AGENT_UTILS_NAME,
                         f'Could not get an ip address. Error {e}',
                         response)
        return ''
async def update_group_badges(sid: str, member_badges: List[MemberBadge]):
    """Apply badge updates to the given group members and notify the room.

    Unknown shape uuids are logged and skipped. Every other user connected
    to the sender's room receives a Group.Members.Update event.
    """
    pr: PlayerRoom = game_state.get(sid)

    for entry in member_badges:
        try:
            target = Shape.get_by_id(entry["uuid"])
        except Shape.DoesNotExist:
            logger.exception(
                f"Could not update shape badge for unknown shape {entry['uuid']}"
            )
            continue
        target.badge = entry["badge"]
        target.save()

    for psid, player in game_state.get_users(room=pr.room):
        await sio.emit(
            "Group.Members.Update",
            member_badges,
            room=psid,
            skip_sid=sid,
            namespace=GAME_NS,
        )