def exec(self, command):
    """Connect to host and execute the given command.

    The command is fed on stdin of a remote 'sh -ls' shell; all of its
    stdout/stderr output is read and discarded. Raises Exception when
    the remote command exits with a non-zero status.

    NOTE(review): shadows the builtin `exec`; renaming would break
    callers, so the name is kept.
    """
    client = None
    try:
        client = self._make_client()
        # Run a shell and feed the command through stdin rather than on
        # the exec_command line itself.
        stdin, stdout, stderr = client.exec_command('sh -ls', timeout=TIMEOUT)
        stdin.channel.send(command + '\n')
        stdin.channel.shutdown_write()  # EOF on stdin so 'sh' can terminate
        stdin.close()
        channel = stdout.channel  # shared channel for stdout/stderr/stdin
        while not channel.exit_status_ready():
            # flush stdout and stderr while the remote program has not
            # returned. Read by blocks of max 16KB.
            if channel.recv_ready():
                stdout.channel.recv(1 << 14)
            if channel.recv_stderr_ready():
                stderr.channel.recv_stderr(1 << 14)
            time.sleep(0.1)  # sleep 100 ms
        stdout.close()
        stderr.close()
        ret = channel.recv_exit_status()
    finally:
        # Always release the SSH connection, even if exec_command or the
        # drain loop raised.
        if client:
            client.close()
    if ret:
        errmsg = 'SSH error executing {}'.format(command)
        log_error(errmsg)
        raise Exception(errmsg)
def __format(self, time_stamp, tm_format):
    """Format an epoch timestamp according to *tm_format*.

    Returns *time_stamp* unchanged when no format is given, otherwise
    the local-time string produced by time.strftime(). On formatting
    failure the error is logged and an empty string is returned.
    """
    # No format requested: hand back the raw timestamp.
    if tm_format is None or len(tm_format) == 0:
        return time_stamp
    time_str = ''
    try:
        time_str = time.strftime(tm_format, time.localtime(time_stamp))
    except Exception as err:  # pylint: disable=broad-except
        # Fixed from Python-2 'except Exception, err' syntax, which is a
        # SyntaxError on Python 3 (used by the rest of this file).
        log_error('DateTimeExtractor::__format err %s %s'
                  % (err, str(time_stamp)))
    # Fixed: the original fell off the end and implicitly returned None.
    return time_str
def _send_cmd(self, cmd: str):
    """Send *cmd* to the child process and check for an 'OK' reply.

    Writes the command (newline terminated) to the child's stdin, then
    reads one response line from its stdout.

    Raises:
        RuntimeError: if the child closed its stdout (EOF) or replied
            with anything other than 'OK'.
    """
    # Write newline terminated command to stdin of child
    print(cmd, file=self.proc.stdin, flush=True)

    # Parse result from stdout of child
    resline = self.proc.stdout.readline()
    res = resline.strip('\n').split(maxsplit=1)
    if not res:
        # Fixed: readline() returns '' at EOF (child died or closed its
        # stdout); the original then crashed with IndexError on res[0].
        errmsg = 'Failed to {} in {}: no response'.format(cmd, self.name)
        log_error(errmsg)
        raise RuntimeError(errmsg)
    msg = res[1:]
    if res[0] != 'OK':
        errmsg = 'Failed to {} in {}: {}'.format(cmd, self.name, msg)
        log_error(errmsg)
        raise RuntimeError(errmsg)
def run(self):
    """Watch the gerrit event stream forever, dispatching each event."""
    gerrit = self.gerrit_instance
    gerrit.startWatching()
    while True:
        try:
            evt = gerrit.getEvent()
        except Exception as err:  # pylint: disable=broad-except
            # A failure here does not involve package generation; it is
            # most likely an infrastructure hiccup, so just let it slide
            # and keep watching.
            log_error('ignoring exception {}'.format(str(err)))
        else:
            self._handle_gerrit_event(evt)
def _process_job(self, scheduled_job: _BuildScheduledJob):
    """Run one scheduled build on self.builder and report its outcome."""
    builder = self.builder
    job = scheduled_job.job
    log_info(f'{job} build started on {builder}')
    try:
        builder.build(job)
        msg = f'{job} build on {builder}: succeed'
        log_info(msg)
    except Exception as exception:  # pylint: disable=broad-except
        # Any build failure is reported back through build_done().
        msg = f'{job} build on {builder} failed: {str(exception)}'
        log_error(msg)
        scheduled_job.build_done(False, msg)
        return
    scheduled_job.build_done(True, msg)
def put(self, local_file_path, remote_path):
    """Copy a local file to the given remote path.

    Best-effort: any SSH/SFTP failure is logged, not raised.
    """
    client = None
    try:
        client = self._make_client()
        # confirm=False: skip the post-transfer stat() verification.
        client.open_sftp().put(local_file_path, remote_path, confirm=False)
    except Exception as exception:  # pylint: disable=broad-except
        errmsg = ('SSH error sending {} to {} in {}\n'
                  .format(local_file_path, self, remote_path)
                  + 'Error: ' + str(exception))
        log_error(errmsg)
    finally:
        # Release the SSH connection in every case.
        if client:
            client.close()
def ping(self):
    """Heartbeat loop: ping the redis server every 60s while self.status.

    When the ping reports failure, a fresh connection is created from
    the module-level settings (my_host, my_port, my_db, my_pw).
    """
    while self.status:
        try:
            time.sleep(60)
            # Try to send a heartbeat message to the redis-server.
            if not self.conn.ping():
                # Connection lost: rebuild it.
                log_info("redis连接丢失,重新获取连接")
                self.conn = redis.Redis(host=my_host, port=my_port,
                                        db=my_db, password=my_pw)
                # self.conn = self.myredis.getConnect()
                # self.redis_sub = self.get_psubscribe()
            else:
                # Heartbeat succeeded.
                log_info("发送心跳成功")
                log_info(self.conn)
        except Exception as e:  # pylint: disable=broad-except
            # NOTE(review): the message says "reconnect" but no new
            # connection is made here; the next iteration retries the
            # same (possibly dead) self.conn — confirm this is intended.
            log_error("redis连接异常,发送心跳失败,重新连接")
            log_error(e)
def get(self, remote_dir_path, local_dir_path):
    """Copy a remote directory to the given local path.

    Expects both remote and local arguments to be directories, not
    files. Best-effort: any SSH/SFTP failure is logged, not raised
    (same policy as put()).
    """
    client = None
    try:
        client = self._make_client()
        sftp_client = client.open_sftp()
        self._rec_download(sftp_client, remote_dir_path, local_dir_path)
    except Exception as exception:  # pylint: disable=broad-except
        # Fixed: added the trailing '\n' so the path and "Error:" no
        # longer run together ("...into /pathError: ..."), matching the
        # message layout used by put().
        errmsg = 'SSH error retrieving {} from {} into {}\n' \
            .format(remote_dir_path, self, local_dir_path)
        errmsg += 'Error: ' + str(exception)
        log_error(errmsg)
    finally:
        # Release the SSH connection in every case.
        if client:
            client.close()
def perform_scanner(configuration, collection_name):
    """Scan one configured ROM collection against its No-Intro DAT file.

    Exits the process (status 1) when *collection_name* is not present
    in the configuration. Returns the scanned ROMcollection.
    """
    log_info('***** Scanning collection {} *****'.format(collection_name))
    if collection_name not in configuration.collections:
        log_error(
            'Collection "{}" not found in the configuration file.'.format(
                collection_name))
        sys.exit(1)
    conf = configuration.collections[collection_name]

    # Load the DAT file for this collection.
    dat_dir = FileName(configuration.common_opts['NoIntro_DAT_dir'])
    dat = common.load_XML_DAT_file(dat_dir.pjoin(conf['DAT']))

    # Scan and classify the files found in the collection's ROM_dir.
    collection = ROMcollection(conf)
    collection.scan_files_in_dir()
    collection.process_files(dat)
    return collection
def generate_buildjobs(req: BuildRequest) -> Iterator[BuildJob]:
    """Generate the mmpack source packages from a build request.

    Runs 'mmpack-build mksource' in a temporary directory and yields one
    BuildJob per '<prj_name> <version> <srchash>' line it prints.
    """
    log_info(f'making source packages for {req}...')
    with TemporaryDirectory(prefix='mmpack-src') as tmpdir:
        args = [
            'mmpack-build',
            '--outdir=' + tmpdir,
            '--builddir=' + tmpdir + '/build',
            'mksource',
            '--git',
            '--tag=' + req.fetch_refspec,
        ]
        if req.srctar_make_opts.get('version_from_vcs', False):
            args.append('--update-version-from-vcs')
        if req.srctar_make_opts.get('only_modified', True):
            args.append('--multiproject-only-modified')
        args.append(req.url)

        num_prj = 0
        # Fixed: run the child in a context manager so its stdout pipe is
        # closed and the process is reaped even if the consumer abandons
        # this generator or an exception escapes (the original could leak
        # a pipe and leave a zombie).
        with Popen(args, stdout=PIPE, encoding='utf-8') as proc:
            for line in proc.stdout:
                fields = line.strip().split()
                if len(fields) != 3:
                    # Any line not shaped 'name version hash' ends the
                    # project listing.
                    break
                job = BuildJob(req, fields[0], fields[1], fields[2])
                num_prj += 1
                log_info(f'... {job.prj_name} {job.version} {job.srchash}')
                yield job

            if proc.wait() != 0:
                log_error(f'{args} failed')
            else:
                log_info('... Done' if num_prj else 'No mmpack packaging')
def _process_build_done(self, job: BuildJob, success: bool, feedback: str):
    """Handle completion of a build job and upload its results.

    On build failure (or when upload is disabled), only notifies the
    job. Otherwise the merged manifest is added to every architecture
    repository of the job's target repo in a transaction-like fashion:
    all repos are locked (begin), then either all committed or all
    rolled back.
    """
    if not success:
        job.notify_result(False, feedback)
        return
    if not job.do_upload:
        job.notify_result(True, f'{job}: upload skipped')
        return
    modified_repos = []
    # begin changes in modified repositories (this gets the lock on those)
    for arch in job.archs:
        repo = self.repos[job.upload_repo][arch]
        repo.begin()
        modified_repos.append(repo)
    try:
        # Update repositories
        manifest = job.merge_manifests()
        for repo in modified_repos:
            repo.add(manifest)
    except Exception as exception:  # pylint: disable=broad-except
        # Rollback changes in modified repositories so none of them is
        # left holding a partial upload (or its lock).
        for repo in modified_repos:
            repo.rollback()
        job.notify_result(False, str(exception))
        log_error(f'{job} upload cancelled')
        return
    # Commit changes in modified repositories
    for repo in modified_repos:
        repo.commit()
        log_info(f'{job} uploaded on {repo}')
    job.notify_result(True)
def run(self):
    """Redis subscription thread body.

    First loop: retry get_psubscribe() every 5s until it succeeds.
    Second loop: poll for messages (60s timeout) while self.status is
    truthy, appending each message payload to the global phoneList.
    """
    log_info("thread redis start....")
    while self.status:
        try:
            self.redis_sub = self.get_psubscribe()
        except Exception as e:  # pylint: disable=broad-except
            # Subscription failed: retry after 5 seconds.
            log_error("订阅失败,5秒后重试")
            log_error(e)
            time.sleep(5)
            continue
        break
    while self.status:
        try:
            msg = self.redis_sub.parse_response(block=False, timeout=60)
            log_info("收到订阅消息 %s" % msg)
            # Skip the initial 'psubscribe' confirmation reply; actual
            # messages carry their payload at index 3.
            if msg is not None and msg[0] != 'psubscribe':
                # self.data_list.append(msg[3])
                phoneList.append(msg[3])
                # print(phoneList.getData())
        except Exception as e:  # pylint: disable=broad-except
            # NOTE(review): the message says "re-check connection" but
            # only sleeps; the next iteration reuses the same
            # self.redis_sub — confirm this is intended.
            log_error("redis异常,重新检测连接")
            log_error(e)
            time.sleep(5)
    log_info("thread redis end....")