def piped_execute(cmd1, cmd2):
    """Pipe stdout of cmd1 into stdin of cmd2 and run both to completion.

    :param cmd1: argv list for the producing command.
    :param cmd2: argv list for the consuming command.
    :returns: (returncode, stderr) taken from cmd2. A failure of cmd1 is
        NOT reflected in the returncode; callers must inspect stderr/output
        if they need to detect upstream errors.
    :raises OSError: if either process cannot be spawned.
    """
    logger.debug("Piping cmd1='%s' into...", ' '.join(cmd1))
    logger.debug("cmd2='%s'", ' '.join(cmd2))
    try:
        p1 = subprocess.Popen(cmd1, stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
    except OSError as e:
        logger.error("Pipe1 failed - %s ", e)
        raise

    # NOTE(dosaboy): ensure that the pipe is blocking. This is to work
    # around the case where evenlet.green.subprocess is used which seems to
    # use a non-blocking pipe.
    flags = fcntl.fcntl(p1.stdout, fcntl.F_GETFL) & (~os.O_NONBLOCK)
    fcntl.fcntl(p1.stdout, fcntl.F_SETFL, flags)

    try:
        p2 = subprocess.Popen(cmd2, stdin=p1.stdout,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
    except OSError as e:
        logger.error("Pipe2 failed - %s ", e)
        raise

    # Drop our reference to the read end so p2 sees EOF when p1 exits.
    p1.stdout.close()
    stdout, stderr = p2.communicate()
    # Reap p1 explicitly; the original never waited on it, leaving a
    # zombie process behind for every transfer.
    p1.wait()
    # NOTE(review): p1's stderr pipe is never drained; a very chatty cmd1
    # could in principle block on a full pipe buffer -- confirm acceptable.
    return p2.returncode, stderr
def es_store_record(record):
    """Best-effort: index *record* into Elasticsearch; failures are logged
    and swallowed so the caller (the crawler loop) keeps running."""
    try:
        logger.debug(f"es_store_record in index: {es_index_name} record {record}")
        es.index(index=es_index_name, body=record)
    except Exception as ex:
        # Deliberately broad: indexing is non-critical telemetry.
        logger.error(ex, exc_info=True)
def diff_transfer(src_name, src_pool, src_user, src_conf, src_keyring,
                  dest_name, dest_pool, dest_user, dest_conf, dest_keyring,
                  src_snap=None, from_snap=None):
    """Copy only extents changed between two points.

    If no snapshot is provided, the diff extents will be all those changed
    since the rbd volume/base was created, otherwise it will be those
    changed since the snapshot was created.
    """
    logger.debug("Performing differential transfer from '%(src)s' to "
                 "'%(dest)s'", {'src': src_name, 'dest': dest_name})
    src_ceph_args = make_ceph_args(src_user, src_conf, pool=src_pool,
                                   keyring=src_keyring)
    dest_ceph_args = make_ceph_args(dest_user, dest_conf, pool=dest_pool,
                                    keyring=dest_keyring)

    # Export side: rbd export-diff [--from-snap X] pool/name[@snap] -
    cmd1 = ['rbd', 'export-diff'] + src_ceph_args
    if from_snap is not None:
        cmd1.extend(['--from-snap', from_snap])
    if src_snap:
        path = "%s/%s@%s" % (src_pool, src_name, src_snap)
    else:
        path = "%s/%s" % (src_pool, src_name)
    cmd1.extend([path, '-'])

    # Import side: rbd import-diff - pool/name
    cmd2 = ['rbd', 'import-diff'] + dest_ceph_args
    rbd_path = "%s/%s" % (dest_pool, dest_name)
    cmd2.extend(['-', rbd_path])

    ret, stderr = process_util.piped_execute(cmd1, cmd2)
    if ret:
        msg = ("RBD diff op failed - (ret=%(ret)s stderr=%(stderr)s)" %
               {'ret': ret, 'stderr': stderr})
        # This is a failure, not routine progress: log it at error level
        # (it was previously logged at info and easy to miss).
        logger.error(msg)
def execute(cmd):
    """Run *cmd* (argv list; items are str()-coerced) and capture output.

    :returns: the (stdout, stderr) pair from communicate(), or None if
        spawning/communicating failed (the error is logged).
    """
    cmd = [str(c) for c in cmd]
    logger.debug("Execute cmd='%s'", ' '.join(cmd))
    try:
        # 'with' guarantees the pipe fds are closed and the child is
        # reaped even if communicate() raises (the old code leaked them).
        with subprocess.Popen(cmd, stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE) as p:
            return p.communicate()
    except Exception as ex:
        logger.error("Execute cmd error %s", ex)
def _connect(self):
    """Open a connection to the rigodetools MariaDB database.

    On failure the error is logged and echoed; self.conn stays None.
    """
    logger.debug("connect")
    connect_params = {
        "user": self.user,
        "password": self.pwd,
        "host": self.host,
        "port": 3307,
        "database": "rigodetools",
    }
    try:
        self.conn = mariadb.connect(**connect_params)
    except Exception as ex:
        logger.error(ex, exc_info=True)
        print(ex)
def download_queue_oday(self, directory):
    """Download every queued 0-DAY release for *directory* from the FTP.

    Walks self.queue_oday, mirrors each release into
    download_root/<directory>/<group>/<release>, skips files over ~50MB
    and already-downloaded files, then prunes each destination directory.
    """
    # Fixed: the old message said "bt" here (copy-paste from the
    # beatport downloader).
    logger.debug("download start: 0-day")
    self.ftp.cwd("//MP3")
    self.ftp.cwd("0-DAY")
    self.ftp.cwd(directory)
    if not os.path.exists(self.download_root + directory):
        os.makedirs(self.download_root + directory)
    timer = Timer(text="Track downloaded in {:0.2f} seconds",
                  logger=logger.info)
    for ftpfile in self.queue_oday:
        self.ftp.cwd(ftpfile.directory)
        for filename in (path for path in self.ftp.nlst()
                         if path not in ('.', '..')):
            # first entry is always a sub directory
            logger.debug(f"Checking filename (unknown)")
            destination_dir = os.path.join(self.download_root, directory,
                                           ftpfile.group, ftpfile.directory)
            if not os.path.exists(destination_dir):
                os.makedirs(destination_dir)
            local_filename = os.path.join(
                destination_dir,
                filename.replace('-www.groovytunes.org', ''))
            if filename.startswith('-['):
                # Entries named like "-[...]" are subdirectories on the
                # server; mirror them locally as directories.
                os.makedirs(local_filename)
            else:
                if not os.path.exists(local_filename):
                    if ftpfile.size < 52914560:
                        logger.info(
                            f"Downloading (unknown) to {local_filename} with size {ftpfile.size}"
                        )
                        timer.start()
                        # 'with' closes the file even if retrbinary
                        # raises (the old code leaked the handle).
                        with open(local_filename, 'wb') as file:
                            self.ftp.retrbinary('RETR ' + filename,
                                                file.write)
                        timer.stop()
                    else:
                        # logger.warn is a deprecated alias of warning().
                        logger.warning(f"Skip oversized file (unknown)")
                else:
                    logger.warning(f"File already exists {local_filename}.")
        clean_download_directory(destination_dir)
        self.ftp.cwd("..")
def wait_evacuate(self):
    """Poll until no server on this node is still ACTIVE, i.e. the
    evacuation has moved everything off the node."""
    logger.debug("等待疏散节点 %s 完成任务" % self.node)
    # TODO: wait node status
    while True:
        servers = self.client.list_servers(self.node)
        # Use a list comprehension instead of filter(): on Python 3
        # filter() returns a lazy iterator and len() raises TypeError.
        evacuating = [x for x in servers if x.status == u"ACTIVE"]
        if len(evacuating) == 0:
            break
        time.sleep(1)
def add_daily_detail(self, day, directory_entry, kind):
    """Insert one (day, directory, kind) row into the dailydetails table,
    connecting lazily on first use."""
    if self.conn is None:
        self._connect()
    cursor = self.conn.cursor()
    try:
        logger.debug(
            f"INSERT INTO dailydetails (day,directory,kind) VALUES ({self.year + day}, {directory_entry}, {kind})"
        )
        row = (self.year + day, directory_entry, kind)
        cursor.execute(
            "INSERT INTO dailydetails (day,directory,kind) VALUES (?, ?, ?)",
            row)
        self.conn.commit()
    finally:
        # Release the cursor even when the INSERT fails.
        cursor.close()
def __init__(self):
    """Load MariaDB credentials from the config.ini sitting next to this
    module; the connection itself is opened lazily."""
    config_path = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), "config.ini")
    config_object = ConfigParser()
    config_object.read(config_path)
    serverinfo = config_object["CONFIG"]
    self.host = serverinfo["MARIADB_HOST"]
    self.user = serverinfo["MARIADB_USER"]
    self.pwd = serverinfo["MARIADB_PWD"]
    # Opened on demand by _connect().
    self.conn = None
    # Current year as a string; used as a prefix when storing day keys.
    self.year = str(datetime.datetime.now().year)
    logger.debug(f"host {self.host}")
    logger.debug(f"user {self.user}")
def list_0day_directory(self, directory):
    """Scan /MP3/0-DAY/<directory>: index every release in Elasticsearch
    and queue favourite (non-banned, not-yet-seen) releases for download."""
    path = "/MP3/0-DAY/"
    self.ftp.cwd(path)
    self.ftp.cwd(directory)
    for entry in (p for p in self.ftp.nlst() if p not in ('.', '..')):
        try:
            logger.debug("Entering directory: " + entry)
            self.ftp.cwd(entry)
            try:
                ftpFile = FtpFile("0-DAY", entry, path + directory)
                largest = 0
                # first entry is always a sub directory
                for filename in (p for p in self.ftp.nlst()[1:]
                                 if p not in ('.', '..')):
                    logger.debug("entry " + entry)
                    logger.debug("sub_entry " + filename)
                    logger.debug("path " + path)
                    song_logger.info(
                        directory + "," +
                        filename.replace('-www.groovytunes.org', ''))
                    size = self.ftp.size(filename)
                    if largest < size:
                        # Track the largest file; ftpFile.size ends up as
                        # the release's biggest track.
                        largest = size
                        ftpFile.size = size
                es_store_record(ftpFile.toDict())
                normalized = entry.upper().replace(' ', '_')
                result = [b for b in banned if b.upper() in normalized]
                if len(result) == 0:
                    result = [f for f in favourites
                              if f.upper() in normalized]
                    if len(result) > 0:
                        # Fuzzy-dedupe against releases already queued.
                        if len(difflib.get_close_matches(
                                entry, self.matcher_list)) == 0:
                            logger.info(f"Adding to 0-day q {entry}")
                            self.queue_oday.append(ftpFile)
                            self.matcher_list.append(entry)
            finally:
                # Always pop back to the parent directory; previously an
                # error mid-directory skipped this and left the FTP cwd
                # wrong for every subsequent entry.
                self.ftp.cwd('..')
        except Exception:
            logger.error("Listing error: ", exc_info=True)
def clean_download_directory(directory):
    """Recursively prune *directory*: remove mp3s shorter than 4 or longer
    than 25 minutes or below 192kbps, delete the directory if no mp3 file
    remains directly inside it, otherwise regenerate its m3u playlist."""
    logger.debug(f"Cleaning directory: {directory}")
    for filename in os.listdir(directory):
        fullpath = os.path.join(directory, filename)
        if os.path.isfile(fullpath):
            if filename.endswith('.mp3'):
                try:
                    length_minutes, bitrate = get_audio_info(fullpath)
                    # Keep only 4-25 minute tracks at >=192kbps.
                    if (length_minutes > 25) or (length_minutes < 4) or (bitrate < 192):
                        os.remove(fullpath)
                except Exception:
                    # Keep the file, but record WHY probing failed --
                    # the old code caught 'ex' and then dropped it.
                    logger.error(
                        f"Audio info couldn't be determined. Keeping the file {fullpath}",
                        exc_info=True)
        else:
            clean_download_directory(fullpath)
    # NOTE(review): this checks only *direct* children, so a directory
    # whose mp3s all live in subdirectories is removed wholesale --
    # confirm the download layout is always flat before relying on this.
    if not [f for f in os.listdir(directory) if f.endswith('.mp3')]:
        shutil.rmtree(directory)
    else:
        generate_m3u(directory)
def build_mail_evacuate(self):
    """Build the (subject, content) pair for the evacuation-result mail.

    Success: subject-only text. Failure: an HTML table of the servers
    left in ERROR state with one of their IP addresses.
    """
    logger.debug("生成邮件正文")
    servers = self.client.list_servers(self.node)
    # Materialize as a list: on Python 3 filter() returns a one-shot
    # iterator, so len() would raise and iteration after len() would be
    # empty anyway.
    evacuate_errors = [x for x in servers if x.status == u"ERROR"]
    if len(evacuate_errors) == 0:
        subject = u"疏散计算节点%s成功" % self.node
        content = subject
    else:
        subject = u"疏散计算节点%s失败" % self.node
        title_list = [u"失败虚机", u"IP"]
        record_list = []
        for server in evacuate_errors:
            ip = None
            # NOTE(review): with several networks this keeps the address
            # of the last network iterated -- confirm that is intended.
            for name, vif in server.addresses.items():
                ip = vif[0]['addr']
            record_list.append((server.name, ip))
        content = mailutil.records2html(subject, title_list, record_list)
    return subject, content
def es_create_index():
    """Create the crawler index with a strict 'members' mapping.

    No-op when the index already exists; errors are logged, not raised.
    """
    text_field = {"type": "text"}
    settings = {
        "settings": {
            "number_of_shards": 1,
            "number_of_replicas": 0,
        },
        "mappings": {
            "members": {
                "dynamic": "strict",
                "properties": {
                    "group": dict(text_field),
                    "date": {
                        "type": "date",
                        "format": "yyyy.MM.dd HH:mm:ss",
                    },
                    "directory": dict(text_field),
                    "filename": dict(text_field),
                    "full_filename": dict(text_field),
                    "pretty_filename": dict(text_field),
                },
            },
        },
    }
    try:
        if not es.indices.exists(es_index_name):
            es.indices.create(index=es_index_name, ignore=400, body=settings)
            logger.debug('Index created')
    except Exception as ex:
        logger.error(ex, exc_info=True)
def download_queue_bt(self, directory):
    """Download every queued BEATPORT release for *directory* from the FTP.

    Walks self.queue_bt, mirrors each release into
    download_root/<directory>/<group>/<release>, skips files over ~50MB
    and already-downloaded files, then prunes each destination directory.
    """
    logger.debug("download start: bt")
    self.ftp.cwd("//MP3")
    self.ftp.cwd("BEATPORT__AND__WEBSITE_SECTION")
    self.ftp.cwd(directory)
    if not os.path.exists(self.download_root + directory):
        os.makedirs(self.download_root + directory)
    timer = Timer(text="Track downloaded in {:0.2f} seconds",
                  logger=logger.info)
    for ftpfile in self.queue_bt:
        self.ftp.cwd(ftpfile.directory)
        logger.debug(f"Listing directory {ftpfile.directory}")
        for filename in (path for path in self.ftp.nlst()
                         if path not in ('.', '..')):
            logger.debug(f"Checking filename (unknown)")
            destination_dir = os.path.join(self.download_root, directory,
                                           ftpfile.group, ftpfile.directory)
            if not os.path.exists(destination_dir):
                os.makedirs(destination_dir)
            local_filename = os.path.join(
                destination_dir,
                filename.replace('-www.groovytunes.org', '').replace('_', ' '))
            if not os.path.exists(local_filename):
                if ftpfile.size < 52914560:
                    logger.info(f"Downloading (unknown) to {local_filename}")
                    timer.start()
                    # 'with' closes the file even if retrbinary raises
                    # (the old code leaked the handle on error).
                    with open(local_filename, 'wb') as file:
                        self.ftp.retrbinary('RETR ' + filename, file.write)
                    timer.stop()
                else:
                    # logger.warn is a deprecated alias of warning().
                    logger.warning(f"Skip oversized file (unknown)")
            else:
                logger.info(f"File already exists {local_filename}.")
        clean_download_directory(destination_dir)
        self.ftp.cwd("..")
from loghelper import logger
from datetime import datetime, timedelta
from ftphelper import FtpCrawler
from codetiming import Timer
import sys

# Crawl yesterday's release directory (MMDD) unless an explicit directory
# name is passed as the single command-line argument.
try:
    yesterday = datetime.now() - timedelta(1)
    today_directory = yesterday.strftime("%m%d")
    if len(sys.argv) == 2:
        today_directory = sys.argv[1]
    logger.debug(today_directory)
    print("Crawling {}".format(today_directory))
    timer = Timer("ftp", text="Finished in {minutes:.1f} minutes")
    with FtpCrawler() as ftpcrawler:
        timer.start()
        # Listing fills the download queues; then both queues are drained.
        ftpcrawler.list_beatport_directory(today_directory)
        ftpcrawler.list_0day_directory(today_directory)
        ftpcrawler.download_queue_bt(today_directory)
        ftpcrawler.download_queue_oday(today_directory)
        timer.stop()
        download_time = Timer.timers["ftp"]
except Exception as ex:
    logger.error(ex, exc_info=True)
def send_mail(self, subject, content):
    """Send *content* as an HTML mail with *subject* to the configured
    recipient list (comma-separated in config.TO_LIST)."""
    logger.debug("发送邮件")
    recipients = config.TO_LIST.split(',')
    mailutil.send_html_mail(config.MAIL_HOST, config.MAIL_PORT,
                            config.MAIL_USER, config.MAIL_PWD,
                            config.MAIL_FROM, recipients,
                            subject, content)
def main():
    """Entry point: run the nova evacuation mail shell."""
    logger.debug("开始运行邮件程序")  # "starting the mail program"
    NovaMailShell().main()
def list_beatport_directory(self, directory):
    """Scan /MP3/BEATPORT__AND__WEBSITE_SECTION/<directory>: index every
    release in Elasticsearch and queue favourite (non-banned,
    not-yet-seen) releases for download."""
    path = "/MP3/BEATPORT__AND__WEBSITE_SECTION/"
    self.ftp.cwd(path)
    self.ftp.cwd(directory)
    for entry in (p for p in self.ftp.nlst() if p not in ('.', '..')):
        try:
            logger.debug("Entering directory: {}".format(entry))
            self.ftp.cwd(entry)
            try:
                ftpFile = FtpFile("BEATPORT__AND__WEBSITE_SECTION", entry,
                                  path + directory)
                largest = 0
                for filename in (p for p in self.ftp.nlst()
                                 if p not in ('.', '..')):
                    logger.debug("entry {}".format(entry))
                    logger.debug("sub_entry {}".format(filename))
                    logger.debug("path {}".format(path))
                    song_logger.info(
                        directory + "," +
                        filename.replace('-www.groovytunes.org', ''))
                    logger.debug("getting timestamp of {}".format(filename))
                    size = self.ftp.size(filename)
                    if largest < size:
                        # Track the largest file; ftpFile.size ends up as
                        # the release's biggest track.
                        largest = size
                        ftpFile.size = size
                logger.debug(ftpFile)
                es_store_record(ftpFile.toDict())
                normalized = entry.upper().replace(' ', '_')
                result = [b for b in banned if b.upper() in normalized]
                if len(result) == 0:
                    result = [f for f in favourites
                              if f.upper() in normalized]
                    if len(result) > 0:
                        # Fuzzy-dedupe against releases already queued.
                        if len(difflib.get_close_matches(
                                entry, self.matcher_list)) == 0:
                            logger.info(f"Adding to BT q {entry}")
                            self.queue_bt.append(ftpFile)
                            self.matcher_list.append(entry)
            finally:
                # Always pop back to the parent directory; previously an
                # error mid-directory skipped this and left the FTP cwd
                # wrong for every subsequent entry.
                self.ftp.cwd('..')
        except Exception:
            logger.error("Listing error: ", exc_info=True)
def __exit__(self, type, value, traceback):
    """Context-manager exit: report queue sizes and close the FTP session.

    Returns None (falsy), so any exception propagates to the caller.
    NOTE: parameter 'type' shadows the builtin; kept for compatibility.
    """
    # NOTE(review): these count queued entries, not completed downloads.
    logger.debug("Downloaded BT {}".format(len(self.queue_bt)))
    logger.debug("Downloaded 0Day {}".format(len(self.queue_oday)))
    self.ftp.close()
def execute(*cmd, **kwargs):
    """Helper method to shell out and execute a command through subprocess.

    :param process_input: data written to the child's stdin, if given.
    :param check_exit_code: bool, int, or list of ints. True means [0];
        False disables exit-code checking entirely.
    :param delay_on_retry: sleep 0.2-2.0s before each retry.
    :param attempts: number of tries before the final error is raised.
    :param shell: run the command through the shell.
    :param loglevel: level used for the retry log message.
    :returns: the (stdout, stderr) pair from communicate().
    :raises UnknownArgumentError: on unexpected keyword arguments.
    :raises ProcessExecutionError: when the exit code is not accepted and
        all attempts are exhausted.
    """
    process_input = kwargs.pop('process_input', None)
    check_exit_code = kwargs.pop('check_exit_code', [0])
    ignore_exit_code = False
    delay_on_retry = kwargs.pop('delay_on_retry', True)
    attempts = kwargs.pop('attempts', 1)
    shell = kwargs.pop('shell', False)
    loglevel = kwargs.pop('loglevel', logging.DEBUG)

    if isinstance(check_exit_code, bool):
        ignore_exit_code = not check_exit_code
        check_exit_code = [0]
    elif isinstance(check_exit_code, int):
        check_exit_code = [check_exit_code]

    if kwargs:
        raise UnknownArgumentError('Got unknown keyword args '
                                   'to utils.execute: %r' % kwargs)

    # Use a real list, not map(): on Python 3 map() returns a one-shot
    # iterator, so the ' '.join() in the debug line below would exhaust
    # it and Popen would receive an empty argv -- and retries would then
    # reuse the already-spent iterator.
    cmd = [str(c) for c in cmd]

    while attempts > 0:
        attempts -= 1
        try:
            logger.debug('Running cmd (subprocess): %s', ' '.join(cmd))
            _PIPE = subprocess.PIPE  # pylint: disable=E1101

            if os.name == 'nt':
                # Windows has no preexec_fn and requires inheritable fds
                # for redirected std handles.
                preexc_fn = None
                close_fds = False
            else:
                preexc_fn = _subprocess_setup
                close_fds = True

            obj = subprocess.Popen(cmd,
                                   stdin=_PIPE,
                                   stdout=_PIPE,
                                   stderr=_PIPE,
                                   close_fds=close_fds,
                                   preexec_fn=preexc_fn,
                                   shell=shell)
            result = None
            if process_input is not None:
                result = obj.communicate(process_input)
            else:
                result = obj.communicate()
            obj.stdin.close()  # pylint: disable=E1101
            _returncode = obj.returncode  # pylint: disable=E1101
            if _returncode:
                # Lazy %-args instead of eager string interpolation.
                logger.debug('Result was %s', _returncode)
                if not ignore_exit_code and _returncode not in check_exit_code:
                    (stdout, stderr) = result
                    raise ProcessExecutionError(exit_code=_returncode,
                                                stdout=stdout,
                                                stderr=stderr,
                                                cmd=' '.join(cmd))
            return result
        except ProcessExecutionError:
            if not attempts:
                raise
            else:
                logger.log(loglevel, '%r failed. Retrying', cmd)
                if delay_on_retry:
                    time.sleep(random.randint(20, 200) / 100.0)
        finally:
            # NOTE: sleep(0) yields to other greenthreads when running
            # under eventlet; otherwise it is a harmless no-op.
            time.sleep(0)