def _create_direcotry(self):
    """Ensure the parent directory of the configured db path exists.

    Logs an error and does nothing when ``self._dbPath`` is empty.
    """
    # NOTE(review): the (misspelled) method name is kept as-is so existing
    # callers are not broken.
    if not self._dbPath:
        Log.error('empty db path')
        return
    parent_dir = os.path.split(os.path.abspath(self._dbPath))[0]
    Common.create_dir(parent_dir)
def __init__(self, files, distdir):
    """Initialize the package: remember inputs and materialize *distdir*.

    :param files:   the package's source files (stored verbatim).
    :param distdir: output directory; created on disk immediately.
    """
    super(Package, self).__init__()
    self.files, self.distdir = files, distdir
    Common.create_dir(distdir)
    for label, value in (('packages', files), ('distdir', distdir)):
        Log.debug('%s: %s' % (label, value))
def download_file(self, local_path, url, headers=None, data=None, method=None):
    """Fetch *url* and store the response body at *local_path*.

    No-op when either *url* or *local_path* is falsy.  The target
    directory is created if needed and any pre-existing file at
    *local_path* is removed before writing.  Nothing is written when the
    request yields no content.
    """
    if not (url and local_path):
        return
    target_dir = Common.split_path(local_path)[0]
    Common.create_dir(target_dir)
    Common.remove(local_path)
    payload = self.request_data(url, headers=headers, data=data, method=method)
    if payload:
        Common.write_data(local_path, payload)
def backup(src, dst_dir, retemtion_days, hours_last_day=None, ignore_hours=None):
    """Rsync *src* into *dst_dir* as a timestamped copy, then prune old copies.

    The copy is named ``<name>_<YYYYMMDDHH><ext>``.  Pruning rules, per
    existing backup in *dst_dir*:

    * at least one day old and the age (in days) appears in
      *retemtion_days*: only the 23:00 copy of that day is kept;
    * at least one day old and the age does NOT appear in
      *retemtion_days*: deleted;
    * same-day copies before 23:00 are deleted once older than
      *hours_last_day* hours (default 8).

    NOTE(review): the misspelled parameter name ``retemtion_days`` is kept
    so keyword callers are not broken.

    :param src:            file/directory to back up.
    :param dst_dir:        backup destination directory (created if missing).
    :param retemtion_days: collection of ages (in days) whose backups are
                           thinned to the 23:00 copy rather than deleted.
    :param hours_last_day: minimum age, in hours, before a same-day copy
                           may be pruned; defaults to 8.
    :param ignore_hours:   forwarded to ``should_ignore_hours``; when that
                           returns truthy, no backup is taken.
    """
    if hours_last_day is None:
        hours_last_day = 8
    if should_ignore_hours(ignore_hours):
        return

    now = datetime.datetime.now()
    # BUGFIX: build the timestamp first and append the extension afterwards.
    # Previously the extension was concatenated into the strftime() format
    # string, so a '%' inside the extension would have been parsed as a
    # format directive.
    stamp = now.strftime('%Y%m%d%H')
    name = Common.filename(src) + '_' + stamp + Common.file_extension(src)
    dst = Common.join_paths(dst_dir, name)
    Log.debug('backup %s to %s' % (src, dst))
    Common.create_dir(dst_dir)
    Common.system_cmd('rsync -aE --progress %s %s' % (src, dst))

    # Delete older backups.
    for entry in Common.list_dir(dst_dir):
        if entry == '.DS_Store':
            continue
        # assumes Common.filename() strips the extension, leaving the
        # trailing '_<YYYYMMDDHH>' component parseable — TODO confirm
        parts = Common.filename(entry).split('_')
        if len(parts) < 2:
            continue
        try:
            dt = datetime.datetime.strptime(parts[-1], '%Y%m%d%H')
        except ValueError:
            # Not one of our timestamped backups (previously this raised
            # and aborted the whole prune pass) — leave it alone.
            continue
        age_days = (now - dt).days
        should_delete = False
        if age_days >= 1:
            if age_days in retemtion_days:
                # Protected age: keep only that day's 23:00 copy.
                if dt.hour < 23:
                    should_delete = True
            else:
                should_delete = True
        elif age_days == 0 and dt.hour < 23 and (
                (now - dt).seconds > hours_last_day * 60 * 60):
            should_delete = True
        if should_delete:
            # renamed from 'file' to avoid shadowing the builtin
            stale_path = Common.join_paths(dst_dir, entry)
            Common.remove(stale_path)