def __init__(self):
    """Set up the webhook client, working directories, patch metadata and work queue."""
    base = utils.base_path()
    self.webhook = Webhook()
    # Local working directories for downloaded and backed-up files.
    self.download_dir = base / "download"
    self.backup_dir = base / "backup"
    # Patch metadata for this app, plus the depot list derived from it.
    self.patch_list = self.webhook.query_patch_list(self.app_id)
    self.depot_list = self.__get_depot_list()
    self.process_queue = Queue()
def __init__(self):
    """Initialize the webhook client, local directories, patch list and processing queue."""
    self.webhook = Webhook()
    # The earliest patch that works was released after the DirectX update.
    # TODO: try to figure out a way to patch to earlier patches than this.
    # self.directx_update_date = time.struct_time((2020, 2, 17, 0, 0, 0, 0, 48, 0))
    root = utils.base_path()
    self.download_dir = root / "download"
    self.backup_dir = root / "backup"
    self.patch_list = self.webhook.query_patches()
    self.depot_list = self._get_depot_list()
    self.process_queue = Queue()
def get_current_etag():
    """Return the cached ETag from etag.dat, or None if it cannot be read.

    The ETag is written after a successful update; a missing or unreadable
    file simply means "no cached version", so I/O errors are deliberately
    treated as None.
    """
    path = base_path('etag.dat')
    try:
        with open(path, 'r') as fh:
            return fh.read()
    except (IOError, OSError):
        # Was a bare `except:`, which also swallowed KeyboardInterrupt and
        # programming errors; only file-access failures mean "no etag".
        return None
def update():
    """Download the latest release zip and stage it for installation.

    Returns False when the server reports our cached copy is current
    (HTTP 304, or a matching ETag for servers that ignore If-None-Match).
    Otherwise: unpacks the zip into the 'update' staging dir, schedules
    every currently-installed file for deletion, registers each staged
    file with the platform updater, and commits.
    """
    etag = get_current_etag()
    # Historical download locations, kept for reference:
    # req = Request('http://www.karpenoktem.nl/smoelen/uiltje-files.zip')
    # req = Request('https://github.com/karpenoktem/uiltje/zipball/master')
    req = Request('http://kn.cx/groups/webcie/uiltje/Uiltje%20beta%201.zip')
    if etag is not None:
        req.add_header("If-None-Match", etag)
    try:
        fh = urlopen(req)
    except HTTPError as e:
        if e.code == 304:
            # Server honoured If-None-Match: nothing new to fetch.
            return False
        raise
    # Fallback ETag comparison for servers that ignore If-None-Match.
    # (Restored per the "XXX uncommenten" marker; this replaces the debug
    # leftovers `print fh.info()` and the hard-coded etag = "piet".)
    new_etag = fh.info().get('ETag')
    if new_etag is not None and new_etag == etag:
        return False
    etag = new_etag
    zipdata = StringIO(fh.read())
    if onWindows:
        updater = WindowsUpdater()
    else:
        updater = UnixUpdater()
    # Walk the install tree with an explicit queue and schedule every
    # existing file for deletion; directories are queued for later visits.
    q = [base_path('')]
    while q:
        cur = q.pop()
        for fn in os.listdir(cur):
            fp = os.path.join(cur, fn)
            if os.path.isdir(fp):
                q.append(fp)
            else:
                updater.delete_file(fp)
    zipf = ZipFile(zipdata, 'r')
    zipf.extractall(base_path('update'))
    for fn in zipf.namelist():
        staged = base_path(os.path.join('update', fn))
        if not os.path.isdir(staged):
            updater.update_file(fn, staged)
    updater.commit(etag)
def CUB(self, ratio, total_ratio=1.0):
    """Build (or load from cache) the train/test CUB datasets.

    ratio / total_ratio control the train/test split.  The result is
    pickled to data/datasets.pkl so later runs skip the split entirely.

    NOTE(review): pickle.load on a local cache file is fine, but never
    point pickle_path at untrusted data.
    """
    pickle_path = utils.base_path() + "/data/datasets.pkl"
    if os.path.isfile(pickle_path):
        print("Using pickled data!")
        # `with` closes the handle; the previous bare open() leaked it.
        with open(pickle_path, 'rb') as fh:
            datasets = pickle.load(fh)
        # The pickle stores absolute image paths; if it was produced on a
        # different machine, rewrite them relative to this installation.
        img_path = datasets["train"].imgs[0][0]
        if utils.base_path() not in img_path:
            import re
            for phase in datasets:
                for i in range(len(datasets[phase])):
                    path, label = datasets[phase].imgs[i]
                    path = re.sub(r'^.*(data/images)', r'\1', path)
                    datasets[phase].imgs[i] = (utils.path(path), label)
        return datasets
    train_id, test_id = self.split(ratio, total_ratio)
    splits = {'train': train_id, 'test': test_id}
    datasets = {
        split: CUB_Dataset(self.path, splits[split])
        for split in ('train', 'test')
    }
    # `with` ensures the cache file is flushed and closed (was leaked).
    with open(pickle_path, 'wb') as fh:
        pickle.dump(datasets, fh)
    print("Data loaded from disk and has been pickled!")
    return datasets
def commit(self, etag):
    """Write update.sh, which swaps staged files into place after the app exits.

    The script sleeps 5s (so the running binary can exit first), moves every
    staged file over its target, removes deleted files/dirs, records the new
    etag, cleans the staging dir, deletes itself and relaunches the app.

    NOTE(review): unlike the Windows updater, nothing here executes the
    script -- presumably the caller launches it; confirm before relying on it.
    """
    self.clean_deletes()
    self.precreate_dirs()
    batch_file = base_path("update.sh")
    with open(batch_file, 'w') as fh:
        fh.write("sleep 5\n")
        for fn, new_file in self.updates:
            fh.write('mv %s %s\n' % (escapeshellarg(base_path(new_file)),
                                     escapeshellarg(base_path(fn))))
        for fn in self.deletes:
            fn = base_path(fn)
            if os.path.isdir(fn):
                fh.write('rmdir %s\n' % escapeshellarg(fn))
            else:
                fh.write('rm %s\n' % escapeshellarg(fn))
        # Escape the etag too: it comes from an HTTP response header and was
        # previously interpolated into the shell script unquoted (injection).
        fh.write('echo -n %s > %s\n' % (escapeshellarg(etag),
                                        escapeshellarg(base_path('etag.dat'))))
        fh.write('rm -r %s\n' % escapeshellarg(base_path('update')))
        fh.write('rm %s\n' % escapeshellarg(batch_file))
        # Relaunch the application (newline added so the last command is
        # a complete line for every shell).
        fh.write('%s\n' % escapeshellarg(base_path('uiltje.exe')))
def commit(self, etag):
    """Write and launch update.bat, which swaps staged files in after we exit.

    Fixes over the previous version:
    - `echo -n` does not exist in cmd.exe; `<nul set /p` writes the etag
      without a trailing newline, and the line now uses \r\n like the rest.
    - the `rmdir /s /q` targeted the batch file itself instead of the
      'update' staging directory (compare the Unix twin's `rm -r update`).
    - the batch must delete itself as its LAST line: cmd reads .bat files
      by byte offset, so deleting it mid-script truncates the remainder.
    - launch with Popen instead of call(): the script starts with a 5s
      ping-wait precisely so this process can exit first; a blocking
      call() would keep our own files locked.
    """
    self.clean_deletes()
    self.precreate_dirs()
    batch_file = base_path("update.bat")
    with open(batch_file, 'w') as fh:
        # Give this process time to exit before its files are replaced.
        fh.write("ping -n 5 127.0.0.1 >nul\r\n")
        for fn, new_file in self.updates:
            fh.write('move %s %s\r\n' % (escapeshellarg(base_path(new_file)),
                                         escapeshellarg(base_path(fn))))
        for fn in self.deletes:
            fh.write('del %s\r\n' % escapeshellarg(base_path(fn)))
        # `<nul set /p =X` prints X without a newline; etag.dat must hold
        # the bare tag so get_current_etag() can compare it exactly.
        fh.write('<nul set /p =%s>%s\r\n' % (etag,
                                             escapeshellarg(base_path('etag.dat'))))
        fh.write('rmdir /s /q %s\r\n' % escapeshellarg(base_path('update')))
        # `start` relaunches the app without blocking the batch, so the
        # final self-delete line still runs.
        fh.write('start %s\r\n' % escapeshellarg(base_path('uiltje.exe')))
        fh.write('del %s\r\n' % escapeshellarg(batch_file))
    subprocess.Popen([batch_file])
    sys.exit(0)