def __init__(self, use_cache=True):
    """Load the NMT train/val/test splits, preferring a preprocessed cache.

    :param use_cache: when True, try to restore the pickled preprocessed
        data first; on a cache miss (or when False) the raw data is loaded,
        preprocessed, and written back to the cache.
    """
    # Cache key embeds a hash of the data params so that changing the
    # params automatically invalidates a stale cache.
    cache_name = f'{self.TRAIN_NAME}_nmt_preprocessed_data_{md5(self.M.data_params)}.pkl'
    data = read_cache(cache_name) if use_cache else None

    # Fix: idiomatic None test (was `not isinstance(data, type(None))`).
    if data is not None:
        (self.__train_src, self.__train_tar,
         self.__train_src_encode, self.__train_tar_encode,
         self.__val_src, self.__val_tar,
         self.__val_src_encode, self.__val_tar_encode,
         self.__test_src, self.__test_tar,
         self.__test_src_encode, self.__test_tar_encode,
         self.__src_tokenizer, self.__tar_tokenizer,
         self.__src_vocab_size, self.__tar_vocab_size) = data
    else:
        self.__load_data()
        self.__preprocess()
        # Persist everything needed to skip __load_data/__preprocess next run.
        cache(cache_name, [
            self.__train_src, self.__train_tar,
            self.__train_src_encode, self.__train_tar_encode,
            self.__val_src, self.__val_tar,
            self.__val_src_encode, self.__val_tar_encode,
            self.__test_src, self.__test_tar,
            self.__test_src_encode, self.__test_tar_encode,
            self.__src_tokenizer, self.__tar_tokenizer,
            self.__src_vocab_size, self.__tar_vocab_size,
        ])

    print('src_vocab_size: {}\ntar_vocab_size: {}'.format(
        self.__src_vocab_size, self.__tar_vocab_size))
    print('train_size: {}\ntest_size: {}'.format(len(self.__train_src), len(self.__test_src)))
    print('train_x.shape: {}\ntrain_y.shape: {}'.format(
        self.__train_src_encode.shape, self.__train_tar_encode.shape))
    print('val_x.shape: {}\nval_y.shape: {}'.format(
        self.__val_src_encode.shape, self.__val_tar_encode.shape))
    print('test_x.shape: {}\ntest_y.shape: {}'.format(
        self.__test_src_encode.shape, self.__test_tar_encode.shape))
def __init__(self, use_cache=True):
    """Load the pretraining train/test data, preferring a preprocessed cache.

    :param use_cache: when True, try to restore the pickled preprocessed
        data first; on a cache miss (or when False) the raw data is loaded,
        preprocessed, and written back to the cache.
    """
    # Cache key embeds a hash of the data params so that changing the
    # params automatically invalidates a stale cache.
    cache_name = f'pre{self.TRAIN_NAME}_preprocessed_data_{md5(self.M.data_params)}.pkl'
    data = read_cache(cache_name) if use_cache else None

    # Fix: idiomatic None test (was `not isinstance(data, type(None))`).
    if data is not None:
        (self.train_x, self.train_y,
         self.train_lan_x, self.train_lan_y, self.train_pos_y,
         self.test_x, self.test_y,
         self.test_lan_x, self.test_lan_y, self.test_pos_y,
         self.tokenizer, self.vocab_size) = data
    else:
        self.load_data()
        self.preprocess_tokenizer()
        self.preprocess()
        # Persist everything needed to skip load/preprocess next run.
        cache(cache_name, [
            self.train_x, self.train_y,
            self.train_lan_x, self.train_lan_y, self.train_pos_y,
            self.test_x, self.test_y,
            self.test_lan_x, self.test_lan_y, self.test_pos_y,
            self.tokenizer, self.vocab_size,
        ])

    print(f'vocab_size: {self.vocab_size}\n')
    print(f'train_x.shape: {self.train_x.shape}\ntrain_y.shape: {self.train_y.shape}')
    print(f'train_lan_x.shape: {self.train_lan_x.shape}\ntrain_lan_y.shape: {self.train_lan_y.shape}')
    print(f'train_pos_y.shape: {self.train_pos_y.shape}')
    print(f'test_x.shape: {self.test_x.shape}\ntest_y.shape: {self.test_y.shape}')
    print(f'test_lan_x.shape: {self.test_lan_x.shape}\ntest_lan_y.shape: {self.test_lan_y.shape}')
    print(f'test_pos_y.shape: {self.test_pos_y.shape}')
def train(self):
    """Build the seq2seq model, persist the source tokenizer, then fit it."""
    print('\nBuilding model ({}) ...'.format(Model.TIME))
    self.model = Model(self.__src_vocab_size, self.__tar_vocab_size)

    # save tokenizer before training
    cache(os.path.join(self.model.tokenizer_dir, 'tokenizer.pkl'), self.__src_tokenizer)

    print('\nTraining model ...')

    # Decoder input drops the last token; the target drops the first
    # (next-token prediction on the shifted sequence).
    dec_in = self.__train_tar_encode[:, :-1]
    dec_out = self.__train_tar_encode[:, 1:]
    val_dec_in = self.__test_tar_encode[:, :-1]
    val_dec_out = self.__test_tar_encode[:, 1:]

    begin = time.time()
    self.model.train((self.__train_src_encode, dec_in), dec_out,
                     (self.__test_src_encode, val_dec_in), val_dec_out)
    self.__train_time = time.time() - begin
    print('\nFinish training')
def train(self):
    """Instantiate self.M with a shared vocabulary, persist the tokenizer, and fit."""
    print('\nBuilding model ({}) ...'.format(self.M.TIME))
    # The same vocabulary size is used for both encoder and decoder.
    self.model = self.M(self.vocab_size, self.vocab_size)

    # save tokenizer
    cache(os.path.join(self.model.tokenizer_dir, 'tokenizer.pkl'), self.tokenizer)

    print('\nTraining model ...')

    # Decoder input drops the last token; the target drops the first
    # (next-token prediction). Language-id sequences are shifted alike.
    dec_in, dec_out = self.train_y[:, :-1], self.train_y[:, 1:]
    dec_lan_in = self.train_lan_y[:, :-1]
    val_dec_in, val_dec_out = self.test_y[:, :-1], self.test_y[:, 1:]
    val_dec_lan_in = self.test_lan_y[:, :-1]

    begin = time.time()
    self.model.train(train_x=(self.train_x, self.train_lan_x, dec_in, dec_lan_in),
                     train_y=dec_out,
                     val_x=(self.test_x, self.test_lan_x, val_dec_in, val_dec_lan_in),
                     val_y=val_dec_out)
    self.train_time = time.time() - begin
    print('\nFinish training')
def get(url, file_name):
    """Download a gzipped corpus, parse it into sentence-pair blocks, and cache it.

    :param url: remote location of the gzipped data file.
    :param file_name: local file name under ``data_dir``.
    :return: a list of blocks; each block is a list of two-element lists
        ``[column_1, column_0]`` taken from tab-separated lines
        (note the column swap — column 1 comes first).
    """
    data_path = os.path.join(data_dir, file_name)
    cache_name = os.path.splitext(data_path)[0] + '.pkl'
    # A previous parse is cached as a pickle next to the data file.
    if os.path.exists(cache_name):
        return read_cache(cache_name)

    # download and unzip data
    utils.download(url, data_path)
    with gzip.open(data_path, 'rb') as f:
        _data = f.read().decode('utf-8')

    # Normalize the text, then split into blocks separated by "\n\t\n".
    _data = utils.full_2_half(utils.unicode_to_ascii(_data))
    blocks = _data.replace('\r', '').strip().split('\n\t\n')

    # Fix: comprehension replaces the nested map/lambda pyramid and splits
    # each line on '\t' once instead of twice.
    _data = [
        [[fields[1].strip(), fields[0].strip()]
         for fields in (line.split('\t') for line in block.split('\n'))]
        for block in blocks
    ]

    cache(cache_name, _data)
    return _data
def command(self, user, channel, msg):
    """Download ("cache") a premux .mkv from the staff FTP to the local premux dir.

    msg: [name_filter, show-name words...]. NOTE(review): body yields
    deferreds, so this is presumably decorated with @inlineCallbacks
    elsewhere — confirm at the decorator site.
    """
    if len(msg) < 2:
        self.msg(channel, "Need a filter and show name")
        return
    name_filter, show = msg[0], " ".join(msg[1:])
    # resolve() maps the free-form show name to a show record, or None
    # (it reports its own error to the channel in that case).
    show = self.factory.resolve(show, channel)
    if show is None:
        return
    if not show["folder"]:
        self.msg(channel, "No FTP folder given for {}".format(show["series"]))
        return
    # Work on the episode after the current one.
    episode = show["current_ep"] + 1
    # Connect to the staff FTP and list the episode's directory.
    ftp = yield ClientCreator(reactor, FTPClient, self.factory.config.ftp_user,
                              self.factory.config.ftp_pass).connectTCP(self.factory.config.ftp_host,
                                                                       self.factory.config.ftp_port)
    ftp.changeDirectory("/{}/{:02d}/".format(show["folder"], episode))
    filelist = FTPFileListProtocol()
    yield ftp.list(".", filelist)
    files = [x["filename"] for x in filelist.files if x["filetype"] != "d"]
    # Exactly one .mkv must match the filter.
    premux = fnmatch.filter(files, "*{}*.mkv".format(name_filter))
    if not premux:
        self.msg(channel, "No premux found")
        return
    elif len(premux) > 1:
        self.msg(channel, "Too many premux files match the filter: {}".format(", ".join(premux)))
        return
    else:
        premux = premux[0]
    # Expected byte size, used by cache() to verify the download.
    premux_len = [x["size"] for x in filelist.files if x["filename"] == premux][0]
    if os.path.isfile("{}/{}".format(self.factory.config.premux_dir, premux)):
        self.msg(channel, "{} already is cached. Message fugi if you need it re-cached.".format(premux))
        return
    # NOTE(review): cache() here is the FTP-download helper (it yields a
    # success flag), not the pickle-cache helper used elsewhere — verify
    # which `cache` is imported in this module.
    success = yield cache(self, user, ftp, premux, premux_len)
    if success:
        self.msg(channel, "{} cached.".format(premux))
    else:
        self.msg(channel, "Caching of {} failed.".format(premux))
    # FTP teardown pattern used throughout this file: quit, then fail any
    # remaining pending deferreds.
    yield ftp.quit()
    ftp.fail(None)
def command(self, user, channel, msg):
    """Release an episode end-to-end.

    Fetches (or builds from premux + xdelta) the final MKV from the staff
    FTP, verifies its CRC, then distributes it: XDCC server, seedbox
    (file + torrent), Nyaa, TokyoTosho, blog post, and finally advances
    the show on showtimes and updates the channel topic.

    msg: [name_filter, show-name words..., optional trailing flags
    ("--previous")]. NOTE(review): body yields deferreds, so this is
    presumably decorated with @inlineCallbacks elsewhere — confirm.
    """
    if len(msg) < 2:
        self.msg(channel, "Need a filter and a showname")
        return
    name_filter = msg[0]
    offset = 1
    # Trailing "--" flags are popped off the end of msg.
    while msg[-1][:2] == "--":
        arg = msg.pop()
        if arg == "--previous":
            offset = 0
    show = self.factory.resolve(" ".join(msg[1:]), channel)
    if show is None:
        return
    if not show["folder"]:
        self.msg(channel, "No FTP folder given for {}".format(show["series"]))
        return
    if not show["xdcc_folder"]:
        self.msg(channel, "No XDCC folder given for {}".format(show["series"]))
        return
    episode = show["current_ep"] + offset
    # Scratch working directory named by a fresh uuid (retry on collision).
    guid = uuid.uuid4().hex
    while os.path.exists(guid):
        guid = uuid.uuid4().hex
    os.mkdir(guid)
    # Step 1: Search FTP for complete episode, or premux + xdelta
    ftp = yield ClientCreator(reactor, FTPClient, self.factory.config.ftp_user,
                              self.factory.config.ftp_pass).connectTCP(self.factory.config.ftp_host,
                                                                       self.factory.config.ftp_port)
    ftp.changeDirectory("/{}/{:02d}/".format(show["folder"], episode))
    filelist = FTPFileListProtocol()
    yield ftp.list(".", filelist)
    files = [x["filename"] for x in filelist.files if x["filetype"] != "d"]
    # "[[]Commie[]]" escapes literal square brackets for fnmatch.
    complete = fnmatch.filter(files, "[[]Commie[]]*{}*.mkv".format(name_filter))
    xdelta = fnmatch.filter(files, "*{}*.xdelta".format(name_filter))
    premux = fnmatch.filter(files, "*{}*.mkv".format(name_filter))
    if complete:
        # Step 1a: Download completed file
        if len(complete) > 1:
            self.msg(channel, "Too many completed files match the filter: {}".format(", ".join(complete)))
            return
        else:
            complete = complete[0]
        self.notice(user, "Found complete file: {}".format(complete))
        complete_len = [x["size"] for x in filelist.files if x["filename"] == complete][0]
        complete_downloader = Downloader("{}/{}".format(guid, complete))
        yield ftp.retrieveFile(complete, complete_downloader)
        # Size check stands in for download integrity.
        if complete_downloader.done() != complete_len:
            self.msg(channel, "Aborted releasing {}: Download of complete file had incorrect size.".format(show["series"]))
            yield ftp.quit()
            ftp.fail(None)
            return
    elif xdelta and premux:
        # Step 1b: Download premux + xdelta, merge into completed file
        if len(premux) > 1:
            self.msg(channel, "Too many premux files match the filter: {}".format(", ".join(premux)))
            return
        else:
            premux = premux[0]
        if len(xdelta) > 1:
            self.msg(channel, "Too many xdelta files match the filter: {}".format(", ".join(xdelta)))
            return
        else:
            xdelta = xdelta[0]
        self.notice(user, "Found xdelta and premux: {} and {}".format(xdelta, premux))
        # Reuse the locally cached premux if present; otherwise fetch it
        # into the cache first via the FTP-download helper.
        if not os.path.isfile("{}/{}".format(self.factory.config.premux_dir, premux)):
            premux_len = [x["size"] for x in filelist.files if x["filename"] == premux][0]
            success = yield cache(self, user, ftp, premux, premux_len)
            if not success:
                self.msg(channel, "Aborted releasing {}: Download of premux file had incorrect size.".format(show["series"]))
                yield ftp.quit()
                ftp.fail(None)
                return
        shutil.copyfile("{}/{}".format(self.factory.config.premux_dir, premux),
                        "{}/{}".format(guid, premux))
        xdelta_len = [x["size"] for x in filelist.files if x["filename"] == xdelta][0]
        xdelta_downloader = Downloader("{}/{}".format(guid, xdelta))
        yield ftp.retrieveFile(xdelta, xdelta_downloader)
        if xdelta_downloader.done() != xdelta_len:
            self.msg(channel, "Aborted releasing {}: Download of xdelta file had incorrect size.".format(show["series"]))
            yield ftp.quit()
            ftp.fail(None)
            return
        # Apply the patch: xdelta3 -d decodes inside the scratch dir.
        code = yield getProcessValue(getPath("xdelta3"),
                                     args=["-f", "-d", "{}/{}".format(guid, xdelta)],
                                     env=os.environ)
        if code != 0:
            self.msg(channel, "Aborted releasing {}: Couldn't merge premux and xdelta.".format(show["series"]))
            yield ftp.quit()
            ftp.fail(None)
            return
        self.notice(user, "Merged premux and xdelta")
        # The decoded output is found by globbing the scratch dir.
        complete = fnmatch.filter(os.listdir(guid), "[[]Commie[]]*.mkv")
        if not complete:
            self.msg(channel, "No completed file found")
            return
        elif len(complete) > 1:
            self.msg(channel, "Too many completed files found after merging: {}".format(", ".join(complete)))
            return
        else:
            complete = complete[0]
        # NOTE(review): at this point `complete` is a non-empty filename
        # string, so this branch looks unreachable — confirm intent.
        if not complete:
            self.msg(channel, "Aborted releasing {}: Couldn't find completed file after merging.".format(show["series"]))
            yield ftp.quit()
            ftp.fail(None)
            return
    else:
        self.msg(channel, "Aborted releasing {}: Couldn't find completed episode.".format(show["series"]))
        yield ftp.quit()
        ftp.fail(None)
        return
    yield ftp.quit()
    ftp.fail(None)
    # Step 1c: Verify CRC
    crc = complete[-13:-5]  # Extract CRC from filename
    try:
        with open("{}/{}".format(guid, complete), "rb") as f:
            # Mask to 32 bits for a stable unsigned hex rendering.
            calc = "{:08X}".format(binascii.crc32(f.read()) & 0xFFFFFFFF)
    except:  # NOTE(review): bare except — consider narrowing to OSError.
        self.msg(channel, "Aborted releasing {}: Couldn't open completed file for CRC verification.".format(show["series"]))
        return
    if crc != calc:
        self.msg(channel, "Aborted releasing {}: CRC failed verification. Filename = '{}', Calculated = '{}'.".format(show["series"], crc, calc))
        return
    # Step 1d: Determine version number
    match = re.search("(v\d+)", complete)
    version = match.group(1) if match is not None else ""
    # Step 2: Create torrent
    try:
        torrent = makeTorrent(complete, guid)
    except:  # NOTE(review): bare except, but re-raised — acceptable as a report-then-crash.
        self.msg(channel, "Aborted releasing {}: Couldn't create torrent.".format(show["series"]))
        raise
    self.notice(user, "Created torrent")
    # Step 3: Upload episode to XDCC server
    try:
        ftp = yield ClientCreator(reactor, FTPClient, self.factory.config.xdcc_user,
                                  self.factory.config.xdcc_pass).connectTCP(self.factory.config.xdcc_host,
                                                                            self.factory.config.xdcc_port)
        store, finish = ftp.storeFile("./{}/{}/{}".format(self.factory.config.xdcc_folder, show["xdcc_folder"], complete))
        sender = yield store
        # NOTE(review): reads the whole file into memory before writing.
        with open("{}/{}".format(guid, complete), "rb") as f:
            sender.transport.write(f.read())
        sender.finish()
        yield finish
        yield ftp.quit()
        ftp.fail(None)
    except:
        self.msg(channel, "Aborted releasing {}: Couldn't upload completed episode to XDCC server.".format(show["series"]))
        raise
    self.notice(user, "Uploaded to XDCC")
    # Step 4: Upload episode to seedbox
    try:
        ftp = yield ClientCreator(reactor, FTPClient, self.factory.config.seed_user,
                                  self.factory.config.seed_pass).connectTCP(self.factory.config.seed_host,
                                                                            self.factory.config.seed_port)
        store, finish = ftp.storeFile("./{}/{}".format(self.factory.config.seed_file_folder, complete))
        sender = yield store
        with open("{}/{}".format(guid, complete), "rb") as f:
            sender.transport.write(f.read())
        sender.finish()
        yield finish
        yield ftp.quit()
        ftp.fail(None)
    except:
        self.msg(channel, "Aborted releasing {}: Couldn't upload completed episode to seedbox.".format(show["series"]))
        raise
    self.notice(user, "Uploaded to seedbox")
    # Step 5: Start seeding torrent
    try:
        ftp = yield ClientCreator(reactor, FTPClient, self.factory.config.seed_user,
                                  self.factory.config.seed_pass).connectTCP(self.factory.config.seed_host,
                                                                            self.factory.config.seed_port)
        store, finish = ftp.storeFile("./{}/{}".format(self.factory.config.seed_torrent_folder, torrent))
        sender = yield store
        with open("{}/{}".format(guid, torrent), "rb") as f:
            sender.transport.write(f.read())
        sender.finish()
        yield finish
        yield ftp.quit()
        ftp.fail(None)
    except:
        self.msg(channel, "Aborted releasing {}: Couldn't upload torrent to seedbox.".format(show["series"]))
        raise
    self.notice(user, "Seeding started")
    # Step 6: Upload torrent to Nyaa
    nyaagent = CookieAgent(Agent(reactor), cookielib.CookieJar())
    response = yield nyaagent.request("POST", "http://www.nyaa.eu/?page=login",
                                     Headers({'Content-Type': ['application/x-www-form-urlencoded']}),
                                     FileBodyProducer(StringIO(urllib.urlencode({"loginusername": self.factory.config.nyaa_user, "loginpassword": self.factory.config.nyaa_pass}))))
    body = yield returnBody(response)
    if "Login successful" not in body:
        self.msg(channel, "Aborted releasing {}: Couldn't login to Nyaa.".format(show["series"]))
        # Keep the response body in the scratch dir for debugging.
        with open("{}/{}".format(guid, "nyaa_login.html"), "wb") as f:
            f.write(body)
        return
    twitter_list = rheinbowify('Follow [url="https://twitter.com/RHExcelion"]@RHExcelion[/url], [url="https://twitter.com/johnnydickpants"]@jdp[/url], and the rest of Commie at [url="https://twitter.com/RHExcelion/commie-devs"]@Commie-Devs[/url].')
    # NOTE(review): the "info"/"comment" values below look like a scraper's
    # email-obfuscation artifact — verify the intended IRC channel string.
    post_data = MultiPartProducer({"torrent": "{}/{}".format(guid, torrent)}, {
        "name": complete,
        "catid": "1_37",
        "info": "#[email protected]",
        "description": "Visit us at [url]http://commiesubs.com[/url] for the latest updates and news.\n{}".format(twitter_list),
        "remake": "0",
        "anonymous": "0",
        "hidden": "0",
        "rules": "1",
        "submit": "Upload"
    })
    response = yield nyaagent.request("POST", "http://www.nyaa.eu/?page=upload",
                                     Headers({'Content-Type': ['multipart/form-data; boundary={}'.format(post_data.boundary)]}),
                                     post_data)
    if response.code != 200:
        # Nyaa-specific HTTP status codes mapped to human-readable reasons.
        nyaa_codes = {
            418: "I'm a teapot (You're doing it wrong)",
            460: "Missing Announce URL",
            461: "Already Exists",
            462: "Invalid File",
            463: "Missing Data",
            520: "Configuration Broken"
        }
        # NOTE(review): an unmapped code would raise KeyError here.
        self.msg(channel, "Aborted releasing {}: Couldn't upload torrent to Nyaa. Error #{:d}: {}".format(show["series"], response.code, nyaa_codes[response.code]))
        return
    self.notice(user, "Uploaded to Nyaa")
    # Step 7: Get torrent link from Nyaa
    body = yield returnBody(response)
    # NOTE(review): unescaped '.' in the pattern — likely meant literal dots.
    match = re.search("http://www.nyaa.eu/\?page=view&tid=[0-9]+", body)
    if not match:
        self.msg(channel, "Aborted releasing {}: Couldn't find torrent link in Nyaa's response.".format(show["series"]))
        with open("{}/{}".format(guid, "nyaa_submit.html"), "wb") as f:
            f.write(body)
        return
    # NOTE(review): replace("&","&") is a no-op as written — this looks like
    # an HTML-entity decode ("&amp;" -> "&") lost in transit; verify upstream.
    info_link = match.group(0).replace("&", "&")
    download_link = info_link.replace("view", "download")
    self.notice(user, "Got Nyaa torrent link")
    # Step 8: Upload torrent link to TT
    ttagent = CookieAgent(Agent(reactor), cookielib.CookieJar())
    response = yield ttagent.request("POST", "http://tokyotosho.info/login.php",
                                    Headers({'Content-Type': ['application/x-www-form-urlencoded']}),
                                    FileBodyProducer(StringIO(urllib.urlencode({"username": self.factory.config.tt_user, "password": self.factory.config.tt_pass, "submit": "Submit"}))))
    body = yield returnBody(response)
    # TT failures are non-fatal: the release continues either way.
    if "Logged in." not in body:
        self.msg(channel, "Couldn't login to TT. Continuing to release {} regardless.".format(show["series"]))
        with open("{}/{}".format(guid, "tt_login.html"), "wb") as f:
            f.write(body)
    else:
        response = yield ttagent.request("POST", "http://tokyotosho.info/new.php",
                                        Headers({'Content-Type': ['application/x-www-form-urlencoded']}),
                                        FileBodyProducer(StringIO(urllib.urlencode({
                                            "type": "1",
                                            "url": download_link,
                                            "comment": "#[email protected]",
                                            "website": "http://www.commiesubs.com/",
                                            "send": "Submit New Torrent"
                                        }))))
        body = yield returnBody(response)
        if "Torrent Submitted" not in body:
            self.msg(channel, "Couldn't upload torrent to TT. Continuing to release {} regardless.".format(show["series"]))
            with open("{}/{}".format(guid, "tt_submit.html"), "wb") as f:
                f.write(body)
        else:
            self.notice(user, "Uploaded to TT")
    # Step 9: Create blog post
    blog = Proxy("http://commiesubs.com/xmlrpc.php")
    # Match the show's blog category by the slug embedded in its blog link.
    slug = show["blog_link"].split("/")[-2]
    categories = ["The Bread Lines"]
    result = yield blog.callRemote("wp.getTerms", 0, self.factory.config.blog_user, self.factory.config.blog_pass, "category")
    for term in result:
        if term["slug"] == slug:
            categories.append(term["name"])
    try:
        yield blog.callRemote(
            "wp.newPost",
            0,  # Blog ID
            self.factory.config.blog_user,  # Username
            self.factory.config.blog_pass,  # Password
            {  # Content
                "post_type": "post",
                "post_status": "publish",
                "comment_status": "open",
                "post_title": "{} {:02d}{}".format(show["series"], episode, version),
                "post_content": "<a href=\"{}\">Torrent</a>".format(info_link),
                "terms_names": {"category": categories}
            }
        )
        self.notice(user, "Created blog post")
    except:  # blog failure is non-fatal for the release
        self.msg(channel, "Couldn't create blog post. Continuing to release {} regardless.".format(show["series"]))
    # Step 10: Mark show finished on showtimes
    data = yield self.factory.load("show", "update", data={"id": show["id"], "method": "next_episode"})
    if "status" in data and not data["status"]:
        self.msg(channel, data["message"])
    self.msg(channel, "{} released. Torrent @ {}".format(show["series"], info_link))
    # Step 11: Update the topic
    self.factory.update_topic()
    # Step 12: Clean up
    shutil.rmtree(guid, True)
if is_ro: ro_word_dict[phrase] = True else: en_word_dict[phrase] = True if not os.path.exists(cache_dict_path): print('\nloading data from wmt news ...') loader_wmt = ro_en.Loader(0.0, 0.9) ro_data, en_data = loader_wmt.data() print('adding ro data to dict ... ') __add_to_dict(ro_data, True) print('adding en data to dict ... ') __add_to_dict(en_data, False) cache(cache_dict_path, [ro_word_dict, en_word_dict]) filtered_ro_en_dict_path = os.path.join(dictionary_dir, 'filtered_ro_en_merged.json') filtered_en_ro_dict_path = os.path.join(dictionary_dir, 'filtered_en_ro_merged.json') delete_ro_keys = [] delete_en_keys = [] def __check_has_val(val): for k, l in val.items(): if l: return True return False
def command(self, user, channel, msg):
    """Build the final MKV (premux + script [+ chapters + fonts]) and upload an xdelta.

    Downloads the pieces from the staff FTP into a scratch dir, muxes them
    with mkvmerge, renames the result with its CRC32, produces an xdelta
    patch against the premux, and uploads the patch back to the FTP.

    msg: [name_filter, show-name words..., optional trailing flags
    ("--previous", "--no-chapters")]. NOTE(review): body yields deferreds,
    so this is presumably decorated with @inlineCallbacks elsewhere — confirm.
    """
    if len(msg) < 2:
        self.msg(channel, "Need a filter and show name")
        return
    offset, chapters_required = 1, True
    # Trailing "--" flags are popped off the end of msg.
    while msg[-1][:2] == "--":
        arg = msg.pop()
        if arg == "--previous":
            offset = 0
        elif arg == "--no-chapters":
            chapters_required = False
    # fname is a temporary mux output name; renamed after CRC is known.
    name_filter, show, fname = msg[0], " ".join(msg[1:]), "test.mkv"
    show = self.factory.resolve(show, channel)
    if show is None:
        return
    if not show["folder"]:
        self.msg(channel, "No FTP folder given for {}".format(show["series"]))
        return
    episode = show["current_ep"] + offset
    # Scratch working directory named by a fresh uuid (retry on collision).
    guid = uuid.uuid4().hex
    while os.path.exists(guid):
        guid = uuid.uuid4().hex
    os.mkdir(guid)
    # Step 1: Search FTP for premux + script
    ftp = yield ClientCreator(
        reactor, FTPClient, self.factory.config.ftp_user, self.factory.config.ftp_pass
    ).connectTCP(self.factory.config.ftp_host, self.factory.config.ftp_port)
    ftp.changeDirectory("/{}/{:02d}/".format(show["folder"], episode))
    filelist = FTPFileListProtocol()
    yield ftp.list(".", filelist)
    files = [x["filename"] for x in filelist.files if x["filetype"] != "d"]
    # Exactly one match per extension is required below.
    premux = fnmatch.filter(files, "*{}*.mkv".format(name_filter))
    script = fnmatch.filter(files, "*{}*.ass".format(name_filter))
    chapters = fnmatch.filter(files, "*{}*.xml".format(name_filter))
    if not premux:
        self.msg(channel, "No premux found")
        return
    elif len(premux) > 1:
        self.msg(channel, "Too many premux files match the filter: {}".format(", ".join(premux)))
        return
    else:
        premux = premux[0]
    if not script:
        self.msg(channel, "No script found")
        return
    elif len(script) > 1:
        self.msg(channel, "Too many script files match the filter: {}".format(", ".join(script)))
        return
    else:
        script = script[0]
    if chapters_required:
        if not chapters:
            self.msg(channel, "No chapters found")
            return
        elif len(chapters) > 1:
            self.msg(channel, "Too many chapter files match the filter: {}".format(", ".join(chapters)))
            return
        else:
            chapters = chapters[0]
    # Step 2: Download that shit
    # Reuse the locally cached premux if present; otherwise fetch it into
    # the cache first via the FTP-download helper.
    if not os.path.isfile("{}/{}".format(self.factory.config.premux_dir, premux)):
        premux_len = [x["size"] for x in filelist.files if x["filename"] == premux][0]
        success = yield cache(self, user, ftp, premux, premux_len)
        if not success:
            self.msg(channel, "Aborted creating xdelta for {}: Download of premux file had incorrect size.".format(show["series"]))
            yield ftp.quit()
            ftp.fail(None)
            return
    shutil.copyfile("{}/{}".format(self.factory.config.premux_dir, premux),
                    "{}/{}".format(guid, premux))
    script_len = [x["size"] for x in filelist.files if x["filename"] == script][0]
    script_downloader = Downloader("{}/{}".format(guid, script))
    yield ftp.retrieveFile(script, script_downloader)
    # Size check stands in for download integrity.
    if script_downloader.done() != script_len:
        self.msg(channel, "Aborted creating xdelta for {}: Download of script file had incorrect size.".format(show["series"]))
        yield ftp.quit()
        ftp.fail(None)
        return
    if chapters_required:
        chapters_len = [x["size"] for x in filelist.files if x["filename"] == chapters][0]
        chapters_downloader = Downloader("{}/{}".format(guid, chapters))
        yield ftp.retrieveFile(chapters, chapters_downloader)
        if chapters_downloader.done() != chapters_len:
            self.msg(channel, "Aborted creating xdelta for {}: Download of chapter file had incorrect size.".format(show["series"]))
            yield ftp.quit()
            ftp.fail(None)
            return
        self.notice(user, "Found premux, script and chapters: {}, {} and {}".format(premux, script, chapters))
    else:
        self.notice(user, "Found premux and script: {} and {}".format(premux, script))
    # Step 3: Download fonts
    filelist = FTPFileListProtocol()
    yield ftp.list("fonts", filelist)
    files = [x["filename"] for x in filelist.files if x["filetype"] != "d"]
    fonts = []
    for font in files:
        font_len = [x["size"] for x in filelist.files if x["filename"] == font][0]
        font_downloader = Downloader("{}/{}".format(guid, font))
        yield ftp.retrieveFile("fonts/{}".format(font), font_downloader)
        # A failed font download is non-fatal; the mux proceeds without it.
        if font_downloader.done() != font_len:
            self.notice(user, "Failed to download font: {}. Proceeding without it.".format(font))
        else:
            fonts.append(font)
    self.notice(user, "Fonts downloaded. ({})".format(", ".join(fonts)))
    # Step 4: MKVMerge
    arguments = ["-o", "{}/{}".format(guid, fname)]
    if chapters_required:
        # --no-chapters presumably drops chapters embedded in the inputs so
        # the downloaded XML (--chapters) is authoritative — verify against
        # mkvmerge docs.
        arguments.extend(["--no-chapters", "--chapters", "{}/{}".format(guid, chapters)])
    for font in fonts:
        arguments.extend(
            ["--attachment-mime-type", "application/x-truetype-font", "--attach-file", "{}/{}".format(guid, font)]
        )
    arguments.extend(["{}/{}".format(guid, premux), "{}/{}".format(guid, script)])
    code = yield getProcessValue(getPath("mkvmerge"), args=arguments, env=os.environ)
    if code != 0:
        self.msg(channel, "Aborted creating xdelta for {}: Couldn't merge premux and script.".format(show["series"]))
        yield ftp.quit()
        ftp.fail(None)
        return
    self.notice(user, "Merged premux and script")
    # Step 5: Determine filename
    # NOTE(review): unescaped '.' before "ass" — likely meant r"(v\d+)\.ass".
    match = re.search("(v\d+).ass", script)
    version = match.group(1) if match is not None else ""
    try:
        with open("{}/{}".format(guid, fname), "rb") as f:
            # Mask to 32 bits for a stable unsigned hex rendering.
            crc = "{:08X}".format(binascii.crc32(f.read()) & 0xFFFFFFFF)
    except:  # NOTE(review): bare except — consider narrowing to OSError.
        self.msg(channel, "Aborted creating xdelta for {}: Couldn't open completed file for CRC verification.".format(show["series"]))
        yield ftp.quit()
        ftp.fail(None)
        return
    nfname = "[Commie] {} - {:02d}{} [{}].mkv".format(show["series"], episode, version, crc)
    os.rename("{}/{}".format(guid, fname), "{}/{}".format(guid, nfname))
    fname = nfname
    self.notice(user, "Determined final filename to be {}".format(fname))
    # Step 5: Make that xdelta
    xdelta = script.replace(".ass", ".xdelta")
    # xdelta3 -e encodes fname as a patch against the premux source (-s).
    code = yield getProcessValue(
        getPath("xdelta3"),
        args=[
            "-f",
            "-e",
            "-s",
            "{}/{}".format(guid, premux),
            "{}/{}".format(guid, fname),
            "{}/{}".format(guid, xdelta),
        ],
        env=os.environ,
    )
    if code != 0:
        self.msg(channel, "Aborted creating xdelta for {}: Couldn't create xdelta.".format(show["series"]))
        yield ftp.quit()
        ftp.fail(None)
        return
    self.notice(user, "Made xdelta")
    # Step 6: Upload that xdelta
    store, finish = ftp.storeFile("{}".format(xdelta))
    sender = yield store
    # NOTE(review): reads the whole file into memory before writing.
    with open("{}/{}".format(guid, xdelta), "rb") as f:
        sender.transport.write(f.read())
    sender.finish()
    yield finish
    self.msg(channel, "xdelta for {} uploaded".format(show["series"]))
    # Step 7: Clean up
    yield ftp.quit()
    ftp.fail(None)
    shutil.rmtree(guid, True)