def my_hook(d):
    if d['status'] == 'downloading':
        speed_info = ""
        if 'filename' in d:
            filename = os.path.basename(d['filename'])
        else:
            filename = ""
        if '_eta_str' in d and '_speed_str' in d:
            speed_info = " (" + d['_speed_str'] + " ETA: " + d['_eta_str'] + ")"
        # Strip non-ASCII characters so the status line renders safely
        filename = filename.encode('ascii', 'ignore').decode('ascii')
        dz.begin("Downloading " + filename + speed_info + "...")
        total_bytes = 0
        if 'downloaded_bytes' in d:
            if 'total_bytes' in d:
                total_bytes = d['total_bytes']
            elif 'total_bytes_estimate' in d:
                total_bytes = d['total_bytes_estimate']
            # Guard against division by zero when the total size is unknown
            if total_bytes > 0:
                percent = int(100 * d['downloaded_bytes'] / total_bytes)
                if percent > 0:
                    utils.set_determinate_progress(True)
                    utils.set_progress_percent(percent)
    if d['status'] == 'finished':
        utils.set_determinate_progress(False)
        utils.reset_progress()
        print('Download complete')
def train_models():
    if len(utils.CATEGORIES) == 0 or utils.not_enough_images():
        err = utils.get_not_enough_images_error()
        return jsonify(error=err)
    utils.delete_all_models()
    utils.set_maybe_old(True)
    maybe_update_models()
    try:
        regression_train.train()
        cnn_train.train()
    except Exception:
        err = "Unknown error."
        utils.delete_all_models()
        return jsonify(error=err)
    if utils.train_should_stop():
        utils.delete_all_models()
        utils.train_should_stop(False)
    else:
        utils.set_maybe_old(False)
    utils.reset_progress()
    return "ok"
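# A minimal sketch of how train_models() could be wired up as an HTTP endpoint.
# train_models() uses jsonify, which suggests a Flask app; the app object,
# route path, and HTTP method below are assumptions for illustration, not
# taken from the source.
from flask import Flask, jsonify

app = Flask(__name__)

@app.route('/train', methods=['POST'])
def train_endpoint():
    # Delegates to train_models(), which returns either a jsonify(error=...)
    # response or the plain string "ok".
    return train_models()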
def download_url(url):
    regex = re.compile(
        r'^(?:http)s?://'  # http:// or https://
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  # domain...
        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'  # ...or ip
        r'(?::\d+)?'  # optional port
        r'(?:/?|[/?]\S+)$', re.IGNORECASE)
    if not regex.match(url):
        dz.fail("Not a valid video URL")

    dz.begin("Checking youtube-dl library is up to date...")
    utils.set_determinate_progress(False)
    updater.update_youtubedl()

    dz.begin("Preparing to download video...")
    utils.set_determinate_progress(False)
    utils.reset_progress()

    # Put ffmpeg in PATH for merging the video's audio and video streams
    if 'apple_silicon' in os.environ:
        os.environ["PATH"] += os.pathsep + os.path.join(os.getcwd(), 'ffmpeg-arm')
    else:
        os.environ["PATH"] += os.pathsep + os.path.join(os.getcwd(), 'ffmpeg')

    # Download URL from clipboard
    sys.path.append("yt-dlp")
    from yt_dlp import YoutubeDL
    ydl_opts = {
        'format': 'bestvideo[ext=mp4]+bestaudio[ext=m4a]/best[ext=mp4]/best',
        'outtmpl': os.path.join(os.environ['EXTRA_PATH'], '%(title)s.%(ext)s'),
        'logger': MyLogger(),
        'progress_hooks': [my_hook]
    }
    try:
        with YoutubeDL(ydl_opts) as ydl:
            ydl.download([url])
    except Exception as e:
        print(traceback.format_exc())
        dz.error("Video Download Failed",
                 "Downloading video failed with the error:\n\n" + str(e))

    dz.finish("Video Download Complete")
    dz.url(False)
def download_url(url):
    regex = re.compile(
        r'^(?:http)s?://'  # http:// or https://
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  # domain...
        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'  # ...or ip
        r'(?::\d+)?'  # optional port
        r'(?:/?|[/?]\S+)$', re.IGNORECASE)
    if not regex.match(url):
        dz.fail("Not a valid video URL")

    dz.begin("Checking youtube-dl library is up to date...")
    utils.set_determinate_progress(False)
    updater.update_youtubedl()

    dz.begin("Preparing to download video...")
    utils.set_determinate_progress(False)
    utils.reset_progress()

    # Put ffmpeg in PATH for merging the video's audio and video streams
    os.environ["PATH"] += os.pathsep + os.path.join(os.getcwd(), 'ffmpeg')

    # Download URL from clipboard
    sys.path.append("youtube-dl")
    import youtube_dl
    ydl_opts = {
        'format': 'bestvideo[ext=mp4]+bestaudio[ext=m4a]/best[ext=mp4]/best',
        'outtmpl': os.path.join(os.environ['EXTRA_PATH'], '%(title)s.%(ext)s'),
        'logger': MyLogger(),
        'progress_hooks': [my_hook]
    }
    try:
        with youtube_dl.YoutubeDL(ydl_opts) as ydl:
            ydl.download([url])
    except Exception as e:
        print(traceback.format_exc())
        dz.error("Video Download Failed",
                 "Downloading video failed with the error:\n\n" + str(e))

    dz.finish("Video Download Complete")
    dz.url(False)
import time
import sys
import json
import multiprocessing

import config
import utils

if __name__ == '__main__':
    utils.init()
    if len(sys.argv) != 3:
        print 'Usage: %s <team_username> <problem_alias>' % sys.argv[0]
        sys.exit(1)
    team_username = sys.argv[1]
    problem_alias = sys.argv[2]

    utils.reset_progress(True)
    task = utils.call(action='fetch_run', team_username=team_username,
                      problem_alias=problem_alias)
    if not task['success']:
        raise Exception('Failed to fetch run.')
    print 'Grading run_id %s (team %s, problem %s) of type %s... ' % (
        task['run_id'], task['team_username'], task['alias'], task['problem_type']),

    module = utils.import_module(task['contest_type'], task['problem_type'])
    for key in ['run_metadata', 'problem_metadata', 'division_metadata']:
        task[key] = json.loads(task[key])

    q = multiprocessing.Queue()
    grader = multiprocessing.Process(target=module.grade, args=(q, task, True))
    grader.start()
        return 'judge_id %d, contest_id %d' % (self.judge_id, self.contest_id)


if __name__ == '__main__':
    utils.init()
    judge = AutoJudge()
    print time.strftime('[%H:%M:%S]:', time.localtime()),
    print 'Initialized judge to %s' % judge

    while True:
        print time.strftime('[%H:%M:%S]:', time.localtime()),
        task = judge.fetch_task()
        task_type = task['task_type']
        if task_type == 'grade':
            task['run_metadata'] = json.loads(task['run_metadata'])
            print 'Grading run_id %s (team %s, problem %s) of type %s... ' % (
                task['run_id'], task['team_username'], task['alias'], task['problem_type']),
            utils.reset_progress(False)

            problem_metadata, division_metadata = judge.get_cached_metadata(
                task['problem_id'], task['division_id'],
                task['problem_metadata_hash'], task['division_metadata_hash'])
            if problem_metadata is None or division_metadata is None:
                utils.progress('Refreshing metadata')
                problem_metadata, division_metadata = judge.update_cached_metadata(
                    task['problem_id'], task['division_id'],
                    task['problem_metadata_hash'], task['division_metadata_hash'])
            task['problem_metadata'] = problem_metadata
            task['division_metadata'] = division_metadata

            module = utils.import_module(judge.contest_type, task['problem_type'])
            q = multiprocessing.Queue()
            grader = multiprocessing.Process(target=module.grade, args=(q, task, False))
            grader.start()
            result = q.get()
            grader.join()
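# A minimal sketch of a problem-type grader module, based only on the call
# sites above: module.grade(q, task, verbose) is started in a child process
# and the parent collects a single result with q.get(). The keys of the
# result dict below are assumptions for illustration, not the real grading
# protocol.
def grade(q, task, verbose):
    # task carries the decoded run/problem/division metadata set up by the caller
    submission = task['run_metadata']
    # ... compile and run the submission against the problem's test data ...
    q.put({'run_id': task['run_id'], 'correct': True})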
    return word


def clean(txt, stem=False, spell=False):
    # print txt
    txt = txt.lower().strip()
    txt = txt.replace('^p', '')
    txt = re.sub('[%s]' % re.escape(string.punctuation + u'“”«»–—―◦℅™•№▪'), ' ', txt)
    txt = u' '.join([convert_word(i, stem, spell) for i in tokenizer.tokenize(txt)
                     if i not in russtop])
    # print txt
    return txt


if __name__ == '__main__':
    if len(sys.argv) != 3:
        print "Usage " + sys.argv[0] + " file cleanfile"
        sys.exit(1)
    train_file = sys.argv[1]
    clean_filename = sys.argv[2]

    prev_l = ''
    utils.reset_progress()
    with codecs.open(clean_filename, 'w', 'utf-8') as fw:
        for parts in utils.read_train(train_file):
            parts[3] = clean(parts[3], False, False)
            parts[4] = clean(parts[4], False, False)
            fw.write('\t'.join(parts))
            utils.log_progress()