def start_axis(message):
    """Remux every .mkv in the client's axis folder to .mp4 and move results into 'done'.

    Args:
        message: dict with at least an 'ip' key naming the client subfolder
            under PATH_AXIS.

    Per-file failures are printed and skipped; the function never raises.
    """
    status = 0
    # get client folder
    path_studio = os.path.join(PATH_AXIS, message['ip'])
    print(termcolor.colored('start_axis ... ' + helper.get_size(path_studio), 'yellow'), flush=True)
    for file in [item.name for item in os.scandir(path_studio) if item.is_file()]:
        try:
            if not file.endswith(('.mkv', '.MKV')):
                continue
            # BUG FIX: the old str.replace('mkv', 'mp4') also rewrote 'mkv'
            # substrings in the middle of a filename; swap the extension only.
            mp4 = file[:-4] + '.mp4'
            in_mkv = os.path.join(path_studio, file)
            # generate mp4 absolute path
            out_mp4 = os.path.join(path_studio, mp4)
            command = PATH_FFMPEG + ' -y -i "' + in_mkv + '" -metadata title="@alaa_sanatisharif" -preset ultrafast -vcodec copy -r 50 -vsync 1 -async 1 "' + out_mp4 + '"'
            process = subprocess.Popen(command, stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT, shell=True)
            # accumulate non-zero ffmpeg exit codes
            status = process.wait() + status
            # remove mkv after convert
            os.remove(in_mkv)
            done_dir = os.path.join(path_studio, 'done')
            # exist_ok avoids the check-then-create race of the original
            os.makedirs(done_dir, exist_ok=True)
            if os.path.exists(os.path.join(done_dir, mp4)):
                os.remove(os.path.join(done_dir, mp4))
            shutil.move(out_mp4, done_dir)
        except Exception:
            # narrowed from bare except (which also swallowed KeyboardInterrupt);
            # still best-effort per file
            print(termcolor.colored('failed', 'red', attrs=['reverse']), flush=True)
def start_tablet(message):
    """Re-encode every .mp4 in the client's tablet folder to a 960x720 '*out.mp4' and move results into 'done'.

    Args:
        message: dict with at least an 'ip' key naming the client subfolder
            under PATH_TABLET.

    Per-file failures are printed and skipped; the function never raises.
    """
    status = 0
    # get client folder
    path_studio = os.path.join(PATH_TABLET, message['ip'])
    print(termcolor.colored('start_tablet ... ' + helper.get_size(path_studio), 'yellow'), flush=True)
    for file in [item.name for item in os.scandir(path_studio) if item.is_file()]:
        try:
            if not file.endswith(('.mp4', '.MP4')):
                continue
            # BUG FIX: the old str.replace('mp4', 'out.mp4') also rewrote 'mp4'
            # substrings in the middle of a filename; rewrite the extension only
            # (preserving the original's case behavior: .mp4 -> .out.mp4,
            # .MP4 -> .OUT.MP4).
            if file.endswith('.mp4'):
                mp4 = file[:-3] + 'out.mp4'
            else:
                mp4 = file[:-3] + 'OUT.MP4'
            in_mp4 = os.path.join(path_studio, file)
            # generate mp4 absolute path
            out_mp4 = os.path.join(path_studio, mp4)
            command = PATH_FFMPEG + ' -y -i "' + in_mp4 + '" -metadata title="@alaa_sanatisharif" -sws_flags lanczos -s 960x720 -profile:v baseline -level 3.0 -vcodec libx264 -crf 18 -r 24 -preset veryslow -pix_fmt yuv420p -tune film -acodec libfdk_aac -ab 320k -movflags +faststart "' + out_mp4 + '"'
            process = subprocess.Popen(command, stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT, shell=True)
            # accumulate non-zero ffmpeg exit codes
            status = process.wait() + status
            # remove the source mp4 after conversion (old comment wrongly said mkv)
            os.remove(in_mp4)
            done_dir = os.path.join(path_studio, 'done')
            # exist_ok avoids the check-then-create race of the original
            os.makedirs(done_dir, exist_ok=True)
            if os.path.exists(os.path.join(done_dir, mp4)):
                os.remove(os.path.join(done_dir, mp4))
            shutil.move(out_mp4, done_dir)
        except Exception:
            # narrowed from bare except; still best-effort per file
            print(termcolor.colored('failed', 'red', attrs=['reverse']), flush=True)
def start_rabiea(message):
    """Re-encode every .mp4 in PATH_RABIEA into 'done/HQ-<name>' with tag-selected quality.

    message['tag'] selects the ffmpeg settings:
        'rabiea'          -> 1280x720, crf 19, 128k audio
        'rabiea-480'      -> 854x480,  crf 27, 128k audio
        'rabiea-sizeless' -> keep size, crf 28, 96k audio

    Per-file failures are printed and skipped; the function never raises.
    """
    print(termcolor.colored('start_rabiea ... ' + helper.get_size(PATH_RABIEA), 'yellow'), flush=True)
    status = 0
    tag = message['tag']
    for file in [item.name for item in os.scandir(PATH_RABIEA) if item.is_file()]:
        try:
            if not file.endswith(('.mp4', '.MP4')):
                continue
            done_dir = os.path.join(PATH_RABIEA, 'done')
            # exist_ok avoids the check-then-create race of the original
            os.makedirs(done_dir, exist_ok=True)
            # generate input and output filename
            in_mp4 = os.path.join(PATH_RABIEA, file)
            out_mp4 = os.path.join(done_dir, 'HQ-' + file)
            # generate different command types
            if tag == 'rabiea':
                command = PATH_FFMPEG + ' -y -i "' + in_mp4 + '" -metadata title="@alaa_sanatisharif" -sws_flags lanczos -s 1280x720 -profile:v baseline -level 3.0 -vcodec libx264 -crf 19 -r 24 -preset veryslow -pix_fmt yuv420p -tune film -acodec libfdk_aac -ab 128k -movflags +faststart "' + out_mp4 + '"'
            elif tag == 'rabiea-480':
                command = PATH_FFMPEG + ' -y -i "' + in_mp4 + '" -metadata title="@alaa_sanatisharif" -sws_flags lanczos -s 854x480 -profile:v baseline -level 3.0 -vcodec libx264 -crf 27 -r 24 -preset veryslow -pix_fmt yuv420p -tune film -acodec libfdk_aac -ab 128k -movflags +faststart "' + out_mp4 + '"'
            elif tag == 'rabiea-sizeless':
                command = PATH_FFMPEG + ' -y -i "' + in_mp4 + '" -metadata title="@alaa_sanatisharif" -sws_flags lanczos -profile:v baseline -level 3.0 -vcodec libx264 -crf 28 -r 24 -preset veryslow -pix_fmt yuv420p -tune film -acodec libfdk_aac -ab 96k -movflags +faststart "' + out_mp4 + '"'
            else:
                # BUG FIX: an unknown tag used to leave `command` unbound and
                # raise NameError, silently swallowed by the old bare except.
                print(termcolor.colored('failed', 'red', attrs=['reverse']), flush=True)
                continue
            process = subprocess.Popen(command, stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT, shell=True)
            # accumulate non-zero ffmpeg exit codes
            status = process.wait() + status
            if os.path.exists(in_mp4):
                os.remove(in_mp4)
        except Exception:
            # narrowed from bare except; still best-effort per file
            print(termcolor.colored('failed', 'red', attrs=['reverse']), flush=True)
def start_paid_force(message):
    """Generate .webp companions for every image under the client's paid-force folder, then force-rsync it upstream and clean up.

    Args:
        message: dict with 'ip', 'user_id' and 'tag' keys; 'ip' names the
            client subfolder under PATH_PAID_FORCE.
    """
    path_studio = os.path.join(PATH_PAID_FORCE, message['ip'])
    print(termcolor.colored('start_paid_force ... ' + helper.get_size(path_studio), 'yellow'), flush=True)
    threads = []
    for dirpath, dirnames, filenames in os.walk(path_studio):
        for file in filenames:
            fp = os.path.join(dirpath, file)
            # BUG FIX: 'png' was missing its leading dot, so any filename merely
            # ending in the letters 'png' matched as an image.
            if os.path.islink(fp) or not fp.lower().endswith(('.jpg', '.jpeg', '.png')):
                continue
            # wait for a free worker slot; sleep instead of a hot busy-spin
            while threading.activeCount() > SIMULTANEOUS_THREADS:
                time.sleep(0.1)
            print(termcolor.colored('start_webp_generation', 'green'), flush=True)
            # drop finished workers from the bookkeeping list
            threads = [t for t in threads if t.is_alive()]
            threads.append(Thread(name='t: ' + str(fp), target=helper.webp, args=(fp,)))
            threads[-1].start()
    # BUG FIX: the old wait loop tested `t.is_alive` (a bound method object,
    # always truthy) and busy-spun; join() blocks until every worker finishes.
    for t in threads:
        t.join()
    command = 'sshpass -p "' + PASSWORD + '" rsync -avhWP --no-compress --ignore-times "' + path_studio + os.path.sep + '" ' + SFTP + PATH_UPSTREAM_PAID
    status = run_command(command)
    cleanup(status, path_studio, message['user_id'], message['tag'])
def start_paid(message):
    """Rsync the client's paid folder upstream (size-only comparison), then run cleanup.

    Args:
        message: dict with 'ip', 'user_id' and 'tag' keys; 'ip' names the
            client subfolder under PATH_PAID.
    """
    path_studio = os.path.join(PATH_PAID, message['ip'])
    banner = 'start_paid ... ' + helper.get_size(path_studio)
    print(termcolor.colored(banner, 'yellow'), flush=True)
    # trailing separator makes rsync copy the folder's contents, not the folder
    source = path_studio + os.path.sep
    command = ('sshpass -p "' + PASSWORD + '" rsync -avhWP --no-compress --size-only "'
               + source + '" ' + SFTP + PATH_UPSTREAM_PAID)
    status = run_command(command)
    cleanup(status, path_studio, message['user_id'], message['tag'])
def start_convert(message):
    """Convert each set's PATH_HIGH .mp4 files into 480p (PATH_MID) and 240p (PATH_LOW) renditions using worker threads, then move the finished set into 'done'.

    Args:
        message: dict with at least an 'ip' key naming the client subfolder
            under PATH_CONVERT.
    """
    # module-level status — presumably accumulated by single_convert() workers;
    # TODO(review): confirm against single_convert's definition
    global status
    status = 0
    threads = []
    # get client folder
    path_studio = os.path.join(PATH_CONVERT, message['ip'])
    print(termcolor.colored('start_convert ... ' + helper.get_size(path_studio), 'yellow'), flush=True)
    # renamed from 'set', which shadowed the builtin
    for set_name in [item.name for item in os.scandir(path_studio) if item.is_dir()]:
        if not os.path.isdir(os.path.join(path_studio, set_name, PATH_HIGH)):
            continue
        # create output folders (exist_ok avoids the check-then-create race)
        os.makedirs(os.path.join(path_studio, set_name, PATH_MID), exist_ok=True)
        os.makedirs(os.path.join(path_studio, set_name, PATH_LOW), exist_ok=True)
        for file in [item.name for item in os.scandir(os.path.join(path_studio, set_name, PATH_HIGH)) if item.is_file()]:
            try:
                if not file.endswith(('.mp4', '.MP4')):
                    continue
                # generate output file names
                in_high = os.path.join(path_studio, set_name, PATH_HIGH, file)
                out_mid = os.path.join(path_studio, set_name, PATH_MID, file)
                out_low = os.path.join(path_studio, set_name, PATH_LOW, file)
                # single ffmpeg invocation emits both renditions
                command = PATH_FFMPEG + ' -y -i "' + in_high + '" -metadata title="@alaa_sanatisharif" -sws_flags lanczos -s 854x480 -profile:v baseline -level 3.0 -vcodec libx264 -crf 27 -r 24 -preset veryslow -pix_fmt yuv420p -tune film -acodec libfdk_aac -ab 96k -movflags +faststart "' + out_mid + '" -sws_flags lanczos -s 426x240 -profile:v baseline -level 3.0 -vcodec libx264 -crf 27 -r 24 -preset veryslow -pix_fmt yuv420p -tune film -acodec libfdk_aac -ab 64k -movflags +faststart "' + out_low + '"'
                # wait for an available worker slot
                while threading.activeCount() > SIMULTANEOUS_THREADS:
                    time.sleep(1)
                # remove finished threads from the bookkeeping list
                threads = [t for t in threads if t.is_alive()]
                # push thread to stack
                threads.append(Thread(name='t: ' + str(in_high), target=single_convert, args=(command,)))
                threads[-1].start()
            except Exception:
                # narrowed from bare except; still best-effort per file
                print(termcolor.colored('failed', 'red', attrs=['reverse']), flush=True)
        # BUG FIX: the old wait loop tested `t.is_alive` (a bound method object,
        # always truthy) and busy-spun; join() blocks until every worker is done
        # before the set is moved.
        for t in threads:
            t.join()
        threads = []
        nondestructive_move(os.path.join(path_studio, set_name), os.path.join(path_studio, 'done'), set_name)
def run():
    """Train the composer classifier on the configured split, then evaluate on a held-out test batch."""
    # Split the data map into train / dev / test
    train_map, dev_map, test_map = get_datamap(composers=composers, split=True)
    n_train = get_size(train_map)
    n_dev = get_size(dev_map)
    print(n_train, n_dev)

    # Create Train and Dev generators
    train_gen = randomized_generator(train_map, batch_size=batch_size,
                                     timesteps=timesteps, n_classes=n_classes)
    dev_gen = randomized_generator(dev_map, batch_size=batch_size,
                                   timesteps=timesteps, n_classes=n_classes)

    # One epoch covers a tenth of the samples per batch-size unit
    steps_train = int(n_train / (10 * batch_size))
    steps_dev = int(n_dev / (10 * batch_size))
    print(steps_train, steps_dev)

    # Train Model
    clf = Composer_Classifier(n_classes=n_classes)
    clf.create_model()
    clf.train(train_generator=train_gen, dev_generator=dev_gen,
              steps_per_epoch=steps_train, validation_steps=steps_dev,
              epochs=n_epochs)

    # Test
    x_test, y_test = get_batch(test_map, batch_size=test_batch_size, timesteps=100)
    clf.test(x_test, y_test)
def start_announce(message):
    """Re-encode each announce set's PATH_HIGH .mp4 files into square mid (854x854) and low (426x426) renditions, then move the set into 'done'.

    Args:
        message: unused here beyond dispatch; kept for signature parity with
            the other start_* handlers.
    """
    print(termcolor.colored('start_announce ... ' + helper.get_size(PATH_ANNOUNCE), 'yellow'), flush=True)
    status = 0
    # renamed from 'set', which shadowed the builtin
    for set_name in [item.name for item in os.scandir(PATH_ANNOUNCE) if item.is_dir()]:
        if not os.path.isdir(os.path.join(PATH_ANNOUNCE, set_name, PATH_HIGH)):
            continue
        # create output folders (exist_ok avoids the check-then-create race)
        os.makedirs(os.path.join(PATH_ANNOUNCE, set_name, PATH_MID), exist_ok=True)
        os.makedirs(os.path.join(PATH_ANNOUNCE, set_name, PATH_LOW), exist_ok=True)
        for file in [item.name for item in os.scandir(os.path.join(PATH_ANNOUNCE, set_name, PATH_HIGH)) if item.is_file()]:
            try:
                if not file.endswith(('.mp4', '.MP4')):
                    continue
                in_high = os.path.join(PATH_ANNOUNCE, set_name, PATH_HIGH, file)
                out_mid = os.path.join(PATH_ANNOUNCE, set_name, PATH_MID, file)
                out_low = os.path.join(PATH_ANNOUNCE, set_name, PATH_LOW, file)
                # single ffmpeg invocation emits both renditions
                command = PATH_FFMPEG + ' -y -i "' + in_high + '" -metadata title="@alaa_sanatisharif" -sws_flags lanczos -s 854x854 -profile:v baseline -level 3.0 -vcodec libx264 -crf 28 -r 24 -preset veryslow -pix_fmt yuv420p -tune film -acodec libfdk_aac -ab 64k -movflags +faststart "' + out_mid + '" -sws_flags lanczos -s 426x426 -profile:v baseline -level 3.0 -vcodec libx264 -crf 28 -r 24 -preset veryslow -pix_fmt yuv420p -tune film -acodec libfdk_aac -ab 50k -movflags +faststart "' + out_low + '"'
                process = subprocess.Popen(command, stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT, shell=True)
                # accumulate non-zero ffmpeg exit codes
                status = process.wait() + status
            except Exception:
                # narrowed from bare except; still best-effort per file
                print(termcolor.colored('failed', 'red', attrs=['reverse']), flush=True)
        nondestructive_move(os.path.join(PATH_ANNOUNCE, set_name), os.path.join(PATH_ANNOUNCE, 'done'), set_name)
def start_upload(message, src_path, dst_path):
    """Generate .webp companions for every image under the client's folder, then mirror the folder to dst_path and clean up.

    Args:
        message: dict with 'ip', 'user_id' and 'tag' keys.
        src_path: root folder containing per-client subfolders.
        dst_path: mirror destination appended to the MC_MIRROR command.
    """
    path_studio = os.path.join(src_path, message['ip'])
    print(termcolor.colored('start ' + message['tag'] + ' ... ' + helper.get_size(path_studio), 'yellow'), flush=True)
    threads = []
    print(termcolor.colored('start_webp_generation', 'green'), flush=True)
    for dirpath, dirnames, filenames in os.walk(path_studio):
        for file in filenames:
            fp = os.path.join(dirpath, file)
            # BUG FIX: 'png' was missing its leading dot, so any filename merely
            # ending in the letters 'png' matched as an image.
            if os.path.islink(fp) or not fp.lower().endswith(('.jpg', '.jpeg', '.png')):
                continue
            # wait for a free worker slot; sleep instead of a hot busy-spin
            while threading.activeCount() > SIMULTANEOUS_THREADS:
                time.sleep(0.1)
            # drop finished workers from the bookkeeping list
            threads = [t for t in threads if t.is_alive()]
            threads.append(Thread(name='t: ' + str(fp), target=helper.webp, args=(fp,)))
            threads[-1].start()
    # BUG FIX: the old wait loop tested `t.is_alive` (a bound method object,
    # always truthy) and busy-spun; join() blocks until every worker finishes.
    for t in threads:
        t.join()
    command = MC_MIRROR + path_studio + os.path.sep + ' ' + dst_path
    status = run_command(command)
    cleanup(status, path_studio, message['user_id'], message['tag'])
# ---------------------------------------------------------------------------
# Command-line training script: parse hyperparameters, build and train the
# classifier, evaluate on the test set, and save a checkpoint.
# ---------------------------------------------------------------------------
parser = argparse.ArgumentParser()
parser.add_argument('data_dir', action='store', help='image directory')
parser.add_argument('--save_dir', action='store', help='save checkpoint')
parser.add_argument('--in_file', type=str, default="label_map.json", help='input json file')
parser.add_argument('--arch', action='store', default='vgg19', help='architecture')
parser.add_argument('--epochs', action='store', type=int, default=6, help='number of epochs')
parser.add_argument('--learning_rate', action='store', type=float, default=0.03, help='learning rate')
parser.add_argument('--hidden_units', action='store', type=int, default=2900, help='number of hidden units')
parser.add_argument('--out_size', action='store', type=int, default=102, help='number of outputs')
parser.add_argument('--drop_p', type=float, default=0.5, help='probability of dropping the weights')
parser.add_argument('--gpu', action='store_true', help='use gpu')
args = parser.parse_args()

# Label mapping and data loaders
json_path = args.in_file
label_map = helper.load_label_map(json_path)
data_dir = args.data_dir
train_data, validation_data, test_data, trainloader, validloader, testloader = helper.preprocess(data_dir)

# NOTE(review): the original also called classifier.build_model(...) here and
# discarded the result (`model_` was never used); the dead call was removed.
model = helper.premodel(args.arch)
# freeze the pretrained feature extractor; only the new classifier trains
for param in model.parameters():
    param.requires_grad = False
in_size = helper.get_size(model, args.arch)
# BUG FIX: drop_p was hard-coded to 0.5 here, silently ignoring --drop_p
model.classifier = helper.Network(in_size, args.out_size, [args.hidden_units], drop_p=args.drop_p)

criterion = nn.NLLLoss()
optimizer = optim.SGD(model.classifier.parameters(), lr=args.learning_rate)
helper.train(model, trainloader, validloader, criterion, optimizer, args.epochs, 40, args.gpu)

# Evaluate on the held-out test set
test_accuracy, test_loss = helper.valid_loss_acc(model, testloader, criterion, args.gpu)
print("Test Accuracy: {:.4f} ".format(test_accuracy), "Test Loss: {:.4f}".format(test_loss))
helper.save_checkpoint(model, train_data, optimizer, args.save_dir, args.arch)