def preprocess_video(youtube, start, end, name):
    try:
        os.makedirs("temp")
    except FileExistsError:
        # directory already exists
        pass
    ydl_opts = {
        'format': 'bestvideo[ext=mp4]+bestaudio[ext=m4a]/best[ext=mp4]/best',
        'outtmpl': 'temp/video.mp4'
    }
    with youtube_dl.YoutubeDL(ydl_opts) as ydl:
        ydl.download([youtube])
    start = get_time_string(int(start))
    end = get_time_string(int(end))
    print("Cutting video from {} to {}".format(start, end))
    call([
        'ffmpeg', '-ss', start, '-i', 'temp/video.mp4',
        '-to', end, '-c', 'copy', 'temp/' + name + '.mp4'
    ])
    try:
        os.makedirs("frames")
    except FileExistsError:
        # directory already exists
        pass
    call([
        'ffmpeg', '-i', 'temp/' + name + '.mp4',
        '-r', '25/1', 'frames/output%04d.png'
    ])
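The get_time_string helper is not shown in this excerpt; judging from the call sites it takes an integer number of seconds and returns an ffmpeg-compatible timestamp for the -ss/-to options. A minimal sketch under that assumption, not the project's actual implementation:

def get_time_string(seconds):
    # Assumed behaviour: convert an integer second count into 'HH:MM:SS'.
    hours, remainder = divmod(int(seconds), 3600)
    minutes, secs = divmod(remainder, 60)
    return '{:02d}:{:02d}:{:02d}'.format(hours, minutes, secs)

# e.g. get_time_string(3725) -> '01:02:05'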
def craw_marqueen(self):
    content = xcurl.xcurl(self.marqueen_url).replace("\r", "").replace("\n", "")
    if content == "":
        xlog.LOG.WARN("get marqueen_url content error")
        return
    buy_content = utils.reg(config.marqueen_content_str, content)
    if buy_content == "":
        xlog.LOG.WARN("get marquee content error")
        return
    buy_list = utils.reg1(config.marqueen_item_str, buy_content)
    write_string = ""
    today = utils.get_time_string(int(time.time()))
    craw_day = today
    if self.date != "":
        craw_day = self.date
    uniq_list = list()
    for item in buy_list:
        # locate the "用户" ("user") marker that follows the phone number
        idx = item.find("用户")
        if idx == -1:
            xlog.LOG.ERROR("find user error")
            continue
        user_phone = item[idx - 11:idx]
        column_list = utils.reg1(config.item_column_str, item)
        if len(column_list) < 3:
            xlog.LOG.ERROR("column_list length error")
            continue
        order_time = column_list[0]
        # skip orders that do not belong to the day being crawled
        if order_time.find(craw_day) == -1:
            print order_time, craw_day
            xlog.LOG.ERROR("find order_time error")
            continue
        goods_num = column_list[2].split(" ")[-1]
        idx1 = item.find(config.name_str)
        idx2 = item.find("<i>x")
        goods_name = item[idx1 + len(config.name_str):idx2].strip()
        goods_price = 0
        if goods_name in self.goods_dict.keys():
            goods_price = float(self.goods_dict[goods_name])
        goods_total_price = float(goods_num) * float(goods_price)
        print user_phone, order_time, goods_name, goods_num, goods_total_price
        line = user_phone + " " + order_time + " " + goods_name + " " + goods_num + " " + str(goods_total_price)
        if line in self.file_column:
            print "column exist"
            continue
        write_string += line + "\n"
    print write_string
    if write_string != "":
        header = ""
        if len(self.file_column) == 0:
            # header columns: "user  order time  goods  quantity  total points"
            header = "用户 订单时间 商品 个数 总积分\n"
        write_string = header + write_string
        self.write_file(write_string)
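In this crawler, utils.get_time_string(int(time.time())) yields the current day, which is matched as a substring against the order-time column and also used in output file names (see the constructors further down). A plausible sketch, assuming a date-formatted result; the real utils module may differ:

import time

def get_time_string(timestamp):
    # Assumption: render a Unix timestamp as a date string such as '2019-08-01',
    # suitable for substring-matching against order_time and for file names.
    return time.strftime('%Y-%m-%d', time.localtime(timestamp))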
async def _command_list(self, message):
    """List your reminders"""
    reminders = self.get_reminders(message.author.id)
    log.info(reminders)
    if not reminders:
        msg = "I don't have any reminder for you!"
    else:
        msg = 'Here are your current reminders:\n'
        for reminder in reminders.values():
            in_time = reminder['at_time'] - int(datetime.now().timestamp())
            msg += '`%s` "%s" in %s\n' % (reminder['uid'], reminder['message'],
                                          get_time_string(in_time))
    await self.bot.client.send_message(message.author, msg)
async def _played_command(self, message):
    """Show your game time"""
    msg = ''
    played = self.get(message.author.id)
    if played:
        msg += "As far as I'm aware, you played:\n"
        for game, time in played.items():
            msg += '`%s : %s`\n' % (game, get_time_string(time))
    else:
        msg = "I don't remember you playing anything :("
    await self.bot.client.send_message(message.channel, msg)
def __init__(self, date):
    file_path = "../files/"
    file_name = utils.get_time_string(int(time.time()))
    self.date = date
    if self.date != "":
        file_name = self.date
    # "滚雪球商城抓取结果" = "snowball mall crawl results"
    self.files = file_path + file_name + "滚雪球商城抓取结果"
    # "滚雪球商品列表" = "snowball goods list"
    self.goods_file = file_path + "滚雪球商品列表"
    self.marqueen_url = "https://shop.gxq168.com/index/login"
    self.file_column = list()
    self.load_file()
    self.goods_dict = dict()
    self.load_goods_file()
async def _stats(self, message):
    """Show the bot's general stats"""
    users = 0
    for s in self.client.servers:
        users += len(s.members)
    msg = 'General information:\n'
    msg += '`Admin :` <@%s>\n' % self.conf['admin_id']
    msg += '`Uptime : %s`\n' % get_time_string(
        (datetime.now() - self._start_time).total_seconds())
    msg += '`Users in touch : %s in %s servers`\n' % (
        users, len(self.client.servers))
    msg += '`Commands answered : %d`\n' % self._commands
    msg += '`Users playing : %d`\n' % len(self.timecounter.playing)
    await self.client.send_message(message.channel, msg)
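The reminder, played-time, and stats commands above all pass a number of seconds (time until a reminder, total play time, uptime) to get_time_string and splice the result into a chat message, so here it presumably renders a human-readable duration. A rough sketch under that assumption:

def get_time_string(seconds):
    # Assumed behaviour: format a duration in seconds as e.g. '1d 2h 3m 4s'.
    seconds = int(seconds)
    days, seconds = divmod(seconds, 86400)
    hours, seconds = divmod(seconds, 3600)
    minutes, seconds = divmod(seconds, 60)
    parts = []
    if days:
        parts.append('%dd' % days)
    if hours:
        parts.append('%dh' % hours)
    if minutes:
        parts.append('%dm' % minutes)
    parts.append('%ds' % seconds)
    return ' '.join(parts)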
def post(self):
    """Returns all storage files in a zip file."""
    data = request.get_json()
    if 'key' not in data:
        return jsonify({'error': 'wrong request'}), 400
    if current_app.config['SECRET_KEY'] != data['key']:
        return jsonify({'error': 'invalid key'}), 401
    # Uses Python's zipfile library to zip the storage folder;
    # the zipped folder itself is not saved to disk.
    filename = "storage_{0}.zip".format(get_time_string())
    zip_file = _zip_storage_folder()
    return send_file(zip_file,
                     attachment_filename=filename,
                     as_attachment=True)
def post(self):
    if 'file' not in request.files:
        response = jsonify({'error': 'file not uploaded'})
        return response, 400
    posted_file = request.files['file']
    filename_mid = posted_file.filename
    if filename_mid[-4:] != '.mid':
        response = jsonify({'error': 'a non midi file was uploaded'})
        return response, 400
    filename_mid = secure_filename(filename_mid)
    file_collection = FileCollection.generate_file_collection(
        filename_mid,
        current_app.config['MIDISTORE_PATH'],
        current_app.config['PARALLEL_RESULTS_PATH'],
        current_app.config['CHORD_RESULTS_PATH'])
    # save the uploaded MIDI file
    posted_file.save(os.path.join(file_collection.midi_path))
    if not generate_results(file_collection.midi_path,
                            file_collection.parallels_path,
                            file_collection.chords_path):
        return jsonify({'error': 'an invalid midi file was uploaded'}), 400
    if not _db_commit(file_collection):
        return jsonify({'error': 'an internal server error occurred.'}), 500
    try:
        zip_file = _zip_results(file_collection)
        filename = "results_{}.zip".format(get_time_string())
        return send_file(zip_file,
                         attachment_filename=filename,
                         as_attachment=True)
    except FileNotFoundError:
        return jsonify({'error': 'file is not properly zipped in server.'}), 500
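Both endpoints above call get_time_string() with no arguments to stamp the generated zip file names, so it presumably returns a filesystem-safe timestamp. A minimal sketch under that assumption; the project's actual format may differ:

from datetime import datetime

def get_time_string():
    # Assumption: a filename-safe timestamp, e.g. '20190801_153045'.
    return datetime.now().strftime('%Y%m%d_%H%M%S')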
gen_iter_array = []  # create array for recording epochs of gen_freq updates

# ----------------------------------------------------------------------------
# set up save path
# ----------------------------------------------------------------------------
# Save initial flow image
if not os.path.exists(os.path.dirname('CTLodopab/experimentsLodopab/')):
    print("created CTLodopab/experimentsLodopab/")
    print("os.getcwd()", os.getcwd())
    os.makedirs(os.path.dirname('CTLodopab/experimentsLodopab/'))
else:
    print("experimentsLodopab already exists")

save_path = './CTLodopab/experimentsLodopab/' + 'lodopabCT_CNN_' + utils.get_time_string() + \
    '_lr{:.0e}'.format(learning_rate) + \
    '_genFreq{:d}'.format(gen_freq) + '_nTrain{:d}'.format(n_train) + '_batchSize{:d}'.format(batch_size) + \
    '_optim' + optim_string + '_etaFreq{:d}'.format(eta_freq) + '_gamma_const{:.0e}'.format(gamma_constant) + \
    '_etaTol{:.0e}'.format(eta_tol) + '_noise_level{:.0e}'.format(noise_level) + \
    '_mu{:.0e}'.format(mu) + '_gp_lam{:d}'.format(gp_lam) + \
    '_distMan_lam{:.0e}'.format(dist_man_lam) + '_lr_decay_iter{:d}'.format(lr_decay_iter) + \
    'doAugment{:d}'.format(do_augment) + '_architecture' + architecture + \
    '_stepType' + step_type + '_tvlam001/'
os.makedirs(save_path, exist_ok=True)

# max and min values for plotting
vmin = min(u0_gen_full_train[0, :].view(-1))
vmax = max(u0_gen_full_train[0, :].view(-1))
vmin_val = min(u_true_full_val[ind_val, :].view(-1))
def __init__(self):
    file_path = "../files/"
    self.file_name = utils.get_time_string(int(time.time()))
    # "滚雪球商城抓取结果" = "snowball mall crawl results"; "...汇总" = "summary"
    self.files = file_path + self.file_name + "滚雪球商城抓取结果"
    self.stat_file = file_path + "滚雪球商城抓取结果汇总"
        # model.compile(optimizer='rmsprop', loss=vae_loss)
        # model.fit(inputs=[action_data, target_data], epochs=opts.epochs, batch_size=opts.batch_size,
        #           callbacks=[tbCallBack, BestLossCallBack(model, opts.save_path, opts.save_period)], shuffle=True)
    else:
        print('There is no proper model for option: ' + opts.model)
        return

    callback_list = [
        tbCallBack,
        BestLossCallBack(model, opts.save_path, opts.save_period)
    ]
    # BestLossCallBack(decoder, opts.save_path, opts.save_period, 'decoder')

    # training
    model.compile(optimizer='rmsprop', loss='mse')
    model.fit(action_data, target_data,
              epochs=opts.epochs,
              batch_size=opts.batch_size,
              callbacks=callback_list,
              shuffle=True)


if __name__ == "__main__":
    options.timestamp = get_time_string()
    train_network(options)

# ()()
# ('') HAANJU.YOO
import os

import tensorflow as tf

import utils

MODEL_PATH = './models/cityscapes/dense_net.py'
#MODEL_PATH = './models/cityscapes/dense_net_multiplexer.py'
#MODEL_PATH = './models/cityscapes/dense_net_concat_all.py'
#MODEL_PATH = './models/cityscapes/dense_net_jitter.py'
#MODEL_PATH = './models/cityscapes/dense_net_fix_bn.py'
#MODEL_PATH = './models/cityscapes/dense_net_dilated.py'

#IMG_WIDTH, IMG_HEIGHT = 2048, 1024
IMG_WIDTH, IMG_HEIGHT = 1024, 448
# IMG_WIDTH, IMG_HEIGHT = 768, 320

SAVE_DIR = os.path.join('/home/kivan/datasets/results/tmp/cityscapes',
                        utils.get_time_string())

# SPP has 90K
#tf.app.flags.DEFINE_integer('num_iters', 40000, '')
# tf.app.flags.DEFINE_integer('num_iters', 30000, '')
#tf.app.flags.DEFINE_integer('num_iters', 20000, '')
#tf.app.flags.DEFINE_integer('num_iters', 60000, '')
tf.app.flags.DEFINE_integer('max_num_epochs', 30, 'Number of epochs to run.')
###tf.app.flags.DEFINE_integer('num_iters', 20000, '')
tf.app.flags.DEFINE_string('optimizer', 'adam', '')
tf.app.flags.DEFINE_float('decay_power', 1.5, '')
tf.app.flags.DEFINE_float('initial_learning_rate', 5e-4, '')
tf.app.flags.DEFINE_float('end_learning_rate', 5e-5, '')
gen_iter_array = []  # create array for recording epochs of gen_freq updates

# ----------------------------------------------------------------------------
# set up save path
# ----------------------------------------------------------------------------
# Save initial flow image
if not os.path.exists(os.path.dirname('CTEllipse/experimentsEllipse/')):
    print("created experimentsEllipse/")
    print("os.getcwd()", os.getcwd())
    os.makedirs(os.path.dirname('CTEllipse/experimentsEllipse/'))
else:
    print("experimentsEllipse already exists")

save_path = './CTEllipse/experimentsEllipse/' + 'ellipseCT_' + utils.get_time_string() + \
    '_lr{:.0e}'.format(learning_rate) + \
    '_genFreq{:d}'.format(gen_freq) + '_nTrain{:d}'.format(n_train) + '_batchSize{:d}'.format(batch_size) + \
    '_optim' + optim_string + '_etaFreq{:d}'.format(eta_freq) + '_gamma_const{:.0e}'.format(gamma_constant) + \
    '_etaTol{:.0e}'.format(eta_tol) + '_noise_level{:.0e}'.format(noise_level) + \
    '_mu{:.0e}'.format(mu) + '_gp_lam{:d}'.format(gp_lam) + \
    '_distMan_lam{:.0e}'.format(dist_man_lam) + '_lr_decay_iter{:d}'.format(lr_decay_iter) + \
    'doAugment{:d}'.format(do_augment) + '_architecture' + architecture + \
    '_stepType' + step_type + '_tvlam001/'
os.makedirs(save_path, exist_ok=True)

# max and min values for plotting
vmin = min(u0_gen_full_train[0, :].view(-1))
vmax = max(u0_gen_full_train[0, :].view(-1))
vmin_val = min(u_true_full_val[ind_val, :].view(-1))