def main(update_dir):
    """Create a torrent from *update_dir* and seed it over DHT forever."""
    session = lt.session()
    load_state('server.state', session)
    tune_session(session)
    session.listen_on(28136, 28136)

    # Bootstrap the DHT from the usual public routers.
    for router_host in ("router.utorrent.com",
                        "router.bittorrent.com",
                        "dht.transmissionbt.com",
                        "dht.aelitis.com"):
        session.add_dht_router(router_host, 6881)
    session.start_dht()
    #session.start_natpmp()
    #session.start_upnp()
    session.start_lsd()

    torrent_info = make_torrent(update_dir)
    handle = session.add_torrent({
        'ti': torrent_info,
        'save_path': 'upload',
        'storage_mode': lt.storage_mode_t.storage_mode_allocate,
        'paused': False,
        'auto_managed': False,
        'duplicate_is_error': False,
    })
    print('========\nHash is: %s\n========' % (torrent_info.info_hash()))
    print('Start: ', handle.name())

    # Seed indefinitely: report status, surface alerts, persist session state.
    while True:
        status = handle.status()
        print('State: %.2f%% complete (down: %.1f kb/s up: %.1f kB/s peers: %d) %s dht_announce:%s' %
              (status.progress * 100, status.download_rate / 1000,
               status.upload_rate / 1000, status.num_peers,
               status.state, status.announcing_to_dht))
        for alert in session.pop_alerts():
            print('Alert: %s: %s' % (alert.what(), alert.message()))
        save_state('server.state', session)
        time.sleep(1)
    # NOTE(review): unreachable — the loop above never breaks.
    print(handle.name(), '\ncomplete')
def main(uri):
    """Download the torrent whose info-hash is *uri* via a magnet link."""
    session = lt.session()
    load_state('client.state', session)
    tune_session(session)
    session.listen_on(28155, 28155)

    # Bootstrap the DHT from the usual public routers.
    for router_host in ("router.utorrent.com",
                        "router.bittorrent.com",
                        "dht.transmissionbt.com",
                        "dht.aelitis.com"):
        session.add_dht_router(router_host, 6881)
    session.start_dht()
    #session.start_lsd()
    session.start_natpmp()
    session.start_upnp()

    magnet = 'magnet:?xt=urn:btih:%s' % (uri,)
    handle = lt.add_magnet_uri(session, magnet, {
        'save_path': './download',
        'storage_mode': lt.storage_mode_t.storage_mode_allocate,
        'paused': False,
        'auto_managed': False,
        'duplicate_is_error': False,
    })

    def show_alerts():
        # Drain and display any pending libtorrent alerts.
        for alert in session.pop_alerts():
            print('Alert: %s: %s' % (alert.what(), alert.message()))

    # Poll until the handle reaches seed state (download finished).
    while not handle.is_seed():
        status = handle.status()
        print('State: %.2f%% complete (down: %.1f kb/s up: %.1f kB/s peers: %d) %s dht_announce:%s' %
              (status.progress * 100, status.download_rate / 1000,
               status.upload_rate / 1000, status.num_peers,
               status.state, status.announcing_to_dht))
        show_alerts()
        time.sleep(1)

    show_alerts()
    save_state('client.state', session)
    print('Download complete: %s' % (handle.name(),))
def __init__(self, config):
    """Build (or resume) an experiment from *config*.

    Creates the save directory if needed, constructs the model, its
    optimizer(s) and scheduler, and — when the directory already holds
    ``.pt`` checkpoints — reloads the latest model/optimizer/scheduler
    state plus the recorded loss/accuracy histories. Finally moves the
    model (and loaded optimizer tensors) to the configured device.

    Args:
        config: experiment namespace; fields read here are
            ``save_dir``, ``model``, ``device`` and ``cuda``.
    """
    # NOTE(review): this original comment does not match the code below —
    # no list selection happens here; it looks stale.
    # for each item in config that is a list, select current item.
    save_dir = config.save_dir
    if not os.path.exists(save_dir):
        create_exp_dir(save_dir)
    self.save_dir = save_dir
    self.experiment_path = self.save_dir
    # Natural (version-aware) sort; any '.pt' file marks a resumable run.
    state_file_list = natsorted([fn for fn in os.listdir(save_dir)
                                 if fn.endswith('.pt')])
    model = self._get_model(config)
    if config.model not in ['ORNN']:
        optimizer = self._get_optimizer(config, model.parameters())
    else:
        # ORNN trains its (log-)orthogonal parameters with a dedicated
        # optimizer (orth=True); the two optimizers travel as a tuple.
        non_orth_parameters, log_orth_parameters = get_parameters(model)
        normal_opt = self._get_optimizer(config, non_orth_parameters)
        orth_opt = self._get_optimizer(config, log_orth_parameters, orth=True)
        optimizer = (normal_opt, orth_opt)
    scheduler = self._get_scheduler(config, optimizer)
    # if path exists and some model has been saved in it, Load experiment
    if os.path.exists(save_dir) and len(state_file_list):
        # RESUME old experiment
        print('Resuming experiment from: {}'.format(save_dir))
        model, optimizer, scheduler, epoch = load_state(save_dir, model,
                                                        optimizer, scheduler,
                                                        best=False)
        self.epoch = epoch
        (self.train_losses, self.train_accs, self.val_losses,
         self.val_accs, self.val_hist) = load_histories(save_dir)
    # start new experiment
    else:
        (self.train_losses, self.train_accs, self.val_losses,
         self.val_accs, self.val_hist) = ([] for i in range(5))
    self.args = config
    self.model = model
    if config.device is not None:
        self.model = self.model.to(config.device)
    self.optimizer = optimizer
    self.scheduler = scheduler
    if config.cuda:
        self.model = self.model.cuda()
        if self.optimizer is not None:
            # Optimizer state tensors restored from disk live on CPU; move
            # them to GPU so they match the model parameters.
            # NOTE(review): when optimizer is the ORNN tuple it has no
            # `.state` attribute — presumably ORNN is not combined with
            # this cuda path; verify against callers.
            for state in self.optimizer.state.values():
                for k, v in state.items():
                    if isinstance(v, torch.Tensor):
                        state[k] = v.cuda()
def main():
    """Check every configured channel and aggregate their failure codes.

    Returns:
        int: bitwise OR of each channel's check result — 0 when every
        channel passed — suitable for use as a process exit code
        (e.g. ``sys.exit(main())``).
    """
    parser = ArgumentParser()
    parser.add_argument('--skip-live-endpoint', action='store_true')
    parser.add_argument('--verbose', action='store_true')
    args = parser.parse_args()

    retcode = 0
    setup_logging(level=logging.DEBUG if args.verbose else logging.INFO)
    config = load_config()
    state = load_state()
    # Previously-seen per-channel video state, used to detect changes.
    cached_channel_state = state.get('channel_videos', {})
    for channel in config['channels']:
        check_result = check_channel(config, args, channel,
                                     cached_channel_state.get(channel, {}))
        retcode |= int(check_result)
    # Bug fix: the accumulated return code was computed but then discarded;
    # return it so callers can propagate failure to the exit status.
    return retcode
def main():
    """Crawl every catalog category, persist per-stock daily data, and
    update the shared catalog and state files.

    Raises:
        Exception: when a category parses to an empty result, or when two
            categories report different data dates.
    """
    catalog = {}
    curr_data_date = None
    # Add some more to prevent error when new stocks found
    total = _total_stocks() + 10
    widgets = [
        FormatLabel(
            'Processed: %(value)d / {0} (in: %(elapsed)s)'.format(total))
    ]
    pbar = ProgressBar(widgets=widgets, maxval=total)
    count = 0
    pbar.start()
    state = common.load_state()
    for catalog_key, url in CATELOG.items():
        data_date, result = get_category_stock_info(url)
        if not result:
            # Bug fix: '.foramt(' typo raised AttributeError here instead of
            # the intended, informative Exception.
            raise Exception('Empty parsing result, key: {}, url: {}'.format(
                catalog_key, url))
        # All categories must agree on the data date for this run.
        if curr_data_date is None:
            curr_data_date = data_date
        elif curr_data_date != data_date:
            msg = 'Data date is not the same!'\
                  ' curr_data_date: %s, data_date: %s, url: %s'\
                  % (curr_data_date, data_date, url)
            common.report_error(msg)
            raise Exception(msg)
        stype, category = catalog_key
        for stock_no, data in result.items():
            stock_data = common.load_stock(stock_no)
            daily_report = stock_data.setdefault(common.DAILY, {})
            meta = stock_data.setdefault(common.META, {})
            daily_report[data_date] = data
            category_key = SEPARATOR.join(catalog_key)
            meta.update({
                common.META_STOCK_NO: stock_no,
                common.META_COMPANY_TYPE: stype,
                common.META_COMPANY_CATEGORY: category,
                common.META_CATEGORY_KEY: category_key,
                common.META_NAME: data.pop('name'),
                common.META_DAYS: sorted(daily_report.keys(), reverse=True),
            })
            # (a second `stock_data.setdefault(common.META, {}).update(meta)`
            # was removed: `meta` is already the dict stored in stock_data,
            # so re-updating it with itself was a no-op)
            common.save_stock(stock_no, stock_data)
            catalog.setdefault(category_key, []).append(stock_no)
            pbar.update(count)
            count += 1
        if not catalog.setdefault(SEPARATOR.join(catalog_key), []):
            common.report_error('NO STOCK FOUND!!!! %s, %s'
                                % (catalog_key, url))
    common.save_catalog(catalog)
    state[common.CURRENT_DATA_DATE] = curr_data_date
    common.save_state(state)
    pbar.finish()
def transfer(read_from, save_to):
    """Copy all persisted data — catalog, categories, filter/indicator
    results, config, per-stock data, and state — from *read_from* to
    *save_to* (local <-> firebase).

    Exits the process on invalid arguments; asks for interactive
    confirmation before overwriting anything.
    """
    click.echo('%s --> %s' % (read_from, save_to))
    if read_from not in OPTIONS or save_to not in OPTIONS:
        # Fix: converted py2 `print` statements to print() calls for
        # consistency with the rest of the file (single-arg calls behave
        # identically on py2 and py3).
        print('Should be %s or %s' % (LOCAL, FIREBASE))
        sys.exit(-1)
    if read_from == save_to:
        print('Saving data to where it is from does not make sense.')
        sys.exit(-2)
    click.echo('This will OVERWRITE data in "%s". Are you sure? [y/N]'
               % save_to)
    confirm = sys.stdin.readline()
    if confirm.strip() != 'y':
        print('byebye~')
        return
    # Point the common layer at the requested source/target backends.
    common.READ_FROM = common.LOCAL if read_from == LOCAL else common.FIREBASE
    common.SAVE_TO = (common.LOCAL,)\
        if save_to == LOCAL else (common.FIREBASE,)
    # Typo fixes in user-facing messages: 'Transfering' -> 'Transferring',
    # 'sotcks' -> 'stocks'. Redundant re-loads of the catalog (it never
    # changes during the copy) were also dropped.
    print('Transferring catalog...')
    catalog = common.load_catalog()
    common.save_catalog(catalog)
    print('Transferring categories...')
    categories = common.load_categories()
    common.save_categories(categories)
    print('Transferring filter results...')
    f_results = common.load_filter_results()
    common.save_filter_results(f_results)
    print('Transferring indicator results...')
    i_results = common.load_indicator_results()
    common.save_indicator_results(i_results)
    print('Transferring config...')
    config = common.load_config()
    common.save_config(config)
    # Flatten the catalog into the complete list of stock numbers to copy.
    todo = []
    for stocks in catalog.values():
        todo.extend(stocks)
    total = len(todo)
    print('Transferring stocks...')
    widgets = [
        FormatLabel(
            'Processed: %(value)d / {0} (in: %(elapsed)s)'.format(total))
    ]
    pbar = ProgressBar(widgets=widgets, maxval=total)
    count = 0
    pbar.start()
    for s in todo:
        data = common.load_stock(s)
        common.save_stock(s, data)
        pbar.update(count)
        count += 1
    pbar.finish()
    print('Transferring state...')
    state = common.load_state()
    common.save_state(state)
# NOTE(review): this leading fragment duplicates the tail of transfer() and
# references `pbar`, which is not defined at module scope in this view — it
# looks like an overlapping paste; confirm against the full file.
pbar.finish()
print('Transfering state...')
catalog = common.load_catalog()
state = common.load_state()
common.save_state(state)


def load_errors():
    """Load the persisted error list, defaulting to an empty list."""
    return _load_file(ERRORS, default=[])


def save_errors(data):
    """Persist the given error list."""
    _save_file(ERRORS, data)


if __name__ == '__main__':
    # Show the newest data date held by each backend before transferring.
    common.READ_FROM = common.FIREBASE  # load from firebase
    state_firebase = common.load_state()
    common.READ_FROM = common.LOCAL  # load from local
    state_local = common.load_state()
    display_msg = ' firebase: %s\n'\
                  ' local: %s' % (state_firebase[common.CURRENT_DATA_DATE],
                                  state_local[common.CURRENT_DATA_DATE])
    click.echo('Latest data:')
    click.echo(click.style(display_msg, fg='red', bold=True))
    transfer()