def process_sync(sync_id):
    """Create a transfer for the given sync and schedule its next run.

    Bails out (optionally scheduling a retry) when the sync is missing,
    a previous transfer is still running, or either endpoint is unreachable.
    """
    sync = Sync.get(sync_id)
    if not sync:
        return
    # A transfer with finished=None is still in flight: retry later
    if Transfer.find_one({'sync_id': sync['_id'], 'finished': None}):
        set_retry(sync)
        return
    src = get_uris(**sync['src'])
    if not src:
        set_retry(sync)
        return
    dst = get_uris(**sync['dst'])
    if not dst:
        set_retry(sync)
        return
    dst = dst[0]  # only the first destination URI is used
    parameters = {
        'exclusions': sync.get('exclusions'),
        'delete': sync.get('delete'),
    }
    # A 'uuid' on either endpoint indicates a mounted volume is involved
    is_mount = (sync['dst'].get('uuid') or sync['src'].get('uuid')) is not None
    transfer_id = Transfer.add(src, dst, sync_id=sync['_id'],
            parameters=parameters, is_mount=is_mount)
    logger.info('added transfer %s to %s', src, dst)
    sync['transfer_id'] = transfer_id
    sync['processed'] = datetime.utcnow()
    # Reserve the sync until its next recurrence window
    sync['reserved'] = datetime.utcnow() + timedelta(hours=sync['recurrence'])
    Sync.save(sync, safe=True)
def create_media():
    """Create a search from the request JSON, or a direct transfer for URLs.

    Returns a JSON error payload on validation failure; jsonify(result=True)
    when a search is created.
    """
    data = request.json
    type = data.get('type')  # NOTE(review): shadows builtin 'type'
    if not type:
        return jsonify(error='missing type')
    langs = data.get('langs') or []
    if 'id' in data:
        # Build a search from an existing object
        if not data.get('mode'):
            return jsonify(error='missing mode')
        id = ObjectId(data['id'])  # NOTE(review): shadows builtin 'id'
        search = _get_object_search(id, type)
        if not search:
            return jsonify(error='%s %s does not exist' % (type, id))
        search['langs'] = langs
        search['mode'] = data['mode']
        search['safe'] = False
        if not Search.add(**search):
            return jsonify(error='failed to create search %s' % search)
        return jsonify(result=True)
    name = data.get('name')
    if not name:
        return jsonify(error='missing name')
    if type == 'url':
        # URL queries are downloaded directly instead of searched
        dst = Settings.get_settings('paths')['finished_download']
        try:
            Transfer.add(name, dst)
        except Exception, e:
            return jsonify(error='failed to create transfer: %s' % str(e))
    # NOTE(review): falls through returning None on success — confirm intended
class Manager:
    """Top-level game state machine.

    Cycles through the states _INTRO -> _TBG (the board) -> _TRANSFER
    (transition screen) -> _MINI (a minigame), using `catcher` as the
    hand-off value between the current screen and the next.
    """

    def __init__(self, screen, clock, **kwargs):
        # self.turns = turns()
        self.screen = screen
        self.status = _INTRO
        self.minigames = []
        self.board = Board(screen=screen, clock=clock)
        self.intro = Slideshow(screen=screen, slides=((1800000, "intro.png"),))
        # Each entry pairs a minigame with the action verbs it can represent
        self.minigames.append((Missiles(screen=screen, clock=clock), ["Dodge"]))
        self.minigames.append((WhackAMullet(screen=screen, clock=clock), ["Kill"]))
        self.minigames.append((Freeze(screen=screen, clock=clock), ["Dodge", "Kill", "Jump", "Type"]))
        self.minigames.append((Platformer(clock=clock, screen=screen), ["Jump"]))
        # self.minigames.append(EasyWin(screen))
        self.minigames.append((ClickClack(screen), ["Type"]))
        self.current = self.intro
        self.catcher = None  # value handed back by the active screen's update()
        self.wait = 7
        self.currWait = 0
        self.counter = 0  # round-robin index into self.minigames

    def draw(self, screen):
        # NOTE(review): the `screen` parameter is ignored; the active
        # screen draws to the surface it was constructed with.
        self.current.draw()

    def update(self):
        """Advance the active screen and switch states when it yields a result."""
        if self.status != _TRANSFER:
            self.catcher = self.current.update(result=self.catcher)
            if self.status == _MINI and self.catcher != None:
                print "switching to tbg"
                self.current.reset()
                self.current = self.board
                self.status = _TBG
                self.currWait = self.wait
            elif self.status == _TBG and self.catcher != None:
                print "switching to transfer"
                self.easywin = EasyWin(self.screen)
                self.status = _TRANSFER
                # Round-robin selection of the next minigame
                self.counter += 1
                if self.counter == len(self.minigames):
                    self.counter = 0
                select = self.minigames[self.counter]
                # select = self.minigames[random.randint(0,len(self.minigames)-1)]
                # Pick a random verb supported by the chosen minigame
                self.current = Transfer(self.screen, select[1][random.randint(0, len(select[1]) - 1)], select[0])
                # self.current = self.minigames[random.randint(0,len(self.minigames)-1)]
                # self.status = _MINI
                self.currWait = self.wait
            elif self.status == _INTRO and self.catcher != None:
                self.catcher = None
                self.current.reset()
                self.current = self.board
                self.status = _TBG
                self.currWait = self.wait
        else:
            # clear queue
            # self.currWait -= 1
            result = self.current.update()
            pygame.event.get()  # drain the event queue during the transition
            if result != None:
                print "switching to mini"
                self.current = result
                self.status = _MINI
def run():
    """Purge obsolete finished transfers, then clean each active client."""
    cutoff = datetime.utcnow() - DELTA_OBSOLETE
    Transfer.remove({'finished': {'$lt': cutoff}}, safe=True)
    # Each download client is cleaned only when its settings mark it active
    for section, cleaner in (('transmission', clean_torrents),
                             ('sabnzbd', clean_nzbs)):
        if Settings.get_settings(section).get('active', True):
            cleaner()
def create_transfer():
    """Create a transfer from the request JSON ('src' is required)."""
    data = request.json
    src = data.get('src')
    if not src:
        return jsonify(error='missing src')
    try:
        Transfer.add(src, data.get('dst'), data.get('type'))
    except Exception, e:
        return jsonify(error=str(e))
    # NOTE(review): returns None (empty body) on success — confirm intended
def process_query(query):
    '''Process the query and return the number of results.
    '''
    # A query carrying a URL scheme is downloaded directly, not searched
    if urlparse(query).scheme:
        dst = Settings.get_settings('paths')['finished_download']
        try:
            Transfer.add(query, dst)
        except Exception, e:
            logger.error('failed to create transfer for %s: %s',
                    query, str(e))
            return -1
        count = 1
        # NOTE(review): snippet appears truncated — 'count' is assigned but
        # the visible code never returns it; confirm against the full file.
def main():
    """Command-line entry point: resolve credentials, then send a transfer.

    Password lookup order: keyring, then ~/.netrc, then interactive prompt.
    A KeyboardInterrupt aborts cleanly with a warning.
    """
    pm.COMMANDLINE = True
    args = parse_args()
    pwd = None
    try:
        if not args.free:
            # Try stored credentials first
            if KEYRING:
                pwd = keyring.get_password('pyfilemail', args.username)
            elif pm.NETRC:
                machine = pm._netrc.authenticators(args.username)
                if machine:
                    pwd = machine[2]
                else:
                    pwd = None
            if pwd is None:
                # NOTE(review): this line was garbled in the source
                # ('******' redaction). Reconstructed as prompt-then-cache,
                # matching the lookup logic above — confirm upstream.
                pwd = getpass.getpass('Please enter Filemail password: ')
                if KEYRING:
                    keyring.set_password('pyfilemail', args.username, pwd)
        fm_user = User(args.username, password=pwd)
        transfer = Transfer(
            fm_user,
            to=args.to,
            subject=args.subject,
            message=args.message,
            notify=args.notify,
            confirmation=args.confirm,
            days=args.days,
            downloads=args.downloads,
            password=args.password,
            checksum=args.checksum,
            zip_=args.compress
        )
        transfer.add_files(args.payload)
        res = transfer.send()
        if res.status_code == 200:
            msg = '\nTransfer complete!'
            logger.info(msg)
    except KeyboardInterrupt:
        msg = '\nAborted by user!'
        logger.warning(msg)
def _restore_transfers(self, response):
    """Restore transfers from JSON retrieved from Filemail.

    :param response: response object from request
    :rtype: ``list`` with :class:`Transfer` objects
    """
    transfers = []
    for transfer_data in response.json()['transfers']:
        # _restore=True skips the normal transfer-creation handshake
        transfer = Transfer(self, _restore=True)
        transfer.transfer_info.update(transfer_data)
        transfer.get_files()
        transfers.append(transfer)
    return transfers
def _callback(self, processed, total):
    """Progress callback for an sftp transfer; persists throttled updates.

    Exits the worker process when the transfer was aborted externally
    (its 'finished' flag was set by someone else).
    """
    if not Transfer.find_one({
            '_id': self.transfer['_id'],
            'finished': None,
            }):
        logger.info('aborted sftp transfer %s to %s',
                self.transfer['src'], self.transfer['dst'])
        sys.exit(1)  # to avoid zombies
    now = time.time()
    # Throttle DB writes to one per PROGRESS_CALLBACK_DELTA seconds
    if now - self.last_callback < settings.PROGRESS_CALLBACK_DELTA:
        return
    self.last_callback = now
    self.transfer['transferred'] = self.transferred
    self.transfer['info']['name'] = self.name
    Transfer.save(self.transfer, safe=True)
def __init__(self, M=None, mf_fit="ST", delta_h=200.0, delta_wrt='mean',
             cut_fit=True, z2=None, nz=None, delta_c=1.686,
             mv_scheme="trapz", **kwargs):
    """
    Initializes some parameters
    """
    # Default mass grid: log10(M) from 10 to 15 in 501 steps
    if M is None:
        M = np.linspace(10, 15, 501)
    # A list of all available kwargs (sent to Cosmology via Transfer)
    self._cp = ["sigma_8", "n", "w", "cs2_lam", "t_cmb", "y_he", "N_nu",
                "omegan", "H0", "h", "omegab", "omegac", "omegav",
                "omegab_h2", "omegac_h2", "force_flat", "default"]
    # Set up a simple dictionary of kwargs which can be later updated
    self._cpdict = {k:v for k, v in kwargs.iteritems() if k in self._cp}
    # Set all given parameters.
    self.mf_fit = mf_fit
    self.M = M
    self.delta_h = delta_h
    self.delta_wrt = delta_wrt
    self.cut_fit = cut_fit
    self.z2 = z2
    self.nz = nz
    self.delta_c = delta_c
    # Remaining kwargs configure the transfer function / cosmology
    self.transfer = Transfer(**kwargs)
    self.mv_scheme = mv_scheme
    # Sanity-check that the k range covers the requested mass range
    tools.check_kr(self.M[0], self.M[-1], self.cosmo.mean_dens,
                   self.transfer.lnk[0], self.transfer.lnk[-1])
def process(self):
    """Run the search, record results, and start transfers for valid hits.

    Updates per-run counters in self.session (errors/results/pending/downloads).
    """
    query = self._get_query()
    dst = Settings.get_settings('paths')['finished_download']
    logger.info('processing %s search "%s"', self.category, query)
    self._search_url()
    for result in results(query,
            category=self.category,
            sort=self.session['sort_results'],
            pages_max=self.session['pages_max'],
            **self._get_filters(query)):
        if not result:
            self.session['nb_errors'] += 1
            continue
        Result.add_result(result, search_id=self._id)
        if not result.auto:
            continue
        if self.safe and not result.safe:
            continue
        # Deduplicate against existing transfers, by hash when available
        if result.get('hash'):
            spec = {'info.hash': result.hash}
        else:
            spec = {'src': result.url}
        if Transfer.find_one(spec):
            continue
        self.session['nb_results'] += 1
        if not self._validate_result(result):
            self.session['nb_pending'] += 1
            continue
        # Incremental mode queues the next episode as well
        if self.mode == 'inc':
            self._add_next('episode')
        transfer_id = Transfer.add(result.url, dst, type=result.type)
        self.transfers.insert(0, transfer_id)
        self.session['nb_downloads'] += 1
        logger.info('found "%s" on %s (%s)',
                result.title, result.plugin, result.url)
        # Only 'ever' mode keeps downloading past the first hit
        if self.mode != 'ever':
            break
def manage_torrent(client, hash, dst):
    """Validate, finalize, or expire a single torrent.

    Invalid torrents are moved to the 'invalid' path and removed; finished
    torrents are moved to dst and removed; stalled/old ones are deleted per
    the 'torrent' settings deltas.
    """
    try:
        torrent = client.get_torrent(hash=hash)
        if not torrent:
            return
        if not client.check_torrent_files(torrent,
                check_unfinished=settings.CHECK_UNFINISHED_TORRENTS):
            # Files failed validation: quarantine (if complete) and remove
            invalid_dir = str(Settings.get_settings('paths')['invalid'])
            if torrent.progress == 100 and not client.move_files(torrent, invalid_dir):
                return
            if client.remove_torrent(hash=hash, delete_data=True):
                logger.info('removed invalid torrent "%s" (%s%% done)',
                        torrent.name, int(torrent.progress))
                Transfer.update({'info.hash': hash, 'finished': None},
                        {'$set': {'finished': datetime.utcnow()}}, safe=True)
            return
        if torrent.progress == 100:
            # Finished: move payload to its destination, then drop the torrent
            destination = client.get_destination_dir(torrent, dst)
            if not client.move_files(torrent, destination):
                return
            if client.remove_torrent(hash=hash):
                logger.info('moved finished torrent "%s" to %s', torrent.name, dst)
                Transfer.update({'info.hash': hash, 'finished': None},
                        {'$set': {'finished': datetime.utcnow()}}, safe=True)
            return
        # Still downloading: expire by inactivity or by age (hours)
        torrent_settings = Settings.get_settings('torrent')
        now = datetime.utcnow()
        inactive_delta = torrent_settings['inactive_delta']
        if inactive_delta:
            date = torrent.date_active or torrent.date_added
            if date < now - timedelta(hours=inactive_delta) \
                    and client.remove_torrent(hash=hash, delete_data=True):
                logger.debug('removed inactive torrent "%s": no activity since %s',
                        torrent.name, date)
                return
        added_delta = torrent_settings['added_delta']
        if added_delta:
            date = torrent.date_added
            if date < now - timedelta(hours=added_delta) \
                    and client.remove_torrent(hash=hash, delete_data=True):
                logger.debug('removed obsolete torrent "%s": added %s',
                        torrent.name, date)
    except TransmissionError, e:
        logger.error('torrent client error: %s', str(e))
def process_transfer(self, id):
    """Queue an unqueued torrent transfer with the torrent client."""
    transfer = Transfer.find_one({'_id': id, 'queued': None})
    if not transfer:
        return
    try:
        info = get_torrent_client().add_torrent(transfer['src'])
        Transfer.update({'_id': transfer['_id']}, {'$set': {
            'queued': datetime.utcnow(),
            'info': info,
        }}, safe=True)
    except TorrentExists, e:
        # Already known to the client: mark the transfer finished
        Transfer.update({'_id': transfer['_id']},
                {'$set': {'finished': datetime.utcnow()}}, safe=True)
        logger.debug('failed to start torrent: %s', str(e))
def run():
    """Dispatch pending transfers to workers, honoring per-type limits.

    Picks transfers that were never added or whose retry delay has elapsed
    (and are under max_tries), skipping types already at their worker limit.
    """
    running = {}
    # Count transfers currently in flight, per type
    for res in Transfer.find({
            'started': {'$ne': None},
            'finished': None,
            }):
        add_running(running, res['type'])
    settings_ = Settings.get_settings('general')
    retry_delta = timedelta(seconds=settings_['retry_delta'])
    for transfer in Transfer.find({
            'finished': None,
            '$or': [
                {'added': None},
                {
                    'added': {'$lt': datetime.utcnow() - retry_delta},
                    'started': None,
                    'tries': {'$lt': settings_['max_tries']},
                },
            ],
            }):
        limit = settings.WORKERS_LIMITS.get(transfer['type'])
        if limit and running.get(transfer['type'], 0) >= limit:
            continue
        factory = get_factory()
        target = '%s.workers.add.process' % settings.PACKAGE_NAME
        # Skip if an identical worker task is already scheduled
        if factory.get(target=target,
                args=(transfer['_id'], transfer['type'])):
            continue
        factory.add(target=target,
                args=(transfer['_id'], transfer['type']),
                timeout=settings.PROCESS_TIMEOUT)
        now = datetime.utcnow()
        Transfer.update({'_id': transfer['_id']}, {
            '$set': {'added': now, 'started': now},
            '$inc': {'tries': 1},
        }, safe=True)
        add_running(running, transfer['type'])
        count_str = ' (#%s)' % transfer['tries'] if transfer['tries'] > 1 else ''
        logger.info('started%s %s transfer %s to %s', count_str,
                transfer['type'], transfer['src'], transfer['dst'])
def audio(self):
    """Request the server's audio file list for the selected local folder.

    Pops an error dialog when no audio folder has been selected.
    """
    print("audio")
    # Idiomatic emptiness test (was: len(self.audio_folder) != 0)
    if self.audio_folder:
        files = Transfer(self.audio_folder)
        self.s.send(str.encode("!list_server_audio_files"))
        self.listen_list(files)
    else:
        message_box = Popups("Error", "Please select audio folder")
        message_box.message_box()
def __init__(self, conf_obj):
    """This class returns an object which has the required configuration
    values. Please add/delete data from __init__ to suit requirements."""
    self.test = 0
    self.conf_obj = conf_obj
    # Two transfer strategies: sftp for remote, plain copy for local
    self.transSftp = Transfer("sftp")
    # for next version this should be changed to
    # ftp_trans.FtpTrans(self.conf_obj)
    self.ftp_obj = ftp_trans.FtpTrans()
    self.runid = self.conf_obj.runid
    self.backup_dir = self.conf_obj.backup_path
    # Change below code to modify logging format
    if not os.path.isdir(self.backup_dir):
        raise error.FnfError(self.backup_dir,
                "Error: backup dir not found" + self.backup_dir + "; Exiting")
    self.root_dir = self.conf_obj.root_path
    self.runid_dir = os.getcwd()
    self.transCopy = Transfer("copy")
def test_predict(self):
    """Smoke-test single and batched prediction on one whole-slide image."""
    c = Params()
    c.load_config_file(JSON_PATH)
    cnn = Transfer(c, MODEL_NAME, PATCH_TYPE)
    model = cnn.load_model(mode=0)
    model.compile(optimizer="RMSprop", loss='categorical_crossentropy',
                  metrics=['accuracy'])
    imgCone = ImageCone(c, Open_Slide())
    # Load the digital whole-slide scan image
    tag = imgCone.open_slide("Tumor/Tumor_004.tif", None, "Tumor_004")
    seeds = [(34880, 48960), (36224, 49920), (13312, 57856)]  # C, C, S
    result = cnn.predict(model, imgCone, 20, 256, seeds)
    print(result)
    result = cnn.predict_on_batch(model, imgCone, 20, 256, seeds, 2)
    print(result)
def getEntityByFilter(self, filters):
    """Return every entity of this kind matching all given filters.

    Arguments:
        filters {[[array of filters]]} -- [filters for datastore of the
            form ["key","operator","val"]]

    Returns:
        [list] -- [list of matching entities]
    """
    query = self._client.query(kind=self._kind)
    # Apply each ["key", "operator", "value"] triple to the query
    for condition in filters:
        query.add_filter(*condition)
    matched = []
    for entity in list(query.fetch()):
        record = Transfer(**entity)
        record.transferId = entity.key.id
        matched.append(record.get_dict())
    return matched
def test_extract_features_for_train_batch(self):
    """Extract training features for each (patch type, sample file, network)."""
    c = Params()
    c.load_config_file(JSON_PATH)
    # "inception_v3", "densenet121", "densenet169", "densenet201", "resnet50",
    # "inception_resnet_v2", "vgg16", "mobilenet_v2"
    MODEL_NAME_set = ["densenet169"]
    SAMPLE_FIlENAME_set = [
        "T_NC_2000_256",
        "T_NC_4000_256"
    ]  # "T_NC_500_128", , "T_NC_4000_256" "T_NC_2000_256"
    PATCH_TYPE_set = ["2000_256", "4000_256"]  # "500_128", , "4000_256" "2000_256"
    # Patch types and sample files are paired positionally
    type_set = zip(PATCH_TYPE_set, SAMPLE_FIlENAME_set)
    for p_item, f_item in type_set:
        for net_item in MODEL_NAME_set:
            cnn = Transfer(c, net_item, p_item)
            cnn.extract_features_for_train(f_item, 100)
def debit_points(self, user, signature, amount=0.0):
    """Debit points from user. Need to verify sufficient points.

    Returns True when the transfer verified and was queued; False when
    there is no hosting node or verification failed.
    """
    if self.hosting_node is None:  # idiom fix: was '== None'
        return False
    transfer = Transfer(user, signature, amount)
    # Verification checks the signature and that the balance covers amount
    if Verification.verify_single_transfer(transfer, self.get_balance):
        self.__open_transfers.append(transfer)
        # participants.add(user)
        self.save_data()
        return True
    return False
def credit_points(self, user, signature, amount=0.0):
    """Credit points to user. No checks required

    Returns True when the transfer was queued; False when there is no
    hosting node or the wallet signature is invalid.
    """
    if self.hosting_node is None:  # idiom fix: was '== None'
        return False
    transfer = Transfer(user, signature, amount)
    if not Wallet.verify_transfer(transfer):
        return False
    self.__open_transfers.append(transfer)
    # participants.add(user)
    self.save_data()
    return True
def transfer_event_source(request: Request):
    """
    SSE (Server Sent Event) endpoint for transfer events.
    """
    sse_headers = {
        'Cache-Control': 'no-cache',
        # Disable proxy (nginx) buffering so events flush immediately
        'X-Accel-Buffering': 'no',
    }
    return StreamingResponse(Transfer.stream(request),
                             media_type='text/event-stream',
                             headers=sse_headers)
def connect(self):
    """Connect to the chat server and register the username.

    Returns True on success, or an error message string for the UI.
    """
    self.s = socket.socket()
    self.s.settimeout(5)  # short timeout only for the initial handshake
    try:
        self.s.connect(self.addr)
    except socket.error:
        return "Server not found. Try again."
    self.trans = Transfer(self.s)
    self.trans.send(self.username.encode())
    response = self.trans.recvData()
    if not response:
        return "Server not responding."
    if response == b"in-use":
        return "Username already in use."
    if response == b"success":
        self.s.settimeout(None)
        self.connected = True
        # Windows-only keepalive tuning: (on, idle ms, interval ms)
        self.s.ioctl(socket.SIO_KEEPALIVE_VALS, (1, 5000, 3000))
        threading.Thread(target=self.mainThread, daemon=True).start()
        return True
def process_transfer(self, id):
    """Resolve filestube download URLs for the transfer's source."""
    from mediacore.web.search.plugins.filestube import get_download_urls, FilestubeError
    self.transfer = Transfer.find_one({'_id': id})
    if not self.transfer:
        return
    try:
        src = get_download_urls(self.transfer['src'])
    except FilestubeError, e:
        logger.error('failed to get filestube urls for %s: %s',
                self.transfer['src'], str(e))
        return
class TransferTest(unittest.TestCase):
    """Tests for Transfer.album_changed() across old/new album combinations."""

    def setUp(self):
        # Mock out spotipy and the iTunes album lookup
        sp = MagicMock(spotipy.Spotify())
        self.transfer = Transfer(sp, True)
        self.abbey_road = ['Abbey Road', 'The Beatles']
        self.transfer.get_itunes_album = MagicMock(
            return_value=self.abbey_road)
        self.transfer.curr_album_artist = None

    def test_changed_None_Some(self):
        # No current album -> a new album counts as changed
        changed = self.transfer.album_changed()
        self.assertTrue(changed)
        self.assertEqual(self.transfer.get_album_artist(), self.abbey_road)

    def test_changed_Some_Some(self):
        # Different current album -> changed
        self.transfer.curr_album_artist = ['Revolver', 'The Beatles']
        changed = self.transfer.album_changed()
        self.assertTrue(changed)
        self.assertEqual(self.transfer.get_album_artist(), self.abbey_road)

    def test_changed_None_None(self):
        self.transfer.album_changed = MagicMock(return_value=None)
        changed = self.transfer.album_changed()
        self.assertFalse(changed)

    def test_changed_Some_None(self):
        self.transfer.album_changed = MagicMock(return_value=None)
        self.transfer.curr_album_artist = self.abbey_road
        changed = self.transfer.album_changed()
        self.assertFalse(changed)
def _callback(self, download_total, downloaded, upload_total, uploaded):
    """Progress callback (pycurl-style signature); persists throttled updates.

    Sets self.aborted when the transfer was finished/cancelled externally.
    """
    if not Transfer.find_one({
            '_id': self.transfer['_id'],
            'finished': None,
            }):
        self.aborted = True
        return
    now = time.time()
    # Throttle DB writes to one per PROGRESS_CALLBACK_DELTA seconds
    if now - self.last_callback < settings.PROGRESS_CALLBACK_DELTA:
        return
    self.last_callback = now
    # Only one of downloaded/uploaded is non-zero for a given direction
    transferred = downloaded or uploaded
    self.transferred_current = transferred
    transferred += self.transferred  # add bytes from previously completed files
    self.transfer['total'] = self.total
    self.transfer['transferred'] = transferred
    total = float(self.transfer['total'])
    self.transfer['progress'] = transferred * 100 / total if total else 0
    self.transfer['info']['name'] = self.name
    Transfer.save(self.transfer, safe=True)
def process_transfer(self, id):
    """Download the .torrent payload for an unqueued rutracker transfer."""
    import media
    from mediacore.web.search.plugins.rutracker import download_torrent, DownloadError
    transfer = Transfer.find_one({'_id': id, 'queued': None})
    if not transfer:
        return
    try:
        data = download_torrent(transfer['src'])
    except DownloadError, e:
        logger.error('failed to start torrent: %s', str(e))
        return
def load_data(self):
    """Initialize blockchain + open transfers data from a file"""
    try:
        with open("blockchain.txt", mode="r") as f:
            file_content = f.readlines()
            # Line 1: chain, line 2: open transfers, line 3: peer nodes.
            # [:-1] strips the trailing newline before JSON parsing.
            blockchain = json.loads(file_content[0][:-1])  # OrderedDict
            updated_blockchain = []
            for block in blockchain:
                # Rehydrate plain dicts into Transfer objects
                converted_transfers = [
                    Transfer(tx["user"], tx["signature"], tx["amount"])
                    for tx in block["transfers"]
                ]
                # converted_transfers = [OrderedDict(
                #     [('user', tx['user']), ('amount', tx['amount'])]) for tx in block['transfers']]
                updated_block = Block(
                    block["index"],
                    block["previous_hash"],
                    converted_transfers,
                    block["proof"],
                    block["timestamp"],
                )
                updated_blockchain.append(updated_block)
            self.__chain = updated_blockchain
            open_transfers = json.loads(file_content[1][:-1])  # OrderedDict
            updated_transfers = []
            for tx in open_transfers:
                updated_transfer = Transfer(tx["user"], tx["signature"], tx["amount"])
                # updated_transfer = OrderedDict(
                #     [('user', tx['user']), ('amount', tx['amount'])])
                updated_transfers.append(updated_transfer)
            self.__open_transfers = updated_transfers
            peer_nodes = json.loads(file_content[2])
            self.__peer_nodes = set(peer_nodes)
    except (IOError, IndexError):
        # Missing or partial file: start with in-memory defaults
        pass
def process_transfer(self, id):
    """Run an http transfer end to end and persist its final state."""
    self.transfer = Transfer.find_one({'_id': id})
    if not self.transfer:
        return
    src = self.transfer['src']
    dst = self.transfer['dst']
    # Normalize a single source into a list
    if not isinstance(src, (tuple, list)):
        src = [src]
    temp_dir = Settings.get_settings('paths')['tmp']
    res = self.process(src, dst, temp_dir)
    if res:
        self.transfer['info']['files'] = res
        self.transfer['transferred'] = self.total
        self.transfer['progress'] = 100
        logger.info('finished http transfer %s to %s', src, dst)
    else:
        # Clearing 'started' allows the scheduler to retry it
        self.transfer['started'] = None
        logger.error('failed to process http transfer %s to %s', src, dst)
    self.transfer['finished'] = datetime.utcnow()
    Transfer.save(self.transfer, safe=True)
def process_transfer(self, id):
    """Fetch the NZB payload for an unqueued binsearch transfer."""
    from mediacore.web.search.plugins.binsearch import get_nzb, BinsearchError
    transfer = Transfer.find_one({'_id': id, 'queued': None})
    if not transfer:
        return
    # The nzb name comes from the 'b' query-string parameter of the src URL
    qs = parse_qs(urlparse(transfer['src']).query)
    try:
        name = clean(qs['b'][0])
    except KeyError:
        # Unusable source URL: mark finished so it is not retried
        Transfer.update({'_id': transfer['_id']},
                {'$set': {'finished': datetime.utcnow()}}, safe=True)
        logger.error('failed to get nzb name from %s', transfer['src'])
        return
    try:
        nzb_data = get_nzb(transfer['src'])
    except BinsearchError, e:
        # Transient failure: clear 'started' so the scheduler retries
        Transfer.update({'_id': transfer['_id']},
                {'$set': {'started': None}}, safe=True)
        logger.error('failed to get nzb data from %s: %s',
                transfer['src'], str(e))
        return
def manage_nzbs():
    """Synchronize nzb transfers with the download client (sabnzbd).

    Updates progress for running transfers, adopts client-side nzbs with no
    transfer, removes finished ones, and routes history entries to their
    destination (or the 'invalid' path on failure).
    """
    client = get_nzb_client()
    # Refresh progress for every running binsearch transfer
    for transfer in Transfer.find({
            'started': {'$ne': None},
            'finished': None,
            'type': 'binsearch',
            }):
        nzb_id = transfer['info'].get('nzo_id')
        if not nzb_id:
            continue
        info = client.get_nzb(nzb_id)
        if not info:
            # Gone from the client: consider it finished
            Transfer.update({'_id': transfer['_id']},
                    {'$set': {'finished': datetime.utcnow()}}, safe=True)
        else:
            info['name'] = info.get('filename', transfer['info'].get('name'))
            total = get_float(info.get('mb', 0)) * 1024 ** 2
            Transfer.update({'_id': transfer['_id']}, {'$set': {
                'total': total,
                'transferred': total - get_float(info.get('mbleft', 0)) * 1024 ** 2,
                'progress': get_float(info.get('percentage', 0)),
                'info': info,
            }}, safe=True)
    paths = Settings.get_settings('paths')
    # Manage queued nzbs
    for nzb in client.list_nzbs():
        transfer = get_nzb_transfer(nzb['nzo_id'])
        if not transfer:
            # Adopt a client-side nzb that has no transfer record yet
            now = datetime.utcnow()
            Transfer.add(nzb['filename'], str(paths['default']),
                    type='binsearch', added=now, started=now, queued=now,
                    info={'nzo_id': nzb['nzo_id']})
        elif transfer['finished']:
            client.remove_nzb(nzb['nzo_id'])
            logger.info('removed finished nzb "%s" (%s)',
                    nzb['filename'], nzb['nzo_id'])
    # Manage finished nzbs
    for nzb in client.list_nzbs(history=True):
        transfer = get_nzb_transfer(nzb['nzo_id'])
        if nzb['status'] == 'Completed':
            dst = transfer['dst'] if transfer else str(paths['default'])
        elif nzb['status'] == 'Failed':
            dst = str(paths['invalid'])
        else:
            continue
        manage_nzb(client, nzb_id=nzb['nzo_id'], dst=dst)
def __init__(self):
    """Build the main frame with one notebook tab per budget view."""
    wx.Frame.__init__(self, None, -1, "Budget ver. 1.0", size=(1100, 650))
    self.Centre(wx.BOTH)
    panel = wx.Panel(self, -1)
    notebook = wx.Notebook(panel)
    # Tab pages in display order: (page class, tab label)
    pages = (
        (Expenses, "Expenses"),
        (Income, "Income"),
        (Transfer, "Transfer"),
        (Accounts, "Accounts"),
        (Analysis, "Analysis"),
        (Manage, "Manage"),
    )
    for page_cls, label in pages:
        notebook.AddPage(page_cls(notebook), label)
    sizer = wx.BoxSizer()
    sizer.Add(notebook, 1, wx.EXPAND)
    panel.SetSizer(sizer)
def __init__(self, verbose=False):
    """Load configuration, set up logging, and initialize the transfer module."""
    self.classname = "pyUpdater"
    self.version = "0.1"
    self.configdir = "/opt/pyUpdater"
    self.configfile = "conf/pyUpdater.json"
    self.configCustomFile = ""
    self.verbose = verbose
    self.loadConfig()
    logging.basicConfig(filename=self.config['logfile'],
                        level=self.config['loglevel'],
                        format=self.config['logformat'])
    # Files never overwritten during an update (';'-separated whitelist)
    self.preservefile = self.config['whitelist'].split(';')
    self.createTmpDir()
    # INIT transfer module
    self.tr = Tr()
    self.init_transfer()
def process_transfer(self, id):
    """Run an ftp transfer using the transfer's stored parameters."""
    self.transfer = Transfer.find_one({'_id': id})
    if not self.transfer:
        return
    src = self.transfer['src']
    dst = self.transfer['dst']
    parameters = self.transfer.get('parameters', {})
    try:
        self.process(src, dst,
                exclude=parameters.get('exclusions'),
                delete=parameters.get('delete', True))
        self.transfer['transferred'] = self.transferred
        self.transfer['progress'] = 100
        logger.info('finished ftp transfer %s to %s', src, dst)
    except Exception, e:
        # Clearing 'started' lets the scheduler retry the transfer
        self.transfer['started'] = None
        logger.error('failed to process ftp transfer %s to %s: %s',
                src, dst, str(e))
    # NOTE(review): unlike the http worker, this never sets 'finished' or
    # calls Transfer.save — confirm the caller persists self.transfer.
def get_chosen_transfer(self, startTime, flightTime):
    """Returns the transfer with the specified start and flight times.

    Arguments:
        startTime (float): time in seconds since epoch of transfer start
        flightTime (float): time in seconds of transfer duration

    Returns:
        The transfer at with the specified start and flight times

    Raises:
        Exception: if self.transferType is not one of 'ballistic',
            'plane change', or 'optimal'.
    """
    if self.transferType == 'ballistic':
        trs = Transfer(self.startOrbit, self.endOrbit, startTime, \
                       flightTime, False, self.ignoreInsertion, \
                       self.cheapStartOrb, self.cheapEndOrb)
    elif self.transferType == 'plane change':
        trs = Transfer(self.startOrbit, self.endOrbit, startTime, \
                       flightTime, True, self.ignoreInsertion, \
                       self.cheapStartOrb, self.cheapEndOrb)
    elif self.transferType == 'optimal':
        # Build both variants and keep whichever needs less total delta-v
        btr = Transfer(self.startOrbit, self.endOrbit, \
                       startTime, flightTime, \
                       False, self.ignoreInsertion, \
                       self.cheapStartOrb, self.cheapEndOrb)
        ptr = Transfer(self.startOrbit, self.endOrbit, \
                       startTime, flightTime, \
                       True, self.ignoreInsertion, \
                       self.cheapStartOrb, self.cheapEndOrb)
        bdv = btr.get_total_delta_v()
        pdv = ptr.get_total_delta_v()
        if bdv <= pdv:
            trs = btr
        else:
            trs = ptr
    else:
        # typo fixed: was 'uncrecognized'
        raise Exception('unrecognized transfer type')
    # gen = trs.genetic_refine()
    return trs  # , gen
def updateTransfer(userId, transferId):
    """update transfer in db with matching Entity id

    Returns:
        [Response] -- [updated transfer,response code]
    """
    try:
        # Validate once and reuse the result (was validated twice per request)
        validation = validateTransferBody(connexion.request.json)
        if validation is True:
            transfer = dsHelper.updateEntity(
                transferId, Transfer(userId=userId, **connexion.request.json))
            # NOTE(review): if get_dict() builds a fresh dict each call,
            # this pop has no effect on the returned dict — confirm.
            transfer.get_dict().pop("deleted", None)
            return transfer.get_dict(), 200
        return validation, 400
    except Exception as e:
        print(e)
        traceback.print_tb(e.__traceback__)
        return False, 400
    # removed: unreachable 'return None' after the except clause
def add_transfer(self, outgoing_id, receiving_id, transfer_amount):
    """ Convenient transfer wrapper

    Creates a Transfer row, then adjusts both program balances using the
    programs' transfer ratio. Returns the new (unsaved) Transfer.
    """
    transfer = Transfer(user_id=self.user_id,
                        outgoing_program=outgoing_id,
                        receiving_program=receiving_id,
                        outgoing_amount=transfer_amount)
    db.session.add(transfer)
    outgoing_program = self.get_balance(outgoing_id)
    receiving_program = self.get_balance(receiving_id)
    # Conversion rate from the outgoing program to the receiving one
    transfer_ratio = ratio_instance(outgoing_id, receiving_id).ratio_to()
    # Update balance for outgoing & receiving program in balances table
    outgoing_program.transferred_from(transfer_amount)
    receiving_program.transferred_to(transfer_amount, transfer_ratio)
    return transfer
def list_syncs():
    """Serialize all syncs with their latest transfer and a derived status.

    Status: 'pending' (no transfer yet), 'ok' (finished within the
    recurrence window + 24h), else 'queued'.
    """
    now = datetime.utcnow()
    items = []
    for res in Sync.find(sort=[('processed', DESCENDING)]):
        res['name'] = '%s to %s' % (_get_params_str(res['src']),
                _get_params_str(res['dst']))
        if res.get('transfer_id'):
            transfer = Transfer.find_one({'_id': ObjectId(res['transfer_id'])})
        else:
            transfer = None
        res['transfer'] = transfer or {}
        if not transfer:
            res['status'] = 'pending'
        elif transfer['finished'] and transfer['finished'] > now - timedelta(hours=res['recurrence'] + 24):
            res['status'] = 'ok'
        else:
            res['status'] = 'queued'
        items.append(res)
    return serialize({'result': items})
def show_menu(self):
    """Print the main menu and dispatch on the user's numeric choice.

    Any choice other than 1-4 exits (non-numeric input raises ValueError,
    as in the original).
    """
    menu_lines = (
        ' MAIN MENU',
        ' ---------',
        ' 1: View Pokemon List',
        ' 2: Transfer Menu',
        ' 3: Evolve Menu',
        ' 4: Rename Menu',
        ' 0: Exit',
    )
    print()
    for line in menu_lines:
        print(line)
    choice = int(input("\nEnter choice: "))
    if choice == 1:
        self.print_pokemons(self.pokemons)
    elif choice == 2:
        Transfer(self).run()
    elif choice == 3:
        Evolve(self).run()
    elif choice == 4:
        Renamer(self).run()
    else:
        # 0 and any unrecognized number both exit
        quit()
def update(self):
    """Advance the active screen and switch states when it yields a result.

    States: _INTRO -> _TBG (board) -> _TRANSFER (transition) -> _MINI;
    `catcher` carries the hand-off value from the active screen.
    """
    if self.status != _TRANSFER:
        self.catcher = self.current.update(result=self.catcher)
        if self.status == _MINI and self.catcher != None:
            print "switching to tbg"
            self.current.reset()
            self.current = self.board
            self.status = _TBG
            self.currWait = self.wait
        elif self.status == _TBG and self.catcher != None:
            print "switching to transfer"
            self.easywin = EasyWin(self.screen)
            self.status = _TRANSFER
            # Round-robin selection of the next minigame
            self.counter += 1
            if self.counter == len(self.minigames):
                self.counter = 0
            select = self.minigames[self.counter]
            # select = self.minigames[random.randint(0,len(self.minigames)-1)]
            # Pick a random verb supported by the chosen minigame
            self.current = Transfer(self.screen, select[1][random.randint(0, len(select[1]) - 1)], select[0])
            # self.current = self.minigames[random.randint(0,len(self.minigames)-1)]
            # self.status = _MINI
            self.currWait = self.wait
        elif self.status == _INTRO and self.catcher != None:
            self.catcher = None
            self.current.reset()
            self.current = self.board
            self.status = _TBG
            self.currWait = self.wait
    else:
        # clear queue
        # self.currWait -= 1
        result = self.current.update()
        pygame.event.get()  # drain the event queue during the transition
        if result != None:
            print "switching to mini"
            self.current = result
            self.status = _MINI
async def upload_file(
    *,
    path,
    force: bool = Query(
        False,
        title='Force create even existed, old file will be deleted',
    ),
    transfer_id: str = Query(
        None,
        alias='transfer',
        title='Transfer UUIDv4',
    ),
    request: Request,
):
    """Stream the request body into a file node, optionally tracking progress.

    Raises 409 when the file exists and force is not set.
    """
    ensure_me(request)
    f = File(path)
    ensure_not_type(f, NodeType.Dir)  # refuse to overwrite a directory
    if f and not force:
        raise HTTPException(409, 'Existed')
    if transfer_id:
        # Track upload progress against the declared content length
        transfer = Transfer(transfer_id, int(request.headers['content-length']))
    else:
        transfer = None
    await f.create(request.stream(), transfer=transfer)
def postTransfer(userId):
    """add a new transfer to db

    Returns:
        [Response] -- [id of the newly inserted datastore entity,status code]
    """
    try:
        if validateTransferBody(connexion.request.json) is True:
            if (transactionCheck := makeTransaction(  # noqa: E231,E203,E999,E251,E261
                    userId, connexion.request.json["amount"])) is not True:
                print(transactionCheck)
                return (
                    {
                        "error": "error creating transaction: {}".format(
                            transactionCheck["error"])
                    },
                    400,
                )
            transferId = dsHelper.putEntity(
                Transfer(userId=userId, **connexion.request.json))
            return {"transferId": transferId}, 201
        return validateTransferBody(connexion.request.json), 400
    # NOTE(review): the source was truncated after the try block (no except
    # clause); this handler is reconstructed to match updateTransfer's —
    # confirm against the full file.
    except Exception as e:
        print(e)
        traceback.print_tb(e.__traceback__)
        return False, 400
def rekordbox_sync(self, drive):
    """Send the rekordbox database and analysis files to the server.

    `drive` is a Windows drive path (e.g. 'C:\\'); the user's roaming
    rekordbox folder is derived from it.
    """
    username = os.getlogin()
    files = Transfer(
        f"{drive[:3]}Users\\{username}\\AppData\\Roaming\\Pioneer\\rekordbox\\"
    )
    # rb_files = ["automixPlaylist6.xml"]
    rb_files = [
        "master.db", "master.backup.db", "networkAnalyze6.db",
        "masterPlaylists6.xml", "automixPlaylist6.xml"
    ]
    for rb_file in rb_files:
        files.prepare_to_send_file(self.s, files.path, rb_file)
    # Walk and send the artwork/waveform share directory recursively
    for root, d, file in os.walk(f"{files.path}share\\PIONEER"):
        if files.send_root(self.s, root):
            for f in file:
                files.prepare_to_send_file(self.s, root, f)
    message_box = Popups(
        "Finished",
        "Finished sending database, artwork and waveform files")
    message_box.message_box()
def manage_torrents():
    """Synchronize torrent transfers with the torrent client.

    Refreshes progress for running transfers, adopts client-side torrents
    with no transfer record, removes finished ones, and delegates the rest
    to manage_torrent().
    """
    client = get_torrent_client()
    # Refresh progress for every running torrent transfer
    for transfer in Transfer.find({
            'started': {'$ne': None},
            'finished': None,
            'type': 'torrent',
            }):
        hash = transfer['info'].get('hash')
        if not hash:
            continue
        try:
            torrent = client.get_torrent(hash=hash)
        except TorrentError:
            continue
        if not torrent:
            transfer['finished'] = datetime.utcnow()
            logger.debug('torrent %s is not queued', hash)
        else:
            transfer['info'] = torrent
            transfer['transferred'] = torrent.transferred
            transfer['total'] = torrent.size
            transfer['progress'] = torrent.progress
        Transfer.save(transfer, safe=True)
    default_dir = str(Settings.get_settings('paths')['default'])
    for torrent in client.iter_torrents():
        # Latest transfer record for this torrent, if any
        transfer = Transfer.find_one({'info.hash': torrent.hash},
                sort=[('created', DESCENDING)])
        if not transfer or (transfer['finished'] \
                and torrent.date_added > transfer['finished']):
            # Adopt a torrent the client knows about but we do not
            now = datetime.utcnow()
            Transfer.add(torrent.magnet_url, default_dir, type='torrent',
                    added=now, started=now, info={'hash': torrent.hash})
            logger.info('started torrent transfer %s to %s',
                    torrent.magnet_url, default_dir)
        elif transfer['finished']:
            client.remove_torrent(hash=torrent.hash, delete_data=True)
            logger.debug('removed finished torrent "%s" (%s)',
                    torrent.name, torrent.hash)
        else:
            manage_torrent(client, hash=torrent.hash, dst=transfer['dst'])
# One game turn: offer the player a choice between accepting immigration
# or transferring the most-needed resource, then check the win condition.
# (Relies on country, imm_level, req, most_needed, state, myCountry, imm
# being defined earlier in the script.)
print("\nAs", country, "we give you 2 options: ")
print("\t1. Accept", imm_level, "new people into your country")
print("\t2. Give us", req, "units of", most_needed)
choice = input("Put your choice here: ")
while choice not in ["1", "2"]:
    choice = input("Please enter 1 or 2: ")
if choice == "1":
    state = imm.execute()
    print("Immigration has been executed.")
elif choice == "2":
    t = Thresholds(state, myCountry)
    # check if our country has enough resources to make this transaction
    if t.is_valid_transfer(most_needed, req):
        transfer = Transfer(state, myCountry, country, (most_needed, req))
        state = transfer.execute()
        print("Transfer has been executed.")
    # if our country lacks the resource, force the immigration
    else:
        print("You don't have enough of this resource to complete the transfer.")
        state = imm.execute()
        print("Immigration has been executed.")
t = Thresholds(state, myCountry)
# all resources at comfortable level
if t.is_perfect():
    print("YOU WIN!")
    finished = True
class Account:
    """Holds a user's order history and (optional) transfer rule.

    Order objects (ComplexBuy/Sell and their Tomorrow variants) and the
    Transfer/Translate helpers are defined elsewhere in the project.
    User-facing report strings are in Persian and are returned verbatim.
    """

    def __init__(self):
        # user_orders: chronological list of order objects; last element is
        # the most recent order.
        self.user_orders = list()
        # NOTE(review): this append/remove pair leaves the list empty — it
        # has no lasting effect; presumably a leftover warm-up/typing hack.
        x = ComplexBuy(0, 0, 0)
        self.user_orders.append(x)
        self.user_orders.remove(x)
        # transfer: the single Transfer rule object, created lazily.
        self.transfer = None

    def new_order(self, type, amount, opening_price, limit_price, today_tomorrow):
        """Append a new order object chosen by (today_tomorrow, type).

        Unrecognized combinations are silently ignored.
        """
        if today_tomorrow == 'today':
            if type == 'buy':
                self.user_orders.append(
                    ComplexBuy(opening_price, limit_price, amount))
            elif type == 'sell':
                self.user_orders.append(
                    ComplexSell(opening_price, limit_price, amount))
        elif today_tomorrow == 'tomorrow':
            if type == 'buy':
                self.user_orders.append(
                    ComplexTomorrowBuy(opening_price, limit_price, amount))
            elif type == 'sell':
                self.user_orders.append(
                    ComplexTomorrowSell(opening_price, limit_price, amount))

    def cancel_order(self):
        # TODO cancel_order should send NUN if needed!
        """Cancel the last order in user_orders, if possible.

        If the last order is None, already finished, or already cancelled,
        nothing happens.

        :returns 0 if cancellation is successful
        :returns -1 if cancellation is not successful
        """
        if self.get_last_order() is not None and \
                not self.get_last_order().is_done() and \
                not self.get_last_order().is_cancelled:
            self.get_last_order().is_cancelled = True
            return 0
        else:
            return -1

    def get_last_order(self):
        """Return the most recent order, or None when no orders exist."""
        if self.user_orders is not None and \
                len(self.user_orders) > 0:
            return self.user_orders[-1]
        else:
            return None

    def last_order_status(self):
        """Return a one-line (Persian) status report of the last order."""
        order_status = 'وضعیت سفارش: '
        if self.has_active_order():
            order_status += '🔵\n' + self.get_last_order().raw_report()
        else:
            order_status += 'سفارش فعالی موجود نیست 🔴'
        order_status += '\n'
        return order_status

    def has_active_order(self):
        # TODO if Buy or sell, in addition to is_done, has active or inactive boolean,
        # it too should be checked here!
        """True when the last order exists, is not done and not cancelled."""
        return self.get_last_order() is not None and \
            not self.get_last_order().is_done() and \
            not self.get_last_order().is_cancelled

    def orders_report_by_num(self, num_go_back):
        """Concatenate short reports of the last *num_go_back* orders."""
        return_value = ''
        for order in self.user_orders[-num_go_back:]:
            return_value += order.short_report()
        if not return_value:
            return_value = 'سفارشی یافت نشد! ❗️'
        return return_value

    def orders_report_by_name(self, searched_order):
        """Report all orders whose price/volume match the parsed query text."""
        searched = Translate.order(searched_order)
        searched_price = searched['price']
        searched_volume = searched['amount']
        return_value = ''
        for order in self.user_orders:
            if order.is_equal(searched_price, searched_volume):
                return_value += order.complete_report()
        if not return_value:
            return_value = 'سفارشی یافت نشد! ❗️'
        return return_value

    def new_transfer(self):
        """Create the transfer rule on first use, re-activate it afterwards."""
        if self.transfer is None:
            self.transfer = Transfer()
        else:
            self.transfer.turn_on()

    def change_transfer_rule(self, transfer_rule_text):
        """Parse *transfer_rule_text* and apply it to the transfer rule.

        Returns 0 on success, -1 when buy limit exceeds sell limit,
        -2 when the text cannot be parsed.
        NOTE(review): assumes self.transfer is not None — if called before
        new_transfer() this raises AttributeError; confirm caller ordering.
        """
        translated_transfer_text = Translate.transfer_rule(transfer_rule_text)
        if translated_transfer_text is not None:
            if translated_transfer_text[
                    'buy_price_limit'] > translated_transfer_text[
                    'sell_price_limit']:
                return -1
            else:
                self.transfer.change_rule(
                    translated_transfer_text['amount_limit'],
                    translated_transfer_text['buy_price_limit'],
                    translated_transfer_text['sell_price_limit'])
                return 0
        else:
            return -2

    def is_transfer_active(self):
        """True when a transfer rule exists and is switched on."""
        return self.transfer is not None and \
            self.transfer.is_active

    def transfer_status(self):
        """Return a multi-line (Persian) status report of the transfer rule."""
        transfer_status = 'وضعیت جابهجایی: \n'
        if self.transfer is None:
            transfer_status += 'قانونی برای جابهجایی تعریف نشده است! 🔴'
        else:
            transfer_status += 'قانون جابهجایی: \n' + self.transfer.raw_report(
            ) + '\n'
            if self.transfer.is_active:
                transfer_status += 'جابهجایی فعال است! 🔵'
            else:
                transfer_status += 'جابهجایی غیر فعال است! 🔴'
        transfer_status += '\n'
        return transfer_status

    def transfer_report(self):
        # Returns None implicitly when no transfer rule exists.
        if self.transfer is not None:
            return self.transfer.report()

    def account_status(self):
        """Combined report: last-order status plus transfer status."""
        return self.last_order_status() + '\n' + self.transfer_status()
# NOTE(review): this `return` belongs to a method whose `def` lies above this
# chunk (it reads self.nu and the surrounding scope's nf, B, sin_theta);
# indentation assumed to be a class-method body — TODO confirm.
        # Absorption coefficient k_I for spectral index s=2.5, electron
        # charge/mass constants q_e / m_e supplied from module scope.
        return k_I(self.nu, nf, np.linalg.norm(B), sin_theta, s=2.5, q=q_e, m=m_e)


if __name__ == '__main__':
    # Smoke-test driver: build a jet, aim a line-of-sight ray at it, and
    # propagate the ray through the jet geometry.
    jet = Jet()
    from transfer import Transfer
    # Transfer(jet, viewing angle, image size, pixel size) — presumably;
    # verify signature against transfer.py.
    transf = Transfer(jet, math.pi / 4, (
        10,
        10,
    ), (
        0.1,
        0.1,
    ))
    origin = np.array([0., 0.1, 9.])
    direction = transf.los_direction
    from geometry import Ray
    ray = Ray(origin, direction)
    # Normal pointing back toward the observer.
    n = -np.array(direction)
    jet.create_ray(ray, n=10, max_dtau=1. * 10**(-16))
    # t1, t2 = jet.geometry.hit(ray)
    # k = 10
    # dt = abs(t2 - t1) / k
    # t_edges = [t2 + i * dt for i in xrange(k)]
    # t_cells = [min(t1, t2) + (i + 0.5) * dt for i in xrange(k - 1)]
def get_nzb_transfer(id):
    """Return the most recently created transfer whose NZB queue id is *id*."""
    query = {'info.nzo_id': id}
    newest_first = [('created', DESCENDING)]
    return Transfer.find_one(query, sort=newest_first)
def test_merge_save_model(self):
    """Smoke test: merging and saving the transfer model completes without error."""
    params = Params()
    params.load_config_file(JSON_PATH)
    model = Transfer(params, MODEL_NAME, PATCH_TYPE)
    model.merge_save_model()
def remove_transfer():
    """Cancel the transfer identified by the request body's 'id' field.

    Responds with {'error': 'missing id'} when no id was supplied,
    otherwise cancels the transfer and responds with {'result': True}.
    """
    payload = request.json
    transfer_id = payload.get('id')
    if not transfer_id:
        return jsonify(error='missing id')
    Transfer.cancel(transfer_id)
    return jsonify(result=True)
class CatTar(CommandOp):
    """Concatenate, package and distribute Illumina run output per lane/user.

    Python 2 code (print statements, iteritems, commands module).  Heavy use
    of os.chdir as a state machine: methods assume the working directory set
    by their caller.  `Bustard_path` / `Gerald_path` appear to be Illumina
    pipeline output directories — confirm against the config object.
    NOTE(review): `self.move` and `self.removeDir` are presumably inherited
    from CommandOp; not visible in this chunk.
    """

    ##################################################################
    # This function is called when an object of the class is created.
    # We basically define required parameters and extract relevant
    # command line information.
    ##################################################################
    def __init__(self, conf_obj):
        """This class returns an object which has the required
        configuration values. Please add/delete data from __init__
        to suit requirements."""
        # test flag: when truthy, destructive shell commands are printed
        # instead of executed (dry-run mode).
        self.test = 0
        self.conf_obj = conf_obj
        self.transSftp = Transfer('sftp')
        #for next version this should be changed to
        #ftp_trans.FtpTrans(self.conf_obj)
        self.ftp_obj = ftp_trans.FtpTrans()
        self.runid = self.conf_obj.runid
        self.backup_dir = self.conf_obj.backup_path
        #Change below code to modify logging format
        if(not os.path.isdir(self.backup_dir)):
            raise error.FnfError(self.backup_dir,
                "Error: backup dir not found"+\
                self.backup_dir+"; Exiting")
        self.root_dir = self.conf_obj.root_path
        # runid_dir: the working directory at construction time; used as the
        # "home base" that every method chdirs back to.
        self.runid_dir = os.getcwd()
        self.transCopy = Transfer('copy')
        self.summaryName = ""

    ##################################################################
    # Creates directories of users under backup directory
    # Calls calcpath for further processing (for every lane and for
    # every user
    ##################################################################
    def create_dir(self):
        """Create one directory per user (lane) and process each lane."""
        logging.debug("dir list: "+str(self.conf_obj.lane_user_dict))
        for lane, name in self.conf_obj.lane_user_dict.iteritems():
            # perm_dict[lane][0] gates whether this lane is processed at all.
            if not self.conf_obj.perm_dict[lane][0]:
                continue
            os.chdir(self.runid_dir)
            # s_what: filename prefix for this lane, e.g. "s_2_".
            self.s_what = "s_"+lane+"_"
            if not os.path.isdir(name):
                os.mkdir(name)
            #Recreating dir tree on the ftp server
            os.chdir(name)
            self.cur_name_dir = os.getcwd()
            self.calcpath()
            self.add_htm()

    ##################################################################
    #This function populates different path variables and calls concat
    #as many times as required. Concat handles further processing
    ##################################################################
    def calcpath(self):
        """Determine the file-index set for the current lane and build
        the name->source-path map (self.files) for each index."""
        self.Bustard_path = self.conf_obj.Bustard_path
        self.Gerald_path = self.conf_obj.Gerald_path
        os.chdir(self.Bustard_path)
        reqd_list = commands.getoutput("ls "+self.s_what+\
            "[1-9]_*_qseq.txt").split("\n")
        ind = []
        try:
            ind.append(reqd_list[0].split('_')[2])
        except IndexError:
            logging.debug("here is the problem causing ind "+str(ind))
        #The below for loop takes care of the case where there could
        #be 2 kinds of files for same lane.
        #Example:s_2_1_... s_2_2_...
        for file in reqd_list:
            if file.split('_')[2] not in ind:
                ind.append(file.split('_')[2])
        #Files which need to be included in the tar file. You could
        #directly add to this list to make things easier
        for i in ind:
            #if there is only one set of files for every lane
            if len(ind) == 1:
                self.files = {self.s_what+'eland_extended.txt':'',
                    self.s_what+'sequence.txt':'',
                    self.s_what+'sorted.txt':'',
                    self.s_what+'export.txt':''}
            #otherwise more than one set of files
            else:
                self.files = {self.s_what+i+'_'+'eland_extended.txt'\
                    :'',
                    self.s_what+i+'_'+'sequence.txt':'',
                    self.s_what+i+'_'+'sorted.txt':'',
                    self.s_what+i+'_'+'export.txt':''}
            #Add in paths where corresponding files can be found
            for file in self.files.iterkeys():
                if not file.endswith("qseq.txt"):
                    self.files[file] = self.Gerald_path
                else:
                    self.files[file] = self.Bustard_path
            self.concat(i)

    ##################################################################
    # Concatenates all txt files under Bustard
    ##################################################################
    def concat(self, i_str):
        """cat all qseq chunks for index *i_str* into one file under the
        current user's directory, then trigger copy_tar()."""
        #Separate out the name of the current user.
        #Used only for logging purposes
        cur_name = self.cur_name_dir.split("/")[-1]
        os.chdir(self.Bustard_path)
        concat_files = ''
        reqd_list = commands.getoutput("ls "+self.s_what+i_str+\
            "_*_qseq.txt").split("\n")
        logging.info("Starting loop for user "+cur_name)
        for file in reqd_list:
            concat_files += file +' '
        logging.info("Starting concatenation of files")
        logging.debug("Starting concatenation of files under "+\
            self.Bustard_path)
        logging.debug('cat '+self.s_what+i_str+'_qseq.txt ')
        if(self.test):
            # Dry-run: show the shell command instead of running it.
            print 'cat '+concat_files+'> '+\
                os.path.join(self.cur_name_dir,\
                self.s_what+i_str)+'_qseq.txt \n'
        else:
            os.system('cat '+concat_files+'> '+\
                os.path.join(self.cur_name_dir,\
                self.s_what+i_str)+'_qseq.txt ')
        logging.info("Finished concatenation of files")
        logging.debug("Finished concatenation of files under "+\
            self.Bustard_path)
        self.copy_tar()

    ##################################################################
    # Copies all files under Bustard and Gerald to the path for the
    # user. Then tars the files.
    ##################################################################
    def copy_tar(self):
        """Copy the files in self.files to the user directory, prefix each
        with the run id, then gzip every .txt file there."""
        all_paths = ''
        #Put all files to be copied in a single string
        #Will it be better to do it one by one?
        for file, path in self.files.iteritems():
            all_paths += os.path.join(path, file) + ' '
        logging.info("Starting copying of files")
        logging.debug("Starting copying of files under "+\
            self.Bustard_path+" and "+ self.Gerald_path+\
            " to "+self.cur_name_dir)
        logging.debug("cp "+all_paths+" "+self.cur_name_dir)
        if(self.test):
            print "cp "+all_paths+" "+self.cur_name_dir+"\n"
        else:
            for f in all_paths.split():
                self.transCopy.copy(f.strip(), self.cur_name_dir)
        logging.info("Finished copying of files")
        os.chdir(self.cur_name_dir)
        logging.info("Renaming of files")
        for file in os.listdir(os.getcwd()):
            if file.endswith(".txt"):
                if(self.test):
                    print "mv "+file+" "+self.runid+'_'+file+"\n"
                else:
                    #TODO: raghu. check error
                    self.move(file, self.runid+'_'+file)
        for file in os.listdir(os.getcwd()):
            if file.endswith(".txt"):
                logging.info("Starting tarring of file "+file)
                logging.debug("gzip "+file)
                if(self.test):
                    print "gzip "+file
                else:
                    os.system("gzip "+file)
                logging.info("Finished tarring of file "+file)

    ##################################################################
    #Copies summary, plots and htm from Bustard and Gerald into a temp
    #directory. Tars the temp directory and deletes the intermediate
    #directory.
    ##################################################################
    def add_htm(self):
        """Build <runid>_summary.tar.gz from the htm/Plots outputs of
        Bustard and Gerald, then trigger copy_file()."""
        self.Bustard_path = self.conf_obj.Bustard_path
        self.Gerald_path = self.conf_obj.Gerald_path
        logging.info("Starting summary tar creation")
        os.chdir(self.runid_dir)
        os.mkdir(self.runid+"summary")
        os.chdir(self.runid+"summary")
        logging.info("Copying htm files from Bustard")
        for file in glob.glob(os.path.join(self.Bustard_path,
                "*.[hH][Tt][Mm]")):
            self.transCopy.copy(file, ".")
        logging.info("Copying Plots folder from Bustard")
        self.transCopy.copy(os.path.join(self.Bustard_path, "Plots"),
            os.path.join(os.getcwd(),"Plots"))
        os.mkdir(self.Gerald_path.split('/')[-1])
        os.chdir(self.Gerald_path.split('/')[-1])
        logging.info("Copying htm files from Gerald ")
        for file in glob.glob(os.path.join(self.Gerald_path,
                "*.[hH][Tt][Mm]")):
            self.transCopy.copy(file, ".")
        logging.info("Copying Plots folder from Gerald")
        self.transCopy.copy(os.path.join(self.Gerald_path, "Plots"),
            os.path.join(os.getcwd(), "Plots"))
        logging.info("Starting tarring of Summary")
        os.chdir(self.runid_dir)
        os.system("tar -czf "+self.runid+"_summary.tar.gz "+\
            self.runid+"summary")
        logging.info("Finished tarring of Summary")
        self.copy_file()

    ##################################################################
    # Copies the required files for different users.
    ##################################################################
    def copy_file(self):
        """Distribute summary artifacts to the run directory and to each
        user's directory, then delete the temporary summary directory."""
        logging.info("Making copies of summary tar")
        os.chdir(self.runid_dir)
        logging.debug(self.conf_obj.copy_to_runid)
        logging.debug(self.conf_obj.copy_to_users)
        logging.debug(str(self.conf_obj.lane_user_dict)+" HI")
        for file in self.conf_obj.copy_to_runid:
            self.transCopy.copy(file, os.getcwd())
        for file in self.conf_obj.copy_to_users:
            for name in self.conf_obj.lane_user_dict.itervalues():
                self.transCopy.copy(file, name)
                if file.split('/')[-1] == 'Summary.htm':
                    os.chdir(name)
                    logging.debug("Renaming Summary.htm "+os.getcwd())
                    self.move('Summary.htm', self.runid+'_Summary.htm')
                    os.chdir('..')
        os.chdir(self.runid_dir)
        self.removeDir(self.runid+"summary")

    def ftp_files(self):
        """Upload each permitted lane's files to the FTP server (or to the
        Quest system when configured), then optionally register results."""
        i = 0
        #############################################################
        #name_folder format
        #
        #1. For Non-quest
        #   Length of list = 2
        #   [<name of user>, <folder to cd to on quest>]
        #2. For Quest
        #   Length of list = 4
        #   [<name of user>, <folder to cd to on quest>, <sample id>,
        #   <experiment id>]
        #############################################################
        for lane, name_folder in self.conf_obj.ftp_user_dict.\
                iteritems():
            # perm_dict[lane][1] gates the upload step for this lane.
            if not self.conf_obj.perm_dict[lane][1]:
                continue
            os.chdir(self.runid_dir)
            os.chdir(name_folder[0])
            file_list = commands.getoutput("ls *s_"+lane+"*").\
                split("\n")
            file_list.append(self.runid+"_summary.tar.gz")
            file_list.append(self.runid+"_Summary.htm")
            if len(name_folder) < 4:
                # Non-quest: create <runid>/<user> on the FTP server,
                # reconnecting when the directory already exists.
                self.ftp_obj.connect()
                self.ftp_obj.chdir(name_folder[1])
                try:
                    self.ftp_obj.mkdir(self.runid)
                except ftplib.error_perm:
                    self.ftp_obj.connect()
                    self.ftp_obj.chdir(name_folder[1])
                    logging.debug("runid dir already there")
                self.ftp_obj.chdir(self.runid)
                try:
                    self.ftp_obj.mkdir(name_folder[0])
                except ftplib.error_perm:
                    self.ftp_obj.connect()
                    self.ftp_obj.chdir(name_folder[1])
                    self.ftp_obj.chdir(self.runid)
                    logging.debug("user dir also already there")
                    self.ftp_obj.chdir(name_folder[0])
            else:
                if self.conf_obj.ftp_quest:
                    self.ftp_obj.connect()
                    self.ftp_obj.chdir(name_folder[1])
            for file in file_list:
                logging.info("Transferring file %s of user %s" \
                    %(file, name_folder[0]))
                if self.conf_obj.ftp_quest:
                    self.ftp_obj.copy_file(file)
                else:
                    self.transCopy.copy(file, '/cccstor/'+\
                        name_folder[1]+'/'+file)
                    logging.debug('cp '+file+' /cccstor/'+\
                        name_folder[1]+'/'+file)
            if self.conf_obj.ftp_quest:
                self.ftp_obj.close()
            logging.info("Finished transfer !!!")
            if len(name_folder) == 4:
                sid = name_folder[2]
                eid = name_folder[3]
                # NOTE(review): last two arguments look like instrument name
                # and a hard-coded credential/token — confirm and move out
                # of source if it is a secret.
                self.conf_obj.server.UpdateQuestSampleResults(sid,\
                    eid, file_list, self.runid, \
                    "IlluminaGAII", "heWex45--")

    def sftp_files(self):
        """Upload each permitted lane's files over SFTP and e-mail the user
        download instructions when an address is configured."""
        #name_folder = [uname, passwd, email id]
        for lane, name_folder in self.conf_obj.sftp_user_dict.\
                iteritems():
            if not self.conf_obj.perm_dict[lane][1]:
                continue
            os.chdir(self.runid_dir)
            os.chdir(name_folder[0])
            file_list = commands.getoutput("ls *s_"+lane+"*").\
                split("\n")
            file_list.append(self.runid+"_summary.tar.gz")
            file_list.append(self.runid+"_Summary.htm")
            self.transSftp.setServerParams(self.conf_obj.sftp_server,
                name_folder[0], name_folder[1])
            for file in file_list:
                logging.info("Transferring file %s of user %s" \
                    %(file, name_folder[0]))
                self.transSftp.copy(file, "/data/repos/solexa/"+\
                    name_folder[0]+"/"+file)
            self.mail_obj = mail.Mail()
            if(name_folder[2] != ""):
                # NOTE(review): the '******' fragments below are redaction
                # artifacts (censored credentials) and are not valid Python
                # as-is; the original expressions cannot be recovered from
                # this chunk.
                email_text = "Results for Illumina run "+self.runid+\
                    " has been uploaded to our SFTP server.\n"+\
                    "SFTP server: "+self.conf_obj.sftp_server+\
                    "\nUsername: "******"\nPassword: "******"\n"
                email_text += "Use winscp to download the files\n"+\
                    "http://winscp.net/eng/download.php\n"
                email_text += "\nRegards,\n\n"+\
                    "Biomedical Informatics Shared Resource\n"+ \
                    "The Ohio State University Comprehensive Cancer"+\
                    "Center\n"+\
                    "210 Biomedical Research Tower\n 460 W 12th Ave\n"+\
                    "Columbus, OH 43210\n (614) 366-1538\n"
                self.mail_obj.mail_diff(name_folder[2], \
                    "Illumina run "+self.runid, \
                    email_text)
def list_transfers():
    """Serialize every transfer that has not yet finished."""
    pending = Transfer.find({'finished': None})
    results = [_get_transfer(transfer) for transfer in pending]
    return serialize({'result': results})
def test_evaluate_cnn_svm_rf_model(self):
    """Smoke test: CNN+SVM+RF evaluation runs on a 100-sample batch."""
    params = Params()
    params.load_config_file(JSON_PATH)
    model = Transfer(params, MODEL_NAME, PATCH_TYPE)
    model.evaluate_cnn_svm_rf_model(SAMPLE_FIlENAME, 100)
def test_extract_features_for_train(self):
    """Smoke test: training-feature extraction runs on a 100-sample batch."""
    params = Params()
    params.load_config_file(JSON_PATH)
    model = Transfer(params, MODEL_NAME, PATCH_TYPE)
    model.extract_features_for_train(SAMPLE_FIlENAME, 100)
class pyUpdater:
    """Self-updater: downloads an update archive, backs up / restores the
    destination directory, and dumps/restores a MySQL database.

    Relies on module-level names not visible here: Tr (transfer module),
    td (tempfile), bcolors, mysql.connector / Error, requests, json,
    shutil, os, glob, time, subprocess, logging.
    """

    def __init__(self, verbose=False):
        self.classname = "pyUpdater"
        self.version = "0.1"
        self.configdir = "/opt/pyUpdater"
        self.configfile = "conf/pyUpdater.json"
        self.configCustomFile = ""
        self.verbose = verbose
        self.loadConfig()
        logging.basicConfig(filename=self.config['logfile'],
                            level=self.config['loglevel'],
                            format=self.config['logformat'])
        # Files listed in 'whitelist' (semicolon-separated) survive updates.
        self.preservefile = self.config['whitelist'].split(';')
        self.createTmpDir()
        # INIT transfer module
        self.tr = Tr()
        self.init_transfer()

    def loadConfig(self):
        """Load JSON config from the custom path if set, else the default."""
        print(self.configCustomFile)
        if (self.configCustomFile == ""):
            with open(self.configdir + '/' + self.configfile, 'r') as f:
                self.config = json.load(f)
        else:
            with open(self.configCustomFile, 'r') as f:
                self.config = json.load(f)

    def setConfigFile(self, path):
        """Point at a custom config file and immediately reload."""
        self.configCustomFile = path
        self.loadConfig()

    def init_transfer(self):
        """Copy transfer credentials from config into the transfer module."""
        self.tr.server = self.config['transfer']['server']
        self.tr.username = self.config['transfer']['username']
        self.tr.password = self.config['transfer']['password']

    def createTmpDir(self):
        # Working directory for downloads and whitelist backups; removed
        # again in __del__.
        self.tmpdir = td.mkdtemp()
        self.log("Create temporary directory: " + self.tmpdir)

    def extractUpdate(self):
        """Unpack the downloaded update archive into the destination dir."""
        self.log("Extract update!")
        archive_name = os.path.join(self.tmpdir, self.config['updatefile'])
        shutil.unpack_archive(archive_name, self.config['dstdir'])

    def backup(self):
        """Archive the whole destination directory to backupdir/backup.tar.gz."""
        archive_name = os.path.join(self.config['backupdir'], 'backup')
        shutil.make_archive(archive_name, 'gztar', self.config['dstdir'])
        self.log("Create backup file: " + archive_name + ".tar.gz")

    def restore(self):
        """Unpack backupdir/backup.tar.gz back into the destination dir."""
        archive_name = os.path.join(self.config['backupdir'],
                                    'backup') + '.tar.gz'
        shutil.unpack_archive(archive_name, self.config['dstdir'])
        self.log("Restored file in " + archive_name + ".tar.gz to " +
                 self.config['dstdir'])

    def update(self):
        """Full update cycle: check, preserve whitelist, wipe, extract, restore."""
        if self.checkupdate():
            self.log("New update present. Start updating ...")
            self.backup_whitelist()
            self.remove_old_file()
            self.extractUpdate()
            self.restore_whitelist()
        else:
            self.log("New version not found!")
            # NOTE(review): quit() terminates the interpreter from library
            # code — callers cannot recover; consider returning instead.
            quit(0)

    def dump(self):
        """Dump the database and push the dump via the transfer module."""
        self.log("Dump database")
        self.dump_database()
        # NOTE(review): self.dumpname is only set inside dump_database's
        # try block — if the dump failed, this line raises AttributeError.
        self.tr.send(self.config['backupdir'] + self.dumpname,
                     "/tmp" + self.dumpname)

    def restoredump(self):
        """Restore the database from the newest local dump."""
        self.log("Restore database from dump")
        self.restore_database()
        # todo: remote copy

    def checkupdate(self):
        """Download the update archive; True when HTTP 200, else False."""
        self.log("Start update procedure.")
        url = self.config['updateserver'] + "/" + self.config['updatefile']
        r = requests.get(url)
        if r.status_code != 200:
            self.log("Update not present. Error code: " +
                     r.status_code.__str__())
            return False
        else:
            with open(os.path.join(self.tmpdir,
                                   self.config['updatefile']), 'wb') as f:
                f.write(r.content)
            return True

    def backup_whitelist(self):
        """Copy each whitelisted file from dstdir into the temp directory."""
        for f in self.preservefile:
            self.log("Save file: " + os.path.join(self.config['dstdir'], f))
            shutil.copy2(os.path.join(self.config['dstdir'], f),
                         self.tmpdir + '/')

    def restore_whitelist(self):
        """Copy each whitelisted file from the temp dir back into dstdir."""
        for f in self.preservefile:
            self.log("Restore file: " +
                     os.path.join(self.config['dstdir'], f))
            filename = os.path.basename(self.tmpdir + '/' + f)
            shutil.copy2(self.tmpdir + '/' + filename,
                         os.path.join(self.config['dstdir'], f))

    def remove_old_file(self):
        """Delete every regular file under dstdir (directories are kept)."""
        for r, d, f in os.walk(self.config['dstdir']):
            for file in f:
                os.remove(os.path.join(r, file))

    def dump_database(self):
        """mysqldump | gzip into backupdir; failures are logged, not raised."""
        try:
            timestamp = str(int(time.time()))
            self.dumpname = "/dump_" + self.config[
                'db_name'] + "_" + timestamp + ".sql.gz"
            # NOTE(review): shell=True with credentials interpolated into the
            # command line exposes the password to `ps` and to injection via
            # config values.
            p = subprocess.Popen("mysqldump -h " + self.config['db_host'] +
                                 " -u " + self.config['db_user'] + " -p" +
                                 self.config['db_pass'] + " " +
                                 self.config['db_name'] + " | gzip > " +
                                 self.config['backupdir'] + self.dumpname,
                                 shell=True)
            # Wait for completion
            p.communicate()
            # Check for errors
            if (p.returncode != 0):
                # NOTE(review): raising a str is a TypeError in Python 3 —
                # should be `raise RuntimeError(...)`; also str + int fails.
                raise "Backup return code: " + p.returncode
            self.log("Backup done for " + self.config['db_name'])
        except:
            # NOTE(review): bare except hides the real failure cause.
            self.log("Backup failed for " + self.config['db_name'])

    def restore_database(self):
        """Pipe the newest *.sql.gz dump back into mysql; logged, not raised."""
        try:
            # find the latest dump under the backup directory
            list_of_files = glob.glob(self.config['backupdir'] + '/*.sql.gz')
            # NOTE(review): max() raises ValueError on an empty list, so the
            # `else` branch below is effectively unreachable.
            latest_file = max(list_of_files, key=os.path.getctime)
            if latest_file != '':
                p = subprocess.Popen("zcat " + latest_file + " | mysql -h " +
                                     self.config['db_host'] + " -u " +
                                     self.config['db_user'] + " -p" +
                                     self.config['db_pass'] + " " +
                                     self.config['db_name'],
                                     shell=True)
                # Wait for completion
                p.communicate()
                # Check for errors
                if p.returncode != 0:
                    # NOTE(review): raising a str is a TypeError in Python 3.
                    raise "Restore return code: " + p.returncode
                self.log("Restore done for " + latest_file)
            # no dump found
            else:
                self.log("No dumps found in " + self.config['backupdir'] +
                         ". Cannot restore anything, sorry.")
        except:
            # NOTE(review): if max() raised, latest_file is unbound here and
            # this line itself raises NameError.
            self.log("Restore failed for " + latest_file)

    def log(self, msg, level='debug'):
        # NOTE(review): the `level` argument is ignored — everything is
        # logged at DEBUG.
        if self.verbose:
            print(msg)
        logging.debug(msg)

    def test(self):
        """Interactive self-diagnostic: filesystem, DB and SSH checks."""
        # check if destination directory exist and is writeable
        print("")
        print("-------------------# CHECK FILESYSTEM #-------------------")
        if os.path.exists(self.config['dstdir']):
            print(bcolors.OKBLUE + '[ok]' + bcolors.ENDC,
                  self.config['dstdir'] + ' exist.')
            if os.access(self.config['dstdir'], os.W_OK):
                print(bcolors.OKBLUE + '[ok]' + bcolors.ENDC,
                      self.config['dstdir'] + ' is writeable.')
            else:
                print(bcolors.FAIL + '[fail]' + bcolors.ENDC,
                      self.config['dstdir'] + ' is not writeable.')
        else:
            print(bcolors.FAIL + '[fail]' + bcolors.ENDC,
                  self.config['dstdir'] + ' not exist. Create this folder.')
        # check if backup directory exist and is writeable
        if os.path.exists(self.config['backupdir']):
            print(bcolors.OKBLUE + '[ok]' + bcolors.ENDC,
                  self.config['backupdir'] + ' exist.')
            if os.access(self.config['backupdir'], os.W_OK):
                print(bcolors.OKBLUE + '[ok]' + bcolors.ENDC,
                      self.config['backupdir'] + ' is writeable.')
            else:
                print(bcolors.FAIL + '[fail]' + bcolors.ENDC,
                      self.config['backupdir'] + ' is not writeable.')
        else:
            print(bcolors.FAIL + '[fail]' + bcolors.ENDC,
                  self.config['backupdir'] + ' not exist. Create this folder.')
        # check if config file exists and readable
        if os.path.exists(self.configdir + '/' + self.configfile):
            print(bcolors.OKBLUE + '[ok]' + bcolors.ENDC,
                  self.configdir + '/' + self.configfile + ' exist.')
            if os.access(self.configdir + '/' + self.configfile, os.R_OK):
                print(bcolors.OKBLUE + '[ok]' + bcolors.ENDC,
                      self.configdir + '/' + self.configfile +
                      ' is readable.')
            else:
                print(
                    bcolors.FAIL + '[fail]' + bcolors.ENDC,
                    self.configdir + '/' + self.configfile +
                    ' is not readable.')
        else:
            print(
                bcolors.FAIL + '[fail]' + bcolors.ENDC,
                self.configdir + '/' + self.configfile +
                ' not exist. Create this folder.')
        print("----------------------# END #-----------------------------")
        print("")
        print("")
        print("---------------------# DB CHECK #-------------------------")
        # check database connection
        try:
            connection = mysql.connector.connect(
                host=self.config['db_host'],
                database=self.config['db_name'],
                user=self.config['db_user'],
                password=self.config['db_pass'])
            if connection.is_connected():
                db_info = connection.get_server_info()
                print(
                    bcolors.OKBLUE + '[ok]' + bcolors.ENDC +
                    " connected to MySQL database... MySQL Server version on ",
                    db_info)
                cursor = connection.cursor()
                cursor.execute("select database();")
                record = cursor.fetchone()
                print("Your connected to - ", record)
        except Error as e:
            print(bcolors.FAIL + '[fail]' + bcolors.ENDC +
                  " error while connecting to MySQL")
            pass
        print("----------------------# END #-----------------------------")
        print("")
        print("")
        print("---------------------# SSH CHECK #-------------------------")
        # NOTE(review): the '******' fragments below are redaction artifacts
        # (censored username expression) and are not valid Python as-is; the
        # original expression cannot be recovered from this chunk.
        if self.tr.checkconnection():
            print(
                bcolors.OKBLUE + '[ok]' + bcolors.ENDC,
                'server: ' + self.tr.server + ' and user: '******' - connection succesfuly.')
        else:
            print(
                bcolors.FAIL + '[fail]' + bcolors.ENDC,
                'server: ' + self.tr.server + ' and user: '******' - connection failed.')
        print("----------------------# END #-----------------------------")
        print("")

    def __del__(self):
        # Best-effort cleanup of the temp directory created in __init__.
        shutil.rmtree(self.tmpdir)
class MassFunction(object):
    """
    An object containing all relevant quantities for the mass function.

    The purpose of this class is to calculate many quantities associated with
    the dark matter halo mass function (HMF). The class is initialized to form
    a cosmology and takes in various options as to how to calculate all
    further quantities.

    All required outputs are provided as ``@property`` attributes for ease of
    access.

    Contains an update() method which can be passed arguments to update, in
    the most optimal manner. All output quantities are calculated only when
    needed (but stored after first calculation for quick access).

    Quantities related to the transfer function can be accessed through the
    ``transfer`` property of this object.

    Parameters
    ----------
    M : array_like, optional, default ``np.linspace(10,15,501)``
        The masses at which to perform analysis
        [units :math:`\log_{10}M_\odot h^{-1}`].

    mf_fit : str or callable, optional, default ``"ST"``
        A string indicating which fitting function to use for
        :math:`f(\sigma)`

        Available options:

        1. ``'PS'``: Press-Schechter form from 1974
        #. ``'ST'``: Sheth-Mo-Tormen empirical fit 2001 (deprecated!)
        #. ``'SMT'``: Sheth-Mo-Tormen empirical fit from 2001
        #. ``'Jenkins'``: Jenkins empirical fit from 2001
        #. ``'Warren'``: Warren empirical fit from 2006
        #. ``'Reed03'``: Reed empirical from 2003
        #. ``'Reed07'``: Reed empirical from 2007
        #. ``'Tinker'``: Tinker empirical from 2008
        #. ``'Watson'``: Watson empirical 2012
        #. ``'Watson_FoF'``: Watson Friend-of-friend fit 2012
        #. ``'Crocce'``: Crocce 2010
        #. ``'Courtin'``: Courtin 2011
        #. ``'Angulo'``: Angulo 2012
        #. ``'Angulo_Bound'``: Angulo sub-halo function 2012
        #. ``'Bhattacharya'``: Bhattacharya empirical fit 2011
        #. ``'Behroozi'``: Behroozi extension to Tinker for high-z 2013

        Alternatively, one may define a callable function, with the signature
        ``func(self)``, where ``self`` is a :class:`MassFunction` object (and
        has access to all its attributes). This may be passed here.

    delta_wrt : str, {``"mean"``, ``"crit"``}
        Defines what the overdensity of a halo is with respect to, mean
        density of the universe, or critical density.

    delta_h : float, optional, default ``200.0``
        The overdensity for the halo definition, with respect to ``delta_wrt``

    user_fit : str, optional, default ``""``
        A string defining a mathematical function in terms of `x`, used as
        the fitting function, where `x` is taken as :math:`\sigma`. Will only
        be applicable if ``mf_fit == "user_model"``.

    cut_fit : bool, optional, default ``True``
        Whether to forcibly cut :math:`f(\sigma)` at bounds in literature.
        If false, will use whole range of `M`.

    delta_c : float, default ``1.686``
        The critical overdensity for collapse, :math:`\delta_c`

    kwargs : keywords
        These keyword arguments are sent to the `hmf.transfer.Transfer` class.

        Included are all the cosmological parameters (see the docs for
        details).
    """

    def __init__(self, M=None, mf_fit="ST", delta_h=200.0, delta_wrt='mean',
                 cut_fit=True, z2=None, nz=None, delta_c=1.686,
                 mv_scheme="trapz", **kwargs):
        """
        Initializes some parameters
        """
        if M is None:
            M = np.linspace(10, 15, 501)

        # A list of all available kwargs (sent to Cosmology via Transfer)
        self._cp = ["sigma_8", "n", "w", "cs2_lam", "t_cmb", "y_he", "N_nu",
                    "omegan", "H0", "h", "omegab", "omegac", "omegav",
                    "omegab_h2", "omegac_h2", "force_flat", "default"]

        # Set up a simple dictionary of kwargs which can be later updated
        self._cpdict = {k: v for k, v in kwargs.iteritems() if k in self._cp}

        # Set all given parameters (the property setters validate them and
        # invalidate any cached quantity that depends on them).
        self.mf_fit = mf_fit
        self.M = M
        self.delta_h = delta_h
        self.delta_wrt = delta_wrt
        self.cut_fit = cut_fit
        self.z2 = z2
        self.nz = nz
        self.delta_c = delta_c
        self.transfer = Transfer(**kwargs)
        self.mv_scheme = mv_scheme

        tools.check_kr(self.M[0], self.M[-1], self.cosmo.mean_dens,
                       self.transfer.lnk[0], self.transfer.lnk[-1])

    def update(self, **kwargs):
        """
        Update the class with the given arguments in an optimal manner.

        Accepts any argument that the constructor takes.  Only quantities
        whose inputs actually changed are invalidated/recomputed.
        """
        for key, val in kwargs.iteritems():
            # The following takes care of everything specifically in this class
            if "_MassFunction__" + key in self.__dict__:
                try:
                    doset = np.any(getattr(self, key) != val)
                except ValueError:
                    doset = not np.array_equal(getattr(self, key), val)
                if doset:
                    setattr(self, key, val)

            # We need to handle deletes in this class by parameters in Transfer
            # here.  (Fixed: was `key is 'z'`, an identity comparison on a str
            # literal, which only worked by CPython interning accident.)
            if key == 'z':
                if val != self.transfer.z:
                    del self.sigma

        # All parameters being sent to Transfer:
        the_rest = {k: v for k, v in kwargs.iteritems()
                    if "_MassFunction__" + k not in self.__dict__}

        # Some things are basically deleted when anything in Transfer is updated
        if len(the_rest) > 0:
            del self.delta_halo
        if len(the_rest) > 1 or (len(the_rest) == 1 and 'z' not in the_rest):
            del self._sigma_0

        # The rest are sent to the Transfer class (stupid values weeded out there)
        self.transfer.update(**the_rest)

        tools.check_kr(self.M[0], self.M[-1], self.cosmo.mean_dens,
                       self.transfer.lnk[0], self.transfer.lnk[-1])

    # --- SET PROPERTIES -------------------------------------------------------
    @property
    def M(self):
        """Masses [units :math:`M_\odot h^{-1}`]; set in log10, stored linear."""
        return self.__M

    @M.setter
    def M(self, val):
        try:
            if len(val) == 1:
                raise ValueError("M must be a sequence of length > 1")
        except TypeError:
            raise TypeError("M must be a sequence of length > 1")

        # Require (approximately) constant log-spacing, increasing.
        if np.any(np.abs(np.diff(val, 2)) > 1e-5) or val[1] < val[0]:
            raise ValueError("M must be a linearly increasing vector! " +
                             str(val[0]) + " " + str(val[1]))

        # Delete stuff dependent on it
        del self._sigma_0

        self.__M = 10 ** val

    @property
    def delta_c(self):
        """Critical overdensity for collapse, :math:`\delta_c`."""
        return self.__delta_c

    @delta_c.setter
    def delta_c(self, val):
        try:
            val = float(val)
        except ValueError:
            raise ValueError("delta_c must be a number: ", val)

        if val <= 0:
            raise ValueError("delta_c must be > 0 (", val, ")")
        if val > 10.0:
            raise ValueError("delta_c must be < 10.0 (", val, ")")

        self.__delta_c = val
        del self.fsigma

    @property
    def mv_scheme(self):
        """Integration scheme for the mass variance ('trapz', 'simps' or 'romb')."""
        return self.__mv_scheme

    @mv_scheme.setter
    def mv_scheme(self, val):
        if val not in ['trapz', 'simps', 'romb']:
            raise ValueError("mv_scheme wrong")
        else:
            self.__mv_scheme = val
            del self._sigma_0

    @property
    def mf_fit(self):
        """The fitting function used for :math:`f(\sigma)` (name or callable)."""
        return self.__mf_fit

    @mf_fit.setter
    def mf_fit(self, val):
        # mf_fit may be a callable or a string. Try callable first; the broad
        # except is deliberate (any failure means "not a usable callable").
        try:
            val(self)
        except:
            try:
                val = str(val)
            except:
                raise ValueError("mf_fit must be a string or callable, got ", val)
            if val not in Fits.mf_fits + ["Behroozi"]:
                raise ValueError("mf_fit is not in the list of available "
                                 "fitting functions: ", val)

        # Also delete stuff dependent on it
        del self.fsigma
        self.__mf_fit = val

    @property
    def delta_h(self):
        """Halo overdensity w.r.t. ``delta_wrt``."""
        return self.__delta_h

    @delta_h.setter
    def delta_h(self, val):
        try:
            val = float(val)
        except ValueError:
            raise ValueError("delta_halo must be a number: ", val)

        if val <= 0:
            raise ValueError("delta_halo must be > 0 (", val, ")")
        if val > 10000:
            raise ValueError("delta_halo must be < 10,000 (", val, ")")

        self.__delta_h = val

        # Delete stuff dependent on it
        del self.delta_halo

    @property
    def delta_wrt(self):
        """What the halo overdensity is defined with respect to ('mean' or 'crit')."""
        return self.__delta_wrt

    @delta_wrt.setter
    def delta_wrt(self, val):
        if val not in ['mean', 'crit']:
            raise ValueError("delta_wrt must be either 'mean' or 'crit' (",
                             val, ")")

        self.__delta_wrt = val
        del self.delta_halo

    @property
    def z2(self):
        """Upper redshift for survey-volume-weighted calculations (or None)."""
        return self.__z2

    @z2.setter
    def z2(self, val):
        if val is None:
            self.__z2 = val
            return

        try:
            val = float(val)
        except ValueError:
            raise ValueError("z must be a number (", val, ")")

        if val <= self.transfer.z:
            raise ValueError("z2 must be larger than z")
        else:
            self.__z2 = val
            del self.dndm

    @property
    def nz(self):
        """Number of redshift bins for survey-volume-weighted calculations."""
        return self.__nz

    @nz.setter
    def nz(self, val):
        if val is None:
            self.__nz = val
            return

        try:
            val = int(val)
        except ValueError:
            raise ValueError("nz must be an integer")

        if val < 1:
            raise ValueError("nz must be >= 1")
        else:
            self.__nz = val
            del self.dndm

    @property
    def cut_fit(self):
        """Whether to cut :math:`f(\sigma)` at the published validity bounds."""
        return self.__cut_fit

    @cut_fit.setter
    def cut_fit(self, val):
        if not isinstance(val, bool):
            raise ValueError("cut_fit must be a bool, " + str(val))
        del self.fsigma
        self.__cut_fit = val

    # -------------------------- START NON-SET PROPERTIES ----------------------
    @property
    def cosmo(self):
        """ :class:`hmf.cosmo.Cosmology` object aliased from `self.transfer.cosmo`"""
        return self.transfer.cosmo

    @property
    def delta_halo(self):
        """ Overdensity of a halo w.r.t mean density"""
        try:
            return self.__delta_halo
        except AttributeError:
            if self.delta_wrt == 'mean':
                self.__delta_halo = self.delta_h
            elif self.delta_wrt == 'crit':
                self.__delta_halo = self.delta_h / cp.density.omega_M_z(
                    self.transfer.z, **self.cosmo.cosmolopy_dict())
            return self.__delta_halo

    @delta_halo.deleter
    def delta_halo(self):
        try:
            del self.__delta_halo
            del self.fsigma
        except AttributeError:
            pass

    @property
    def _sigma_0(self):
        """
        The normalised mass variance at z=0 :math:`\sigma`

        Notes
        -----

        .. math:: \sigma^2(R) = \frac{1}{2\pi^2}\int_0^\infty{k^2P(k)W^2(kR)dk}
        """
        try:
            return self.__sigma_0
        except AttributeError:
            self.__sigma_0 = tools.mass_variance(self.M, self.transfer._lnP_0,
                                                 self.transfer.lnk,
                                                 self.cosmo.mean_dens,
                                                 self.mv_scheme)
            return self.__sigma_0

    @_sigma_0.deleter
    def _sigma_0(self):
        try:
            del self.__sigma_0
            del self._dlnsdlnm
            del self.sigma
        except AttributeError:
            pass

    @property
    def _dlnsdlnm(self):
        """
        The value of :math:`\left|\frac{\d \ln \sigma}{\d \ln M}\right|`, ``len=len(M)``

        Notes
        -----

        .. math:: \frac{d\ln\sigma}{d\ln M} = \frac{3}{2\sigma^2\pi^2R^4}\int_0^\infty \frac{dW^2(kR)}{dM}\frac{P(k)}{k^2}dk
        """
        try:
            return self.__dlnsdlnm
        except AttributeError:
            self.__dlnsdlnm = tools.dlnsdlnm(self.M, self._sigma_0,
                                             self.transfer._lnP_0,
                                             self.transfer.lnk,
                                             self.cosmo.mean_dens)
            return self.__dlnsdlnm

    @_dlnsdlnm.deleter
    def _dlnsdlnm(self):
        try:
            del self.__dlnsdlnm
            del self.dndm
            del self.n_eff
        except AttributeError:
            pass

    @property
    def sigma(self):
        """
        The mass variance at `z`, ``len=len(M)``
        """
        try:
            return self.__sigma
        except AttributeError:
            self.__sigma = self._sigma_0 * self.transfer.growth
            return self.__sigma

    @sigma.deleter
    def sigma(self):
        try:
            del self.__sigma
            del self.fsigma
            del self.lnsigma
        except AttributeError:
            pass

    @property
    def lnsigma(self):
        """
        Natural log of inverse mass variance, ``len=len(M)``
        """
        try:
            return self.__lnsigma
        except AttributeError:
            self.__lnsigma = np.log(1 / self.sigma)
            return self.__lnsigma

    @lnsigma.deleter
    def lnsigma(self):
        try:
            del self.__lnsigma
            del self.fsigma
        except AttributeError:
            pass

    @property
    def n_eff(self):
        """
        Effective spectral index at scale of halo radius, ``len=len(M)``
        """
        try:
            return self.__n_eff
        except AttributeError:
            self.__n_eff = tools.n_eff(self._dlnsdlnm)
            return self.__n_eff

    @n_eff.deleter
    def n_eff(self):
        try:
            del self.__n_eff
        except AttributeError:
            pass

    @property
    def fsigma(self):
        """
        The multiplicity function, :math:`f(\sigma)`, for `mf_fit`. ``len=len(M)``
        """
        try:
            return self.__fsigma
        except AttributeError:
            # mf_fit may be a user callable; any failure falls back to the
            # named fitting functions (broad except is deliberate here).
            try:
                self.__fsigma = self.mf_fit(self)
            except:
                fits_class = Fits(self)
                self.__fsigma = fits_class.nufnu()

            if np.sum(np.isnan(self.__fsigma)) > 0.8 * len(self.__fsigma):
                # the input mass range is almost completely outside the cut
                logger.warning("The specified mass-range was almost entirely "
                               "outside of the limits from the fit. "
                               "Ignored fit range...")
                self.cut_fit = False
                # NOTE(review): if the first (callable) branch succeeded above,
                # `fits_class` is unbound here -- confirm against upstream hmf.
                try:
                    self.__fsigma = self.mf_fit(self)
                except:
                    self.__fsigma = fits_class.nufnu()

            return self.__fsigma

    @fsigma.deleter
    def fsigma(self):
        try:
            del self.__fsigma
            del self.dndm
        except AttributeError:
            pass

    @property
    def dndm(self):
        """
        The number density of haloes, ``len=len(M)`` [units :math:`h^4 M_\odot^{-1} Mpc^{-3}`]
        """
        try:
            return self.__dndm
        except AttributeError:
            if self.z2 is None:  # #This is normally the case
                self.__dndm = self.fsigma * self.cosmo.mean_dens * \
                    np.abs(self._dlnsdlnm) / self.M ** 2
                if self.mf_fit == 'Behroozi':
                    a = 1 / (1 + self.transfer.z)
                    theta = 0.144 / (1 + np.exp(14.79 * (a - 0.213))) * \
                        (self.M / 10 ** 11.5) ** (0.5 / (1 + np.exp(6.5 * a)))
                    ngtm_tinker = self._ngtm()
                    ngtm_behroozi = 10 ** (theta + np.log10(ngtm_tinker))
                    dthetadM = 0.144 / (1 + np.exp(14.79 * (a - 0.213))) * \
                        (0.5 / (1 + np.exp(6.5 * a))) * (self.M / 10 ** 11.5) ** \
                        (0.5 / (1 + np.exp(6.5 * a)) - 1) / (10 ** 11.5)
                    self.__dndm = self.__dndm * 10 ** theta - \
                        ngtm_behroozi * np.log(10) * dthetadM
            else:  # #This is for a survey-volume weighted calculation
                if self.nz is None:
                    self.nz = 10
                zedges = np.linspace(self.transfer.z, self.z2, self.nz)
                zcentres = (zedges[:-1] + zedges[1:]) / 2
                dndm = np.zeros_like(zcentres)
                vol = np.zeros_like(zedges)
                vol[0] = cp.distance.comoving_volume(
                    self.transfer.z, **self.cosmo.cosmolopy_dict())
                for i, zz in enumerate(zcentres):
                    self.update(z=zz)
                    dndm[i] = self.fsigma * self.cosmo.mean_dens * \
                        np.abs(self._dlnsdlnm) / self.M ** 2
                    if self.mf_fit == 'Behroozi':
                        a = 1 / (1 + self.transfer.z)
                        theta = 0.144 / (1 + np.exp(14.79 * (a - 0.213))) * \
                            (self.M / 10 ** 11.5) ** (0.5 / (1 + np.exp(6.5 * a)))
                        ngtm_tinker = self._ngtm()
                        ngtm_behroozi = 10 ** (theta + np.log10(ngtm_tinker))
                        dthetadM = 0.144 / (1 + np.exp(14.79 * (a - 0.213))) * \
                            (0.5 / (1 + np.exp(6.5 * a))) * \
                            (self.M / 10 ** 11.5) ** \
                            (0.5 / (1 + np.exp(6.5 * a)) - 1) / (10 ** 11.5)
                        dndm[i] = dndm[i] * 10 ** theta - \
                            ngtm_behroozi * np.log(10) * dthetadM

                    vol[i + 1] = cp.distance.comoving_volume(
                        z=zedges[i + 1], **self.cosmo.cosmolopy_dict())

                vol = vol[1:] - vol[:-1]  # Volume in shells
                integrand = vol * dndm
                numerator = intg.simps(integrand, x=zcentres)
                denom = intg.simps(vol, zcentres)
                self.__dndm = numerator / denom

            return self.__dndm

    @dndm.deleter
    def dndm(self):
        try:
            del self.__dndm
            del self.dndlnm
            del self.dndlog10m
        except AttributeError:
            pass

    @property
    def dndlnm(self):
        """
        The differential mass function in terms of natural log of `M`,
        ``len=len(M)`` [units :math:`h^3 Mpc^{-3}`]
        """
        try:
            return self.__dndlnm
        except AttributeError:
            self.__dndlnm = self.M * self.dndm
            return self.__dndlnm

    @dndlnm.deleter
    def dndlnm(self):
        try:
            del self.__dndlnm
            del self.ngtm
            del self.nltm
            del self.mgtm
            del self.mltm
            del self.how_big
        except AttributeError:
            pass

    @property
    def dndlog10m(self):
        """
        The differential mass function in terms of log of `M`,
        ``len=len(M)`` [units :math:`h^3 Mpc^{-3}`]
        """
        try:
            return self.__dndlog10m
        except AttributeError:
            self.__dndlog10m = self.M * self.dndm * np.log(10)
            return self.__dndlog10m

    @dndlog10m.deleter
    def dndlog10m(self):
        try:
            del self.__dndlog10m
        except AttributeError:
            pass

    def _upper_ngtm(self, M, mass_function, cut):
        """Calculate the mass function above given range of `M` in order to integrate"""
        ### WE CALCULATE THE MASS FUNCTION ABOVE THE COMPUTED RANGE ###
        # mass_function is logged already (not log10 though)
        m_upper = np.linspace(np.log(M[-1]), np.log(10 ** 18), 500)
        if cut:  # since its been cut, the best we can do is a power law
            mf_func = spline(np.log(M), mass_function, k=1)
            mf = mf_func(m_upper)
        else:
            # We try to calculate the hmf as far as we can normally
            new_pert = copy.deepcopy(self)
            new_pert.update(M=np.log10(np.exp(m_upper)))
            mf = np.log(np.exp(m_upper) * new_pert.dndm)

            if np.isnan(mf[-1]):
                # Then we couldn't get up all the way, so have to do linear ext.
                if np.isnan(mf[1]):
                    # Then the whole extension is nan and we have to use the
                    # original (start at 1 because 1 val won't work either)
                    mf_func = spline(np.log(M), mass_function, k=1)
                    mf = mf_func(m_upper)
                else:
                    mfslice = mf[np.logical_not(np.isnan(mf))]
                    m_nan = m_upper[np.isnan(mf)]
                    m_true = m_upper[np.logical_not(np.isnan(mf))]
                    mf_func = spline(m_true, mfslice, k=1)
                    mf[len(mfslice):] = mf_func(m_nan)
        return m_upper, mf

    def _lower_ngtm(self, M, mass_function, cut):
        ### WE CALCULATE THE MASS FUNCTION BELOW THE COMPUTED RANGE ###
        # mass_function is logged already (not log10 though)
        m_lower = np.linspace(np.log(10 ** 3), np.log(M[0]), 500)
        if cut:  # since its been cut, the best we can do is a power law
            mf_func = spline(np.log(M), mass_function, k=1)
            mf = mf_func(m_lower)
        else:
            # We try to calculate the hmf as far as we can normally
            new_pert = copy.deepcopy(self)
            new_pert.update(M=np.log10(np.exp(m_lower)))
            mf = np.log(np.exp(m_lower) * new_pert.dndm)

            if np.isnan(mf[0]):
                # Then we couldn't go down all the way, so have to do linear ext.
                mfslice = mf[np.logical_not(np.isnan(mf))]
                m_nan = m_lower[np.isnan(mf)]
                m_true = m_lower[np.logical_not(np.isnan(mf))]
                mf_func = spline(m_true, mfslice, k=1)
                mf[:len(mfslice)] = mf_func(m_nan)
        return m_lower, mf

    def _ngtm(self):
        """
        Calculate n(>m).

        This function is separated from the property because of the Behroozi fit
        """
        # set M and mass_function within computed range
        M = self.M[np.logical_not(np.isnan(self.dndlnm))]
        mass_function = self.dndlnm[np.logical_not(np.isnan(self.dndlnm))]

        # Calculate the mass function (and its integral) from the highest M up to 10**18
        if M[-1] < 10 ** 18:
            m_upper, mf = self._upper_ngtm(M, np.log(mass_function),
                                           M[-1] < self.M[-1])
            int_upper = intg.simps(np.exp(mf), dx=m_upper[2] - m_upper[1],
                                   even='first')
        else:
            int_upper = 0

        # Calculate the cumulative integral (backwards) of mass_function
        # (Adding on the upper integral)
        ngtm = np.concatenate((intg.cumtrapz(mass_function[::-1],
                                             dx=np.log(M[1]) - np.log(M[0]))[::-1],
                               np.zeros(1))) + int_upper

        # We need to set ngtm back in the original length vector with nans
        # where they were originally
        if len(ngtm) < len(self.M):
            ngtm_temp = np.zeros_like(self.dndlnm)
            ngtm_temp[:] = np.nan
            ngtm_temp[np.logical_not(np.isnan(self.dndlnm))] = ngtm
            ngtm = ngtm_temp

        return ngtm

    @property
    def ngtm(self):
        """
        The cumulative mass function above `M`, ``len=len(M)`` [units :math:`h^3 Mpc^{-3}`]
        """
        try:
            return self.__ngtm
        except AttributeError:
            self.__ngtm = self._ngtm()
            return self.__ngtm

    @ngtm.deleter
    def ngtm(self):
        try:
            del self.__ngtm
            del self.how_big
        except AttributeError:
            pass

    @property
    def mgtm(self):
        """
        Mass in haloes `>M`, ``len=len(M)`` [units :math:`M_\odot h^2 Mpc^{-3}`]
        """
        try:
            return self.__mgtm
        except AttributeError:
            M = self.M[np.logical_not(np.isnan(self.dndlnm))]
            mass_function = self.dndlnm[np.logical_not(np.isnan(self.dndlnm))]

            # Calculate the mass function (and its integral) from the highest M up to 10**18
            if M[-1] < 10 ** 18:
                m_upper, mf = self._upper_ngtm(M, np.log(mass_function),
                                               M[-1] < self.M[-1])
                int_upper = intg.simps(np.exp(mf + m_upper),
                                       dx=m_upper[2] - m_upper[1], even='first')
            else:
                int_upper = 0

            # Calculate the cumulative integral (backwards) of mass_function
            # (Adding on the upper integral)
            self.__mgtm = np.concatenate(
                (intg.cumtrapz(mass_function[::-1] * M[::-1],
                               dx=np.log(M[1]) - np.log(M[0]))[::-1],
                 np.zeros(1))) + int_upper

            # We need to set ngtm back in the original length vector with nans
            # where they were originally
            if len(self.__mgtm) < len(self.M):
                mgtm_temp = np.zeros_like(self.dndlnm)
                mgtm_temp[:] = np.nan
                mgtm_temp[np.logical_not(np.isnan(self.dndlnm))] = self.__mgtm
                self.__mgtm = mgtm_temp
            return self.__mgtm

    @mgtm.deleter
    def mgtm(self):
        try:
            del self.__mgtm
        except AttributeError:
            pass

    @property
    def nltm(self):
        """
        Inverse cumulative mass function, ``len=len(M)`` [units :math:`h^3 Mpc^{-3}`]
        """
        try:
            return self.__nltm
        except AttributeError:
            # set M and mass_function within computed range
            M = self.M[np.logical_not(np.isnan(self.dndlnm))]
            mass_function = self.dndlnm[np.logical_not(np.isnan(self.dndlnm))]

            # Calculate the mass function (and its integral) from 10**3 up to lowest M
            if M[0] > 10 ** 3:
                m_lower, mf = self._lower_ngtm(M, np.log(mass_function),
                                               M[0] > self.M[0])
                int_lower = intg.simps(np.exp(mf), dx=m_lower[2] - m_lower[1],
                                       even='first')
            else:
                int_lower = 0

            # Calculate the cumulative integral of mass_function
            # (Adding on the lower integral)
            self.__nltm = np.concatenate(
                (np.zeros(1),
                 intg.cumtrapz(mass_function,
                               dx=np.log(M[1]) - np.log(M[0])))) + int_lower

            # We need to set ngtm back in the original length vector with nans
            # where they were originally
            if len(self.__nltm) < len(self.M):
                nltm_temp = np.zeros_like(self.dndlnm)
                nltm_temp[:] = np.nan
                nltm_temp[np.logical_not(np.isnan(self.dndlnm))] = self.__nltm
                self.__nltm = nltm_temp

            return self.__nltm

    @nltm.deleter
    def nltm(self):
        try:
            del self.__nltm
        except AttributeError:
            pass

    @property
    def mltm(self):
        """
        Total mass in haloes `<M`, ``len=len(M)`` [units :math:`M_\odot h^2 Mpc^{-3}`]
        """
        try:
            return self.__mltm
        except AttributeError:
            # Set M within calculated range
            M = self.M[np.logical_not(np.isnan(self.dndlnm))]
            mass_function = self.dndlnm[np.logical_not(np.isnan(self.dndlnm))]

            # Calculate the mass function (and its integral) from 10**3 up to lowest M
            if M[0] > 10 ** 3:
                m_lower, mf = self._lower_ngtm(M, np.log(mass_function),
                                               M[0] > self.M[0])
                int_lower = intg.simps(np.exp(mf + m_lower),
                                       dx=m_lower[2] - m_lower[1], even='first')
            else:
                int_lower = 0

            # Calculate the cumulative integral of mass_function
            # (Adding on the lower integral)
            self.__mltm = np.concatenate(
                (np.zeros(1),
                 intg.cumtrapz(mass_function * M,
                               dx=np.log(M[1]) - np.log(M[0])))) + int_lower

            # We need to set ngtm back in the original length vector with nans
            # where they were originally
            if len(self.__mltm) < len(self.M):
                nltm_temp = np.zeros_like(self.dndlnm)
                nltm_temp[:] = np.nan
                nltm_temp[np.logical_not(np.isnan(self.dndlnm))] = self.__mltm
                self.__mltm = nltm_temp

            return self.__mltm

    @property
    def how_big(self):
        """
        Size of simulation volume in which to expect one halo of mass M,
        ``len=len(M)`` [units :math:`Mpch^{-1}`]
        """
        return self.ngtm ** (-1. / 3.)

    @how_big.deleter
    def how_big(self):
        # how_big is recomputed from ngtm on every access and caches nothing,
        # so there is nothing to delete.  (The original `del self.how_big`
        # re-entered this deleter recursively until the stack was exhausted.)
        pass
# Interactive payment-demo menu (labels in Portuguese).  The options printed
# here (6, 7) extend a menu whose earlier entries are printed above this
# chunk; the handlers below cover choices "1"-"5".
print("Precione 6 para realizar um estorno com split")
print("Precione 7 para verificar o status de um postback")
esco = raw_input("")  # Python 2: read the chosen option from stdin
if (esco == "1"):
    # Split() creates a transaction and returns its identifier
    print("A transação numero " + str(Split()) + " foi criada!")
elif (esco == "2"):
    # Recorrencia() creates a subscription and returns its identifier
    print("A assinatura numero " + str(Recorrencia()) + " foi criada!")
elif (esco == "3"):
    # Transfer() appears to return (id, amount_in_cents, recipient) -- the
    # indices are read in that order below; confirm against its definition.
    retorno = Transfer()
    print("A transferencia: " + str(retorno[0]) + ", de " + str(retorno[1]) +
          " centavos foi realizada para a conta bancária do recebedor " +
          str(retorno[2]) + "!")
elif (esco == "4"):
    # Antecipacao() returns the same (id, amount_in_cents, recipient) shape
    retorno = Antecipacao()
    print("A antecipação: " + str(retorno[0]) + ", de " + str(retorno[1]) +
          " centavos foi criada para o recebedor " + str(retorno[2]) + "!")
elif (esco == "5"):
    print("A assinatura com split numero " + str(RecorrenciaSplit()) +
          " foi criada!")
# 12306 transfer-ticket query: banner and task summary printed at startup.
import time
from operator import itemgetter

from transfer import Transfer, Mail
# star-import supplies the ALL-CAPS settings used below (DATE,
# TRANSFER_CITIES, MAX_TRANSFER, EXPECT_TOTAL_TIME, RETRY_TIMES, EXPIRE_TIME,
# MIN_EXPIRE_TIME, AUTO_EXPIRE, MAIL_MIN_INTERVAL, EMAIL_ENABLE, RECEIVE_EMAIL)
from config import *

# Welcome banner with version info taken from the Transfer class
print("\n# {:*^90} #".format(""))
print("# {:^80} #".format(" 欢迎使用12306余票查询服务 "))
print("# {:^90} #".format(" Author: yuanyuanzijin "))
print("# {:^90} #".format(f" Version: {Transfer.__tf_version__} - {Transfer.__update_time__} "))
print("# {:*^90} #\n".format(""))

# Build the query engine from the configured itinerary and cache settings
tf = Transfer(date=DATE, transfer_cities=TRANSFER_CITIES,
              max_transfer_times=MAX_TRANSFER,
              expect_total_time=EXPECT_TOTAL_TIME, retry_times=RETRY_TIMES,
              expire_time=EXPIRE_TIME, min_expire_time=MIN_EXPIRE_TIME,
              auto_expire=AUTO_EXPIRE)

# Echo the task configuration so the user can verify it before the search runs
print('\n')
print("{:*^50}".format(" 任务信息如下 "))
print(f"出发日期:{DATE}")
print(f"始发城市:{TRANSFER_CITIES[0]}")
print(f"终到城市:{TRANSFER_CITIES[-1]}")
print(f"可选换乘城市:{', '.join(TRANSFER_CITIES[1:-1])}")
print(f"最大换乘次数:{MAX_TRANSFER}")
print(f"期望总时间:<{EXPECT_TOTAL_TIME}h")
print("{:*^56}".format(""))
print(f"理论换乘路线:{len(tf.tasks)}个")
print(f"初始缓存时间:{EXPIRE_TIME}s")
print(f"最小缓存时间:{MIN_EXPIRE_TIME}s")
print(f"邮件最小间隔:{MAIL_MIN_INTERVAL}s")
print(f"邮件通知:{'开启' if EMAIL_ENABLE else '关闭'}")
print(f"通知邮箱:{RECEIVE_EMAIL}")
def new_transfer(self):
    """Ensure an active transfer: create one on first use, re-enable it otherwise."""
    existing = self.transfer
    if existing is not None:
        existing.turn_on()
    else:
        self.transfer = Transfer()
return self.DAILY_REPORT def daily_activity_report_output(self): self.file_name = 'SuburbanDigitalAdReport_' + self.date_strftime( ) + '.txt' output = open(self.file_name, 'w') output.write('Date: ' + str(self.todays_date()) + '\n' + str(self.daily_activity_report())) output.close() def monthly_activity_report(self): self.MONTHLY_REPORT = str(self.analytics.getMonthlyReport()) return self.MONTHLY_REPORT def monthly_activity_report_output(self): self.file_name = 'MonthlyDealerBudgetTemplate_' + self.date_strftime( ) + '.txt' output = open(self.file_name, 'w') output.write('Date: ' + str(self.todays_date()) + '\n' + str(self.monthly_activity_report())) output.close() if __name__ == "__main__": suburban = SuburbanCollection() print(suburban.todays_date()) suburban.daily_activity_report_output() suburban.monthly_activity_report_output() send = Transfer() send.ftp_daily_report()