Example #1
 def __getitem__(self, key):
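     # MP4 stores the track number as a (track, total) pair under the 'trkn' atom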
     if key == 'tracknumber':
         return str(self.tags['trkn'][0][0])
     elif key == 'key':
         warn('Keys are not supported for M4A files')
         return ''
     return self.tags[self.translate_key(key)][0]
Example #2
def save_image():
    password = request.forms.get('password')
    if password != config['Password']:
        end(403, "wrong password!")

    upfile = request.files.get('upfile')
    if not upfile:
        end(401, "no file in the request!")

    path = os.path.join(config['MediaRoot'], upfile.raw_filename)
    if not os.path.exists(path):
        filesize = -1
        try:
            filesize = int(request.forms.get('filesize'))
        except TypeError:
            end(400, "missing file size in the request!")

        # save file
        info("upfile path: " + path)
        upfile.save(path)

        # check file size in request against written file size
        if filesize != os.stat(path).st_size:
            end(411, "file sizes do not match!")

    else:
        warn("file " + path + " already exists")
Example #3
File: test.py Project: lhon/aldy
def test_single(sample, location, expected, profile, threshold, gene):
	expected = [r for r in expected if not pd.isnull(r)]
	message = '{} - {}'.format(sample, ' or '.join(expected))
	message = colorize('{:30}'.format(message), 'teal')
	if '' not in expected:
		expected = [[str(x).strip() for x in re.split(r'[/\+]', r)] for r in expected]
		expected = set(tuple(sorted(r, key=sortkey)) for r in expected)
	else:
		expected = set()

	try:
		solutions = genotype.genotype(location, gene, profile, threshold)
	except:
		logbook.error('{} {}', message, colorize('CRASH ', 'red'))
		exit(1)

	def fix(s):
		# keep the first run of digits in the allele name (e.g. '4' from '*4A')
		return re.split(r'(\d+)', s)[1]
	orig_solutions = solutions
	solutions = set(tuple(sorted((fix(p) for p in s), key=sortkey)) for s in solutions)
	expected = set(tuple(sorted((fix(p) for p in s), key=sortkey)) for s in expected)

	if solutions == expected:
		logbook.info('{} {} {}', message, colorize('OK   ', 'green'), list(orig_solutions))
		return 1
	elif solutions <= expected and len(solutions) != 0:
		logbook.info('{} {} {}', message, colorize('OK<  ', 'green'), list(orig_solutions))
		return 2
	elif len(expected & solutions) > 0:
		logbook.warn('{} {} {}', message, colorize('MULTI', 'yellow'), list(orig_solutions))
		return 3
	else:
		logbook.error('{} {} {}', message, colorize('FAIL ', 'red'), list(orig_solutions))
		return 0
Example #4
File: test.py Project: lhon/aldy
def test_samples(samples, location, profile, threshold, gene='cyp2d6'):
	from pathos.multiprocessing import ProcessingPool as Pool
	
	pool = Pool(processes=16)
	
	def f(s):
		global counter
		sample, expected = s[0], s[1:]
		# if sample not in T: continue
		loc = location.format(sample)
		if not os.path.exists(loc): # or sample == 'PGXT122':
			return 0
		try:
			res = test_single(sample, loc, expected, profile, threshold, gene)
		except Exception as e:
			print(s, e)
			res = 0
		# sys.stderr.write('-')
		return res
	
	# sys.stderr.write(' ' * len(samples) + '|\r')
	result = pool.map(f, samples)
	logbook.warn(
		'Passed {} out of {} ({} subset, {} multi)\n',
		colorize(str(len([x for x in result if x > 0])), 'green'),
		colorize(str(len(result)), 'blue'),
		colorize(str(len([x for x in result if x == 2])), 'yellow'),
		colorize(str(len([x for x in result if x == 3])), 'yellow')
	)
Example #5
 def send_back():
     handler = MultiProcessingHandler(queue)
     handler.push_thread()
     try:
         logbook.warn(message)
     finally:
         handler.pop_thread()
Example #6
 def test_calling_frame(self):
     handler = logbook.TestHandler()
     handler.push_thread()
     try:
         logbook.warn('test')
     finally:
         handler.pop_thread()
     self.assertEqual(handler.records[0].calling_frame, sys._getframe())
Example #7
 def __call__(self):
     from logbook.queues import MultiProcessingHandler
     handler = MultiProcessingHandler(self.queue)
     handler.push_thread()
     try:
         logbook.warn('Hello World')
     finally:
         handler.pop_thread()
Example #8
 def __call__(self):
     from logbook.queues import MultiProcessingHandler
     handler = MultiProcessingHandler(self.queue)
     handler.push_thread()
     try:
         logbook.warn('Hello World')
     finally:
         handler.pop_thread()
Example #9
 def __setitem__(self, key, value):
     if key == 'tracknumber':
         k, n = self.tags['trkn'][0]
         self.tags['trkn'][0] = (int(value), n)
     elif key == 'key':
         warn('Keys are not supported for M4A files')
     else:
         self.tags[self.translate_key(key)] = [value]
Example #10
 def test_blackhole_setting(self):
     null_handler = logbook.NullHandler()
     heavy_init = logbook.LogRecord.heavy_init
     try:
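         # if the NullHandler really blackholes records, heavy_init is never called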
         def new_heavy_init(self):
             raise RuntimeError('should not be triggered')
         logbook.LogRecord.heavy_init = new_heavy_init
         with null_handler:
             logbook.warn('Awesome')
     finally:
         logbook.LogRecord.heavy_init = heavy_init
Example #11
def scrapeRuns():
    runs = []
    today = datetime.now()
    for meetingURL in RPTodayRaces(today).cardurls:
        try:
            race_card = RPRaceCard(meetingURL)
            for nag in race_card.runners:
                runs.append(
                    Run(nag["name"], race_card.location, race_card.time))
        except Exception as e:
            warn("url=" + meetingURL + " " + str(e))
Example #12
    def parse_nutrition_str(self, nutrition_str):
        nutrition_infos = {}
        nutrition_str = nutrition_str.replace(u" \u2013 pro durchschnittlicher Portion", "")
        nutrition_arr = nutrition_str.split()
        
        for i in xrange(0, len(nutrition_arr), 2):
            try:
                nutrition_infos[nutrition_arr[i]] = nutrition_arr[i+1]
            except IndexError:
                warn("Malformatted nutrition string: '%s'" % nutrition_str)

        return nutrition_infos
Example #13
def scrapeRuns():
    runs = []
    today = datetime.now()
    for meetingURL in RPTodayRaces(today).cardurls:
        try:
            race_card = RPRaceCard(meetingURL)
            for nag in race_card.runners:
                runs.append(
                        Run( nag["name"] , race_card.location, race_card.time)
                        )
        except Exception as e:
            warn("url=" + meetingURL + " " + str(e))
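Example #14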
def save_image():
    password = request.forms.get("password")
    if password != PASSWORD:
        abort(403, "ERROR: wrong password!")

    upfile = request.files.get("upfile")
    if not upfile:
        abort(401, "ERROR: no file in the request!")

    path = os.path.join(MEDIA_ROOT, upfile.raw_filename)
    if not os.path.exists(path):
        debug("upfile path: " + path)
        upfile.save(path)
    else:
        warn("file " + path + " already exists")
Example #15
def test_single(sample, location, expected, profile, gene, solver):
    expected = [r for r in expected if not pd.isnull(r)]

    message = '{}::{}::{}'.format(sample, solver[:2], gene)
    expected = [[
        str(x).strip() for x in re.split(r'[/\+]', r) if str(x).strip() != ''
    ] for r in expected]
    expected = [tuple(sorted(r, key=sortkey)) for r in expected]

    def fix(s):
        return re.split(r'(\d+)', s)[1]

    expected = [
        tuple(sorted((fix(p) for p in s), key=sortkey)) for s in expected
    ]

    expected, expected_new = set(expected), set(expected[1:])

    solutions = genotype.genotype(
        location,
        'tmp/{}_{}_{}.out'.format(sample, gene, profile),
        'tmp/{}_{}_{}.log'.format(sample, gene, profile),
        gene,
        profile,
        0.5,
        solver,
        cn_solution=None,
        reference='/data/cb/inumanag/aldy/cram-genome.fa',
        cn_neutral_region=None)

    orig_solutions = '; '.join(','.join(s[1]) for s in solutions)
    orig_expected = '; '.join(','.join(s) for s in expected)
    solutions = set(
        tuple(sorted((fix(p) for p in s[1]), key=sortkey)) for s in solutions)

    if solutions == expected:
        logbook.warn('{:20} {} {:25} == {}', message,
                     colorize('OK   ', 'green'), orig_solutions, orig_expected)
        return 1
    elif solutions <= expected and len(solutions) != 0:
        if solutions == expected_new:
            logbook.warn('{:20} {} {:25} == {}', message,
                         colorize('OK=  ', 'green'), orig_solutions,
                         orig_expected)
        else:
            logbook.warn('{:20} {} {:25} <= {}', message,
                         colorize('OK?  ', 'green'), orig_solutions,
                         orig_expected)
        return 2
    elif len(expected & solutions) > 0:
        logbook.warn('{:20} {} {:25} =~ {}', message,
                     colorize('MULT ', 'yellow'), orig_solutions,
                     orig_expected)
        return 3
    else:
        logbook.error('{:20} {} {:25} != {}', message,
                      colorize('FAIL ', 'red'), orig_solutions, orig_expected)
        return 0
Example #16
def save_tags(filepath, fragments, genome=None,
              re_offset=0):
    def get_tag_intervals():
        for frag in fragments:
            # rev primer
            rname = frag.left_primer.name
            if rname.endswith('_fwd') or rname.endswith('_rev'):
                rname = rname[:-4]
            rname += '_rev'
            rev_tag = get_tag_interval(frag.left_primer,
                                       frag.left_rsite, name=rname,
                                       re_offset=re_offset)
            yield ('left_primer', rev_tag)
            # fwd primer
            fname = frag.right_primer.name
            if fname.endswith('_fwd') or fname.endswith('_rev'):
                fname = fname[:-4]
            fname += '_fwd'
            fwd_tag = get_tag_interval(frag.right_primer,
                                       frag.right_rsite, name=fname,
                                       re_offset=re_offset)
            yield ('right_primer', fwd_tag)

    notice('called')
    if genome is None:
        with open(filepath, 'w') as f:
            for prim_loc, x in get_tag_intervals():
                f.write(bedentry_as_string(x, extra=prim_loc))
        return
    z = collections.defaultdict(set)
    for prim_loc, x in get_tag_intervals():
        seq = genome[x.chrom][x.start:x.end]
        if prim_loc == 'left_primer':
            assert x.strand == '-'
            seq = seq.reverse_complement()
        else:
            assert x.strand == '+'
        seq = seq.seq.tostring()
        if seq in z[x.name]:
            warn('%s has multiple identical tag sequences.' % x.name)
        else:
            z[x.name].add(seq)
    with open(filepath, 'w') as f:
        for name in sorted(z):
            v = z[name]
            while len(v):
                f.write('>%s\n' % name)
                f.write('%s\n' % v.pop())
Example #17
    def test_blackhole_setting(self):
        null_handler = logbook.NullHandler()
        heavy_init = logbook.LogRecord.heavy_init
        try:
            def new_heavy_init(self):
                raise RuntimeError('should not be triggered')
            logbook.LogRecord.heavy_init = new_heavy_init
            with null_handler:
                logbook.warn('Awesome')
        finally:
            logbook.LogRecord.heavy_init = heavy_init

        null_handler.bubble = True
        with capture_stderr() as captured:
            logbook.warning('Not a blockhole')
            self.assertNotEqual(captured.getvalue(), '')
Example #18
    def test_blackhole_setting(self):
        null_handler = logbook.NullHandler()
        heavy_init = logbook.LogRecord.heavy_init
        try:
            def new_heavy_init(self):
                raise RuntimeError('should not be triggered')
            logbook.LogRecord.heavy_init = new_heavy_init
            with null_handler:
                logbook.warn('Awesome')
        finally:
            logbook.LogRecord.heavy_init = heavy_init

        null_handler.bubble = True
        with capture_stderr() as captured:
            logbook.warning('Not a blockhole')
            self.assertNotEqual(captured.getvalue(), '')
Example #19
def count(doc):
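    # tally the phone number's 7-digit prefix into per-province/city counters in redis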
    phone_num = doc['phone']
    if len(phone_num) > 12 or not phone_num.isdigit():
        logbook.warn('anomaly phone num: {}'.format(phone_num))
        return
    phone_position = int(phone_num[:7])
    position_doc = m_phone.phone.find_one({'_id':phone_position})
    if not position_doc:
        logbook.warn('can not select the num: {}'.format(phone_num))
        return
    key_list = [
        'province:{}'.format(position_doc['province'].encode('utf8')),
        'city:{}'.format(position_doc['city'].encode('utf8')),]
    msetnx_dict = {item: 0 for item in key_list}
    r_db.msetnx(msetnx_dict)
    incr = lambda key: r_db.incr(key)
    map(incr, key_list)
Example #20
def test_blackhole_setting(activation_strategy):
    null_handler = logbook.NullHandler()
    heavy_init = logbook.LogRecord.heavy_init
    with activation_strategy(null_handler):
        def new_heavy_init(self):
            raise RuntimeError('should not be triggered')
        logbook.LogRecord.heavy_init = new_heavy_init
        try:
            with activation_strategy(null_handler):
                logbook.warn('Awesome')
        finally:
            logbook.LogRecord.heavy_init = heavy_init

    null_handler.bubble = True
    with capturing_stderr_context() as captured:
        logbook.warning('Not a blockhole')
        assert captured.getvalue() != ''
Example #21
def count(doc):
    phone_num = doc['phone']
    if len(phone_num) > 12 or not phone_num.isdigit():
        logbook.warn('anomaly phone num: {}'.format(phone_num))
        return
    phone_position = int(phone_num[:7])
    position_doc = m_phone.phone.find_one({'_id': phone_position})
    if not position_doc:
        logbook.warn('can not select the num: {}'.format(phone_num))
        return
    key_list = [
        'province:{}'.format(position_doc['province'].encode('utf8')),
        'city:{}'.format(position_doc['city'].encode('utf8')),
    ]
    msetnx_dict = {item: 0 for item in key_list}
    r_db.msetnx(msetnx_dict)
    incr = lambda key: r_db.incr(key)
    map(incr, key_list)
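Example #22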
def create_settings_file():
    filename = 'photobackup_settings.py'
    global input

    # Python2 compatibility for input()
    try:
        input = raw_input
    except NameError:
        pass

    # ask for the upload directory (should be writable by the server)
    media_root = input("The directory where to put the pictures" +
                       " (should be writable by the server you use): ")
    if not os.path.isdir(media_root):
        notice("Directory {} does not exist, creating it".format(media_root))
        os.mkdir(media_root)
    server_user = input("Owner of the directory [www-data]: ")
    if not server_user:
        server_user = 'www-data'

    try:
        server_user_uid = pwd.getpwnam(server_user).pw_uid
        if os.stat(media_root).st_uid != server_user_uid:
            notice("Changing owner to: {}".format(server_user))
            try:
                shutil.chown(media_root, server_user, server_user)
            except AttributeError:
                warn("Can't change directory's owner, please do it correctly!")
    except KeyError:
        warn("User {} not found, please check the directory's rights."
             .format(server_user))

    # ask a password for the server
    text = "The server password that you use in the mobile app: "
    password = getpass.getpass(prompt=text)
    passhash = hashlib.sha512(password.encode('utf-8')).hexdigest()

    with open(filename, 'w') as settings:
        settings.write("# generated settings for PhotoBackup Bottle server\n")
        settings.write("MEDIA_ROOT = '{}'\n".format(media_root))
        settings.write("PASSWORD = '{}'\n".format(passhash))

    notice("Settings file is created, please launch me again!")
    return media_root, passhash
Example #23
def test_blackhole_setting(activation_strategy):
    null_handler = logbook.NullHandler()
    heavy_init = logbook.LogRecord.heavy_init
    with activation_strategy(null_handler):

        def new_heavy_init(self):
            raise RuntimeError('should not be triggered')

        logbook.LogRecord.heavy_init = new_heavy_init
        try:
            with activation_strategy(null_handler):
                logbook.warn('Awesome')
        finally:
            logbook.LogRecord.heavy_init = heavy_init

    null_handler.bubble = True
    with capturing_stderr_context() as captured:
        logbook.warning('Not a blockhole')
        assert captured.getvalue() != ''
Example #24
def simple_pkg_ver(pkg, ver):
    u"""
    pip справшивает где скачать пакет такой-то версии — даём ссылку на самих себя
    """
    from pygift.tools import version_validate

    if tools.can_be_proxied(pkg):
        return proxy.simple_pkg_ver(pkg, ver)

    # if the package is public, someone may try to install it by a regular version number
    if not version_validate(ver):
        if not tools.is_public(pkg):
            warn("unsupported version requested: {!r}=={!r}", pkg, ver)
            return '<!-- unsupported version format -->'
        else:
            debug("unsupported version format, yet public pkg, simulating proxy: {!r}=={!r}", pkg, ver)
            return proxy.json2simple_pkg_ver(pkg, ver)

    url = url_for('pkg_generate', pkg=pkg, ver=ver)
    return '<a href="{url}">{pkg}-{ver}</a>'.format(url=escape(url), pkg=escape(pkg), ver=escape(ver))
Example #25
def save(line):
    '''
    save the code-position to mongo
    '''

    # format the fileline
    info = line.strip('\r\n').strip(' ').split(' ')
    info[0] = int(info[0])
    info[1] = info[1].decode('gb2312').encode('utf8')
    if len(info) > 2:
        logbook.warn(info)

    # save the info to mongo
    try:
        doc = {'_id': info[0], 'position':info[1]}
        ret = m.position.save(doc)
    except StandardError as error_info:
        logbook.error(error_info)
        logbook.warn('code:{}, position:{}, original:{}'.format(
            info[0], info[1], info,
            ))
Example #26
def test_global_functions(activation_strategy):
    with activation_strategy(logbook.TestHandler()) as handler:
        logbook.debug('a debug message')
        logbook.info('an info message')
        logbook.warn('warning part 1')
        logbook.warning('warning part 2')
        logbook.notice('notice')
        logbook.error('an error')
        logbook.critical('pretty critical')
        logbook.log(logbook.CRITICAL, 'critical too')

    assert handler.has_debug('a debug message')
    assert handler.has_info('an info message')
    assert handler.has_warning('warning part 1')
    assert handler.has_warning('warning part 2')
    assert handler.has_notice('notice')
    assert handler.has_error('an error')
    assert handler.has_critical('pretty critical')
    assert handler.has_critical('critical too')
    assert handler.records[0].channel == 'Generic'
    assert handler.records[0].dispatcher is None
Example #27
 def test_global_functions(self):
     handler = logbook.TestHandler()
     with handler:
         logbook.debug('a debug message')
         logbook.info('an info message')
         logbook.warn('warning part 1')
         logbook.warning('warning part 2')
         logbook.notice('notice')
         logbook.error('an error')
         logbook.critical('pretty critical')
         logbook.log(logbook.CRITICAL, 'critical too')
     self.assert_(handler.has_debug('a debug message'))
     self.assert_(handler.has_info('an info message'))
     self.assert_(handler.has_warning('warning part 1'))
     self.assert_(handler.has_warning('warning part 2'))
     self.assert_(handler.has_notice('notice'))
     self.assert_(handler.has_error('an error'))
     self.assert_(handler.has_critical('pretty critical'))
     self.assert_(handler.has_critical('critical too'))
     self.assertEqual(handler.records[0].logger_name, 'generic')
     self.assertEqual(handler.records[0].channel, None)
Example #28
 def test_global_functions(self):
     handler = logbook.TestHandler()
     with handler:
         logbook.debug('a debug message')
         logbook.info('an info message')
         logbook.warn('warning part 1')
         logbook.warning('warning part 2')
         logbook.notice('notice')
         logbook.error('an error')
         logbook.critical('pretty critical')
         logbook.log(logbook.CRITICAL, 'critical too')
     self.assert_(handler.has_debug('a debug message'))
     self.assert_(handler.has_info('an info message'))
     self.assert_(handler.has_warning('warning part 1'))
     self.assert_(handler.has_warning('warning part 2'))
     self.assert_(handler.has_notice('notice'))
     self.assert_(handler.has_error('an error'))
     self.assert_(handler.has_critical('pretty critical'))
     self.assert_(handler.has_critical('critical too'))
     self.assertEqual(handler.records[0].channel, 'Generic')
     self.assertEqual(handler.records[0].dispatcher, None)
Example #29
def save_county():
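    # move county counters from redis into mongo, prefixing each county with its parent city name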
    while r_db.randomkey():
        key = r_db.randomkey()
        if not key.isdigit():
            logbook.warn(key)
            continue
        count = int(r_db.get(key))
        parent_id = int(key) / 100 * 100
        parent = m_db.card.city.find_one({'_id': parent_id})
        if not parent:
            parent = {'_id': int(parent_id), 'position': '未知'.decode('utf8')}
        self_doc = m_db.position.find_one({'_id': key})
        if not self_doc:
            self_doc = {'_id': int(key), 'position': '未知'.decode('utf8')}
        self_doc['count'] = count
        self_doc['position'] = '{parent} {self}'.format(
            parent=parent['position'].encode('utf8'),
            self=self_doc['position'].encode('utf8'),
        )
        ret = m_db.card.county.save(self_doc)
        logbook.info(ret)
        r_db.delete(key)
Example #30
def save(line):
    '''
    save the code-position to mongo
    '''

    # format the fileline
    info = line.strip('\r\n').strip(' ').split(' ')
    info[0] = int(info[0])
    info[1] = info[1].decode('gb2312').encode('utf8')
    if len(info) > 2:
        logbook.warn(info)

    # save the info to mongo
    try:
        doc = {'_id': info[0], 'position': info[1]}
        ret = m.position.save(doc)
    except StandardError as error_info:
        logbook.error(error_info)
        logbook.warn('code:{}, position:{}, original:{}'.format(
            info[0],
            info[1],
            info,
        ))
Example #31
 def _reread(self):
     conf_name = 'pygift.json'
     conf_dirs = (
         os.path.dirname(os.path.abspath(__file__)),
         '/etc',
     )
     conf_paths = (
         os.path.join(conf_dir, conf_name)
         for conf_dir in conf_dirs
     )
     self._path = next(
         (path for path in conf_paths if os.path.exists(path)),
         None
     )
     if not self._path:
         self._data = {}
         warn('config not found')
     else:
         info('using config {!r}', self._path)
         self._mtime = os.path.getmtime(self._path)
         with open(self._path) as f:
             self._data = demjson.decode(f.read())
     self._initialized = True
Example #32
def save_county():
    while r_db.randomkey():
        key = r_db.randomkey()
        if not key.isdigit():
            logbook.warn(key)
            continue
        count = int(r_db.get(key))
        parent_id = int(key) / 100 * 100
        parent = m_db.card.city.find_one({'_id': parent_id})
        if not parent:
            parent = {
                '_id': int(parent_id), 'position': '未知'.decode('utf8')}
        self_doc = m_db.position.find_one({'_id': key})
        if not self_doc:
            self_doc = {
                '_id': int(key), 'position': '未知'.decode('utf8')}
        self_doc['count'] = count
        self_doc['position'] = '{parent} {self}'.format(
            parent=parent['position'].encode('utf8'),
            self=self_doc['position'].encode('utf8'),)
        ret = m_db.card.county.save(self_doc)
        logbook.info(ret)
        r_db.delete(key)
Example #33
 def test_global_functions(self):
     handler = logbook.TestHandler()
     handler.push_thread()
     try:
         logbook.debug('a debug message')
         logbook.info('an info message')
         logbook.warn('warning part 1')
         logbook.warning('warning part 2')
         logbook.notice('notice')
         logbook.error('an error')
         logbook.critical('pretty critical')
         logbook.log(logbook.CRITICAL, 'critical too')
     finally:
         handler.pop_thread()
     self.assert_(handler.has_debug('a debug message'))
     self.assert_(handler.has_info('an info message'))
     self.assert_(handler.has_warning('warning part 1'))
     self.assert_(handler.has_warning('warning part 2'))
     self.assert_(handler.has_notice('notice'))
     self.assert_(handler.has_error('an error'))
     self.assert_(handler.has_critical('pretty critical'))
     self.assert_(handler.has_critical('critical too'))
     self.assertEqual(handler.records[0].channel, 'Generic')
     self.assertEqual(handler.records[0].dispatcher, None)
Example #34
def main():
    start = time.time()

    args = parse_args()
    wwise_dir = Path(args.wwise_dir)
    out_dir = Path(args.out_dir)
    out_dir.mkdir(exist_ok=True)
    id_to_filename_path = out_dir / ID_TO_FILENAME

    manager = mp.Manager()
    queue = manager.Queue()
    id_queue = manager.Queue()

    setup_logging(queue)

    target_handlers = logbook.NestedSetup([
        logbook.NullHandler(),
        logbook.StreamHandler(sys.stdout, level=logbook.INFO, bubble=True),
        logbook.FileHandler("extract.log",
                            mode="w",
                            level=logbook.INFO,
                            bubble=True),
    ])

    sub = MultiProcessingSubscriber(queue)
    controller = sub.dispatch_in_background(target_handlers)

    quickbms_log_lock = manager.Lock()
    quickbms_log = out_dir / "quickbms.log"

    try:
        id_to_filename_path.unlink()
        logbook.info("removed old {id_file}",
                     id_file=id_to_filename_path.absolute())
    except FileNotFoundError:
        pass

    logbook.info("QuickBMS log: '{qlog}'", qlog=quickbms_log.absolute())
    try:
        quickbms_log.unlink()
        logbook.info("removed old {f}", f=quickbms_log.absolute())
    except FileNotFoundError:
        pass

    id_to_filename_path.touch()
    logbook.info("writing old ID -> new filename info in '{id_file}'",
                 id_file=id_to_filename_path.absolute())

    id_to_filename_p = mp.Process(target=id_to_filename_worker,
                                  args=(id_queue, id_to_filename_path, queue))
    id_to_filename_p.start()

    logbook.info("processing audio files in '{wd}'", wd=wwise_dir.absolute())

    fut2func = {}
    # Parse .bnk files and metadata.
    with ProcessPoolExecutor(max_workers=MAX_WORKERS) as executor:
        fut2func[executor.submit(parse_banks_metadata, wwise_dir,
                                 queue)] = parse_banks_metadata
        fut2func[executor.submit(decode_banks, wwise_dir, out_dir,
                                 quickbms_log, quickbms_log_lock,
                                 queue)] = decode_banks

    memory_bnk_meta_file2metadata = {}
    streamed_bnk_meta_file2metadata = {}
    orig_bnk2decode_info = {}
    for completed_fut in futures.as_completed(fut2func):
        if fut2func[completed_fut] == parse_banks_metadata:
            result = completed_fut.result()
            memory_bnk_meta_file2metadata = result[0]
            streamed_bnk_meta_file2metadata = result[1]
        elif fut2func[completed_fut] == decode_banks:
            orig_bnk2decode_info = completed_fut.result()

    if len(memory_bnk_meta_file2metadata) != len(orig_bnk2decode_info):
        logbook.warning(
            "Bank and metadata file counts "
            "do not match ({first} != {second})",
            first=len(orig_bnk2decode_info),
            second=len(memory_bnk_meta_file2metadata))

        s1 = memory_bnk_meta_file2metadata.keys()
        s2 = set([key.stem for key in orig_bnk2decode_info])

        to_del = []
        diff = s2.difference(s1)
        for d in diff:
            # TODO: expensive!
            for key in orig_bnk2decode_info:
                if key.stem == d:
                    logbook.warn("ignoring {f}", f=str(key))
                    to_del.append(key)

        for td in to_del:
            del orig_bnk2decode_info[td]

    wem_src2wem_dst = {}
    # Move .wem files to out_dir in correct places.
    with ProcessPoolExecutor(max_workers=MAX_WORKERS) as executor:
        for bnk_meta_file, meta in streamed_bnk_meta_file2metadata.items():
            for m in meta:
                src_dir = bnk_meta_file.parent
                src = src_dir / Path(m.generated_audio_file)
                if src.exists():
                    wwise_path = Path(m.wwise_object_path)
                    dst = out_dir / wwise_path.relative_to(
                        wwise_path.anchor).with_suffix(".wem")
                    executor.submit(copy, src, dst, queue, id_queue)
                    wem_src2wem_dst[src] = dst
                else:
                    logbook.warning(
                        "found references to {src} in metadata, but "
                        "the file cannot be found in wwise_dir",
                        src=src)

    decoded_file2metas = {}

    for orig_bnk_file, decode_info in orig_bnk2decode_info.items():
        orig_bnk_file = orig_bnk_file.stem
        meta = memory_bnk_meta_file2metadata[orig_bnk_file]

        if len(decode_info) != len(meta):
            logbook.error(
                "decode_info and meta length mismatch: "
                "{len1} != {len2} for bnk: '{bnk}'",
                len1=len(decode_info),
                len2=len(meta),
                bnk=orig_bnk_file)
            # print(decode_info)
            # print(meta)
            continue
            # raise ValueError(f"decode_info and meta length mismatch "
            #                  f"{len(decode_info)} != {len(meta)}")

        for m, (decoded_stem, decoded_size) in zip(meta, decode_info.items()):
            if m.data_size != decoded_size:
                # raise ValueError(f"{m.data_size} != {decoded_size}")
                logbook.error(
                    "metadata size and decoded data size length mismatch: "
                    "{len1} != {len2}",
                    len1=m.data_size,
                    len2=decoded_size)
                continue
            decoded_file2metas[decoded_stem] = m

    fs = []
    # Move output from decoding .bnk files to correct places in out_dir.
    executor = ProcessPoolExecutor(max_workers=MAX_WORKERS)
    for decoded_file, meta in decoded_file2metas.items():
        src = out_dir / f"{decoded_file}.bin"
        wwise_path = Path(meta.wwise_object_path)
        dst = out_dir / wwise_path.relative_to(
            wwise_path.anchor).with_suffix(".bin")
        fs.append(executor.submit(move, src, dst, queue, id_queue))

    futures.wait(fs, return_when=futures.ALL_COMPLETED)

    fs = []
    # Convert all .wem and .bin files to .ogg.
    executor = ProcessPoolExecutor(max_workers=MAX_WORKERS)
    for bin_file in out_dir.rglob("*.bin"):
        fs.append(executor.submit(ww2ogg, bin_file, queue))
    for wem_file in out_dir.rglob("*.wem"):
        fs.append(executor.submit(ww2ogg, wem_file, queue))

    futures.wait(fs, return_when=futures.ALL_COMPLETED)

    done_wems_stems = set([ws.stem for ws in wem_src2wem_dst.keys()])
    source_wems = [w for w in wwise_dir.rglob("*.wem")]
    source_wems_stems = set([w.stem for w in source_wems])
    wem_diff = source_wems_stems.difference(done_wems_stems)

    if wem_diff:
        logbook.warn("failed to determine filename for "
                     "{num} files",
                     num=len(wem_diff))

    for ws in source_wems:
        if str(ws.stem) in wem_diff:
            logbook.info("couldn't determine filename for: {ws}", ws=ws)
            copy_seq(ws, out_dir, id_to_filename_queue=None)

    # Convert leftovers.
    leftovers_fs = []
    with ProcessPoolExecutor(max_workers=MAX_WORKERS) as executor:
        for wem_file in out_dir.rglob("*.wem"):
            leftovers_fs.append(executor.submit(ww2ogg, wem_file, queue))

    futures.wait(leftovers_fs, return_when=futures.ALL_COMPLETED)

    id_queue.put(SENTINEL)
    id_to_filename_p.join()

    secs = time.time() - start
    logbook.info("finished successfully in {secs:.2f} seconds", secs=secs)

    controller.stop()
Example #35
 def test_calling_frame(self):
     handler = logbook.TestHandler()
     with handler:
         logbook.warn('test')
     self.assertEqual(handler.records[0].calling_frame, sys._getframe())
Example #36
def convert_latlon_arr(in_lat, in_lon, height, dtime, code="G2A"):
    """Converts between geomagnetic coordinates and AACGM coordinates.

    Parameters
    ------------
    in_lat : (np.ndarray or list or float)
        Input latitude in degrees N (code specifies type of latitude)
    in_lon : (np.ndarray or list or float)
        Input longitude in degrees E (code specifies type of longitude)
    height : (np.ndarray or list or float)
        Altitude above the surface of the earth in km
    dtime : (datetime)
        Single datetime object for magnetic field
    code : (int or str)
        Bit code or string denoting which type(s) of conversion to perform
        G2A        - geographic (geodetic) to AACGM-v2
        A2G        - AACGM-v2 to geographic (geodetic)
        TRACE      - use field-line tracing, not coefficients
        ALLOWTRACE - use trace only above 2000 km
        BADIDEA    - use coefficients above 2000 km
        GEOCENTRIC - assume inputs are geocentric w/ RE=6371.2
        (default = "G2A")

    Returns
    -------
    out_lat : (np.ndarray)
        Output latitudes in degrees N
    out_lon : (np.ndarray)
        Output longitudes in degrees E
    out_r : (np.ndarray)
        Geocentric radial distance (R_Earth) or altitude above the surface of
        the Earth (km)

    Notes
    -------
    At least one of in_lat, in_lon, and height must be a list or array.
    """
    import aacgmv2._aacgmv2 as c_aacgmv2

    # If a list was entered instead of a numpy array, recast it here
    if isinstance(in_lat, list):
        in_lat = np.array(in_lat)

    if isinstance(in_lon, list):
        in_lon = np.array(in_lon)

    if isinstance(height, list):
        height = np.array(height)

    # If one or two of these elements is a float or int, create an array
    test_array = np.array([
        hasattr(in_lat, "shape"),
        hasattr(in_lon, "shape"),
        hasattr(height, "shape")
    ])
    if not test_array.all():
        if test_array.any():
            arr_shape = in_lat.shape if test_array.argmax() == 0 else \
                        (in_lon.shape if test_array.argmax() == 1 else
                         height.shape)
            if not test_array[0]:
                in_lat = np.ones(shape=arr_shape, dtype=float) * in_lat
            if not test_array[1]:
                in_lon = np.ones(shape=arr_shape, dtype=float) * in_lon
            if not test_array[2]:
                height = np.ones(shape=arr_shape, dtype=float) * height
        else:
            logging.info(
                "for a single location, consider using convert_latlon")
            in_lat = np.array([in_lat])
            in_lon = np.array([in_lon])
            height = np.array([height])

    # Ensure that lat, lon, and height are the same length or if the lengths
    # differ that the different ones contain only a single value
    if not (in_lat.shape == in_lon.shape and in_lat.shape == height.shape):
        ulen = np.unique([in_lat.shape, in_lon.shape, height.shape])
        if ulen.min() != (1, ):
            logging.error("mismatched input arrays")
            return None, None, None

    # Test time
    if isinstance(dtime, dt.date):
        dtime = dt.datetime.combine(dtime, dt.time(0))

    assert isinstance(dtime, dt.datetime), \
        logging.error('time must be specified as datetime object')

    # Test height
    if np.min(height) < 0:
        logging.warn('conversion not intended for altitudes < 0 km')

    # Initialise output
    lat_out = np.empty(shape=in_lat.shape, dtype=float) * np.nan
    lon_out = np.empty(shape=in_lon.shape, dtype=float) * np.nan
    r_out = np.empty(shape=height.shape, dtype=float) * np.nan

    # Test code
    try:
        code = code.upper()

        if (np.nanmax(height) > 2000 and code.find("TRACE") < 0
                and code.find("ALLOWTRACE") < 0 and code.find("BADIDEA") < 0):
            estr = 'coefficients are not valid for altitudes above 2000 km. You'
            estr += ' must either use field-line tracing (trace=True '
            estr += 'or allowtrace=True) or indicate you know this '
            estr += 'is a bad idea'
            logging.error(estr)
            return lat_out, lon_out, r_out

        # make flag
        bit_code = convert_str_to_bit(code)
    except AttributeError:
        bit_code = code

    assert isinstance(bit_code, int), \
        logging.error("unknown code {:}".format(bit_code))

    # Test latitude range
    if np.abs(in_lat).max() > 90.0:
        assert np.abs(in_lat).max() <= 90.1, \
            logging.error('unrealistic latitude')
        in_lat = np.clip(in_lat, -90.0, 90.0)

    # Constrain longitudes between -180 and 180
    in_lon = ((in_lon + 180.0) % 360.0) - 180.0

    # Set current date and time
    try:
        c_aacgmv2.set_datetime(dtime.year, dtime.month, dtime.day, dtime.hour,
                               dtime.minute, dtime.second)
    except:
        raise RuntimeError("unable to set time for {:}".format(dtime))

    # Vectorise the AACGM code
    convert_vectorised = np.vectorize(c_aacgmv2.convert)

    # convert
    try:
        lat_out, lon_out, r_out = convert_vectorised(in_lat, in_lon, height,
                                                     bit_code)
    except:
        pass

    return lat_out, lon_out, r_out
Example #37
 def send_back():
     with MultiProcessingHandler(queue):
         logbook.warn(message)
Example #38
from logbook import warn, StreamHandler
import sys

from termcc.cc import cc

my_handler = StreamHandler(sys.stdout)
my_handler.push_application()
warn(cc(':red: :yin_yang: This is a warning :reset:'))

import os
from logbook import Processor


def inject_cwd(record):
    record.extra['cwd'] = os.getcwd()


with my_handler.applicationbound():
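    # Processor calls inject_cwd on every record logged in this scope, adding record.extra['cwd']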
    with Processor(inject_cwd).applicationbound():
        warn(cc(':blue: :yin_yang: This is a warning'))
Example #39
    def fetch(self):
        canteens = []

        for canteen in self.urls:
            mid, url = canteen

            weeks_to_parse = [""]
            parsed_weeks = []

            while len(weeks_to_parse) > 0:
                week_to_parse = weeks_to_parse.pop(0)

                if week_to_parse in parsed_weeks:
                    continue

                parsed_weeks.append(week_to_parse)

                # Fetch data
                if week_to_parse:
                    params = urlencode({"WA": week_to_parse})
                else:
                    params = urlencode({})

                try:
                    content = urlopen(url, params).read()
                except URLError:
                    error("Request failed for URL %s." % url)
                    continue

                # Build lxml tree
                tree = html.fromstring(content)

                # Look for further weeks to parse
                wboxh_elements = tree.cssselect("div#wbox > input")
                for wbox in wboxh_elements:
                    week_value = wbox.get("value")
                    if wbox.get("class") == "wboxh":
                        if (week_value not in weeks_to_parse
                                and week_value not in parsed_weeks):
                            weeks_to_parse.append(week_value)
                    else:
                        if week_value not in parsed_weeks:
                            parsed_weeks.append(week_value)
                            week_to_parse = week_value

                # Parse canteen infos
                canteen = {}
                canteen["mid"] = mid

                # Name
                canteen["name"] = beautify_string(tree.cssselect("div.WoHSName")[0].text)
                canteen["short_name"] = canteen["name"].split(",", 1)[0]

                # Address
                canteen["address"] = beautify_string(tree.cssselect("div.WoAdr")[0].text)
                canteen["address"] = canteen["address"].replace(")", "")
                canteen["address"] = canteen["address"].replace("(", "")

                # Period
                canteen["period_value"] = week_to_parse
                canteen["period"] = beautify_string(tree.cssselect("div.WoDate")[0].text)

                # Year
                try:
                    canteen["year"] = int(canteen["period"].rsplit(".", 1)[1])
                except (IndexError, ValueError):
                    error("%s: Failed to parse year from period" % canteen["short_name"])
                    canteen["year"] = date.today().year

                # Week
                try:
                    canteen["week"] = int(canteen["period"][canteen["period"].find("KW")+3:canteen["period"].find(",")])
                except ValueError:
                    error('%s: Failed to parse week number from "%s"' % (canteen["short_name"], canteen["period"]))
                    continue

                # Opening times
                opening_times = beautify_string(tree.cssselect("div.WoTime")[0].text_content())
                opening_times = opening_times[opening_times.find(":")+1:].strip()
                canteen["opening_times"] = opening_times

                # Main note
                main_note_e = tree.cssselect("div.Meldung_std")
                if main_note_e:
                    canteen["main_note"] = main_note_e[0].text_content()
                else:
                    canteen["main_note"] = ""

                # Additives
                try:
                    canteen["additives"] = beautify_string(tree.cssselect("table.zusatz_std")[0].text_content(), False)
                except IndexError:
                    warn("%s:  Cannot parse additives" % canteen["short_name"])

                # Parse meals
                # Subtree for better speed
                sub_trees = tree.cssselect("table.wo_std")
                if sub_trees:
                    sub_tree = sub_trees[0]

                    # Bars (table heads)
                    bars = []
                    for th in sub_tree.cssselect("th.bli_0"):
                        bar = beautify_string(th.getchildren()[0].text_content())
                        if bar:
                            bars.append(bar)

                    # Side dishes
                    side_dishes = []
                    for td in sub_tree.cssselect("td.bli_1.sw.bto"):
                        side_dish = self.parse_side_dish(td)
                        side_dishes.append(side_dish)

                    current_side_dish = 0

                    # Days
                    days = []
                    for i, tr in enumerate(sub_tree.cssselect("tr.bto")):
                        day = {}

                        # Date
                        date_s = beautify_string([e for e in tr.cssselect("td.bre_1")[0].itertext()][1])
                        # Make sure date has correct format
                        try:
                            day["date"] = str(date(*(strptime(date_s, "%d.%m.%Y")[0:3])))
                        except ValueError:
                            error("%s: %s has invalid date format" % (canteen["short_name"], date_s))
                            continue

                        # Note
                        note_td = tr.cssselect("td.Meld_TgStd")
                        if not note_td:
                            day["note"] = ""

                            # Side dish
                            try:
                                day["side_dish"] = side_dishes[current_side_dish]
                                current_side_dish = current_side_dish + 1
                            except IndexError:
                                warn("%s: Failed to parse side dish for day %s" % (canteen["short_name"], day["date"]))
                                day["side_dish"] = ""

                            # Prices
                            prices = []
                            for td in tr.cssselect("td.re"):
                                price = beautify_string(td.text_content())
                                if price:
                                    prices.append(price)

                            # Meals
                            meals = []
                            for j, td in enumerate(tr.cssselect("td.bli_1.abst_o")):
                                description = beautify_string(td.text_content())
                                if description:
                                    meal = {}

                                    try:
                                        if (j <= len(bars) - 1):
                                            meal["bar"] = bars[j]
                                        else:
                                            meal["bar"] = ""

                                        meal["description"] = description

                                        if (j <= len(prices) - 1):
                                            meal["price"] = prices[j]
                                        else:
                                            meal["price"] = ""
                                    except IndexError:
                                        error("%s: IndexError while parsing meals" % canteen["short_name"])

                                    nutrition_infos = self.parse_nutrition_infos(td)
                                    if nutrition_infos is not None:
                                        meal["nutrition_infos"] = nutrition_infos

                                    meals.append(meal)

                            day["meals"] = meals
                        else:
                            day["note"] = beautify_string(note_td[0].text_content())

                        days.append(day)

                    canteen["days"] = days

                canteens.append(canteen)

        return canteens
Example #40
        return [(s[0], gene, tuple(s[1:]), samples_path.format(s[0]), profile,
                 solver) for s in samples]

    samples = []
    for gene in 'CYP2D6 CYP2A6 CYP2C19 CYP2C8 CYP2C9 CYP3A4 CYP3A5 CYP4F2 TPMT DPYD'.split():
        loc = '/data/cb/inumanag/aldy/cdc/pgrnseq-v1/bams/{}.cram'
        samples += get_samples('PGRNseq-v1 (CDC)', gene, loc, 'pgrnseq-v1',
                               'gurobi')
    for gene in ['CYP2D6']:
        loc = '/data/cb/inumanag/aldy/baylor/pgrnseq-v2/bams/{}.cram'
        samples += get_samples('PGRNseq-v2', gene, loc, 'pgrnseq-v2', 'gurobi')

    pool = Pool(processes=np)
    result = pool.map(
        f, samples
    )  #[y for y in samples if y[0] == 'NA17012' and y[1] == 'CYP2D6'])
    result = list(result)
    logbook.warn('Passed {} out of {} ({} subset, {} multi)',
                 sum(1 for _, x in result if x > 0), len(result),
                 sum(1 for x in result if x == 2),
                 sum(1 for x in result if x == 3))

    fails = [':'.join(s) for s, x in result if x == 0]
    if len(fails) > 0:
        logbook.warn('Fail:\n{}', '\n   '.join(fails))

    # get_samples('PGRNseq-v2 (Old)', gene='CYP2D6', samples_path='/../data/pgrnseq-old/{}.dz', profile='pgrnseq-v2', threshold=.5)
    # get_samples('Illumina',      gene='CYP2D6', samples_path='/../data/illumina/{}.bam',        profile='illumina', threshold=.5)
    # get_samples('Illumina (IU)', gene='CYP2D6', samples_path='/../data/illumina-milan/{}.bam',  profile='illumina', threshold=.5)
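Example #41
def create_settings_file():
    filename = 'photobackup_settings.py'
    global input

    # Python2 compatibility for input()
    try:
        input = raw_input
    except NameError:
        pass

    # ask for the upload directory (should be writable by the server)
    media_root = input("The directory where to put the pictures" +
                       " (should be writable by the server you use): ")
    if not os.path.isdir(media_root):
        notice("Directory {} does not exist, creating it".format(media_root))
        os.mkdir(media_root)
    server_user = input("Owner of the directory [www-data]: ")
    if not server_user:
        server_user = 'www-data'

    try:
        server_user_uid = pwd.getpwnam(server_user).pw_uid
        if os.stat(media_root).st_uid != server_user_uid:
            notice("Changing owner to: {}".format(server_user))
            try:
                shutil.chown(media_root, server_user, server_user)
            except AttributeError: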
                warn("Can't change directory's owner, please do it correctly!")
    except KeyError:
        warn("User {} not found, please check the directory's rights."
             .format(server_user))

    # ask a password for the server
    text = "The server password that you use in the mobile app: "
    password = getpass.getpass(prompt=text)
    passhash = hashlib.sha512(password.encode('utf-8')).hexdigest()

    with open(filename, 'w') as settings:
        settings.write("# generated settings for PhotoBackup Bottle server\n")
        settings.write("MEDIA_ROOT = '{}'\n".format(media_root))
        settings.write("PASSWORD = '{}'\n".format(passhash))

    notice("Settings file is created, please launch me again!")
    return media_root, passhash

MEDIA_ROOT, PASSWORD = None, None

# import user-created settings for this specific server
try:
    from photobackup_settings import MEDIA_ROOT, PASSWORD
    if os.path.isdir(MEDIA_ROOT) and os.path.exists(MEDIA_ROOT):
        notice("pictures directory is " + MEDIA_ROOT)
    else:
        sys.exit("pictures directory " + MEDIA_ROOT + " does not exist!")
except ImportError:
    warn("Can't find photobackup_settings.py file, creating it")
    MEDIA_ROOT, PASSWORD = create_settings_file()
Example #42
 def __call__(self):
     from logbook.queues import MultiProcessingHandler
     with MultiProcessingHandler(self.queue):
         logbook.warn(self.message)
Example #43
 def __call__(self):
     from logbook.queues import MultiProcessingHandler
     with MultiProcessingHandler(self.queue):
         logbook.warn(self.message)
Example #44
 def send_back():
     with MultiProcessingHandler(queue):
         logbook.warn('Hello World')
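Example #45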
def save_image(username, domain):
    debug("got a POST root call.")
    # app.logger.debug('Query:'+pp.pformat(request))
    # app.logger.debug('Query:'+pp.pformat(request.form))
    password = request.form.get('password')
    if password != config['Password']:
        error("password NOT ok.")
        end(403, "wrong password!")

    debug("ok: password")
    # app.logger.debug('File:'+pp.pformat(request.files))
    try:
        debug("ok: getlist %s." % (pp.pformat(request.getlists)))
    except Exception as e:
        debug("getlist failed %s." % (str(e)))

    try:
        upfile = request.files.get('upfile')
        # upfile = request.files['upfile']
    except Exception as e:
        debug("upfile request get failed %s." % (str(e)))

    if not upfile:
        error("no file in the request.")
        end(401, "no file in the request!")
    upfile1 = upfile

    debug("ok: file present in request.")
    # remove anypath inside the filename to insure against injection.
    # ex: upfil.raw_filename should not contain any '..'
    # already done by secure_filename
    # filename = os.path.basename(filename)
    try:
        filename = secure_filename(upfile.filename)
    except Exception as e:
        debug("secure_filename failed: %s:%s" % (upfile.filename, str(e)))

    debug("ok: secure_filename succeed %s" % filename)
    # Prevent uploading file with more than 1 dot.
    dotCount = filename.count('.')
    if dotCount != 1:
        error("file do contains more than 1 dot.")
        end(403, "file contains more than 1 dot!")

    debug("ok: file do not contains more than 1 dot.")
    # Prevent uploading from unwanted file which can be used for injection
    extension = os.path.splitext(filename)[1].lower()
    if extension not in allowed_extention:
        error("file extension NOT allowed '%s'." % extension)
        debug("error: allowed %s." % (pp.pformat(allowed_extention)))
        end(403, "file extension not allowed!")

    debug("ok: file extension allowed.")

    # app.logger.debug('Query:'+pp.pformat(upfile))

    # extract the exif date, to get the date
    try:
        tags = exifread.process_file(upfile)
        # for tag in tags.keys():
        #    if tag not in ('JPEGThumbnail', 'TIFFThumbnail', 'Filename', 'EXIF MakerNote'):
        #        debug("Key: %s, value %s" % (tag, tags[tag]))
    except Exception as e:
        debug("exif not working %s" % str(e))

    debug("ok: got the img exif content")

    # EXIF DateTimeOriginal
    # value 2015:09:14 15:37:03
    try:
        if 'EXIF DateTimeOriginal' in tags:
            debug("ok: got exif EXIF DateTimeOriginal")
            date = str(tags['EXIF DateTimeOriginal'])
        elif 'Image DateTime' in tags:
            # generated PANO file
            debug("ok: got exif Image DateTime")
            date = str(tags['Image DateTime'])
        else:
            debug("hum, no date found in exif tag")
            date = ''
            for tag in tags.keys():
                if tag not in ('JPEGThumbnail', 'TIFFThumbnail', 'Filename', 'EXIF MakerNote'):
                    debug("Key: %s, value %s" % (tag, tags[tag]))

        debug("ok, no exception up to now")
    except Exception as e:
        for tag in tags.keys():
            if tag not in ('JPEGThumbnail', 'TIFFThumbnail', 'Filename', 'EXIF MakerNote'):
                debug("Key: %s, value %s" % (tag, tags[tag]))
        debug("error: failed to read date from read tags from array.")
        end(400, "oups, read tags from array")

    debug("ok: got exif date '%s'" % date)

    if username is not None and domain is not None:
        username = secure_filename(urllib.parse.quote_plus(username).lower())
        domain = secure_filename(urllib.parse.quote_plus(domain).lower())
        debug("username %s, domain %s" % (username, domain))
        basepath = os.path.join(config['MediaRoot'], domain, username)
    else:
        basepath = os.path.join(config['MediaRoot'])

    res = False
    try:
        res = re_date.match(date)
    except Exception as e:
        debug("error: failed to apply regex: %s " % str(e))
    if res:
        date_folder = res.group(1) + '_' + res.group(2) + '_' + res.group(3)
        debug("ok: exif passed '%s'." % date_folder)
        filedir = os.path.join(basepath, date_folder)
    else:
        debug("error: could not find date in string '%s'" % date)
        filedir = os.path.join(basepath)

    debug("ok: using folder '%s'." % filedir)

    filepath = os.path.join(filedir, filename)
    if not os.path.isdir(filedir):
        debug("Need to create folder %s on system." % (filedir))
        try:
            os.makedirs(filedir)
        except Exception as e:
            debug("error: Cannot create folder %s" % str(e))
            end(400, "oups, cannot create directory '%s'." % (str(e)))

    if not os.path.isfile(filepath):
        debug("Storing file %s on system." % (filepath))
        filesize = -1
        try:
            filesize = int(request.form.get('filesize'))
        except TypeError as e:
            debug("error: %s" % str(e))
            end(400, "missing file size in the request!")
        except Exception as e:
            debug("error: %s" % str(e))
            end(400, "missing file size in the request!")

        # save file
        debug("upfile path: '%s'." % (filepath))
        upfile.seek(0, os.SEEK_SET)
        upfile.save(filepath)

        # check file size in request against written file size
        if filesize != os.stat(filepath).st_size:
            debug("error: file sizes do not match '%s' <> '%s'." % (filesize, os.stat(filepath).st_size))
            end(411, "file sizes do not match!")

        return ('', 200)

    else:
        warn("file " + filepath + " already exists")
        filesize = -1
        try:
            filesize = int(request.form.get('filesize'))
        except TypeError as e:
            debug("error: %s" % str(e))
            end(400, "missing file size in the request!")
        except Exception as e:
            debug("error: %s" % str(e))
            end(400, "missing file size in the request!")

        debug("ok: got filesize from header '%s'." % filesize)

        # check file size in request against written file size
        if filesize != os.stat(filepath).st_size:
            debug("error: file sizes do not match '%s' <> '%s'" % (filesize, os.stat(filepath).st_size))
            end(411, "file sizes do not match!")

        return ('', 200)
Example #46
def convert_latlon(in_lat, in_lon, height, dtime, code="G2A"):
    """Converts between geomagnetic coordinates and AACGM coordinates

    Parameters
    ------------
    in_lat : (float)
        Input latitude in degrees N (code specifies type of latitude)
    in_lon : (float)
        Input longitude in degrees E (code specifies type of longitude)
    height : (float)
        Altitude above the surface of the earth in km
    dtime : (datetime)
        Datetime for magnetic field
    code : (str or int)
        Bit code or string denoting which type(s) of conversion to perform
        G2A        - geographic (geodetic) to AACGM-v2
        A2G        - AACGM-v2 to geographic (geodetic)
        TRACE      - use field-line tracing, not coefficients
        ALLOWTRACE - use trace only above 2000 km
        BADIDEA    - use coefficients above 2000 km
        GEOCENTRIC - assume inputs are geocentric w/ RE=6371.2
        (default is "G2A")

    Returns
    -------
    out_lat : (float)
        Output latitude in degrees N
    out_lon : (float)
        Output longitude in degrees E
    out_r : (float)
        Geocentric radial distance (R_Earth) or altitude above the surface of
        the Earth (km)
    """
    import aacgmv2._aacgmv2 as c_aacgmv2

    # Test time
    if isinstance(dtime, dt.date):
        dtime = dt.datetime.combine(dtime, dt.time(0))

    assert isinstance(dtime, dt.datetime), \
        logging.error('time must be specified as datetime object')

    # Test height
    if height < 0:
        logging.warn('conversion not intended for altitudes < 0 km')

    # Initialise output
    lat_out = np.nan
    lon_out = np.nan
    r_out = np.nan

    # Test code
    try:
        code = code.upper()

        if (height > 2000 and code.find("TRACE") < 0
                and code.find("ALLOWTRACE") < 0 and code.find("BADIDEA") < 0):
            estr = 'coefficients are not valid for altitudes above 2000 km. You'
            estr += ' must either use field-line tracing (trace=True '
            estr += 'or allowtrace=True) or indicate you know this '
            estr += 'is a bad idea'
            logging.error(estr)
            return lat_out, lon_out, r_out

        # make flag
        bit_code = convert_str_to_bit(code)
    except AttributeError:
        bit_code = code

    assert isinstance(bit_code, int), \
        logging.error("unknown code {:}".format(bit_code))

    # Test latitude range
    if abs(in_lat) > 90.0:
        assert abs(in_lat) <= 90.1, logging.error('unrealistic latitude')
        in_lat = np.sign(in_lat) * 90.0

    # Constrain longitudes between -180 and 180
    in_lon = ((in_lon + 180.0) % 360.0) - 180.0

    # Set current date and time
    try:
        c_aacgmv2.set_datetime(dtime.year, dtime.month, dtime.day, dtime.hour,
                               dtime.minute, dtime.second)
    except:
        raise RuntimeError("unable to set time for {:}".format(dtime))

    # convert location
    try:
        lat_out, lon_out, r_out = c_aacgmv2.convert(in_lat, in_lon, height,
                                                    bit_code)
    except:
        pass

    return lat_out, lon_out, r_out
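Example #47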
def put(path):
    debug("PUT with path '%s'" % (path))

    # remove anypath inside the filename to insure against injection.
    # ex: upfil.raw_filename should not contain any '..'
    # already done by secure_filename
    # filename = os.path.basename(filename)
    try:
        filename = secure_filename(path)
    except Exception as e:
        debug("secure_filename failed: %s:%s" % (path, str(e)))

    debug("ok: secure_filename succeed %s" % filename)
    # Prevent uploading file with more than 1 dot.
    dotCount = filename.count('.')
    if dotCount != 2:
        error("file do not contains more than 2 dot.")
        end(403, "file do not contains 2 dot!")

    debug("ok: file contains just 2 dot.")
    # Prevent uploading from unwanted file which can be used for injection
    root, ext = os.path.splitext(filename)
    first_ext = os.path.splitext(root)[1].lower()
    extension = first_ext + ext
    extension = extension.lower()
    if extension not in allowed_extention:
        error("file extension NOT allowed '%s'." % extension)
        debug("error: allowed %s." % (pp.pformat(allowed_extention)))
        end(403, "file extension not allowed!")

    debug("ok: file extension '%s' allowed." % (extension))

    basepath = os.path.join(config['MediaRoot'])
    filepath = os.path.join(basepath, filename)
    if not os.path.isdir(basepath):
        debug("Need to create folder '%s' on system." % (basepath))
        try:
            os.makedirs(basepath)
        except Exception as e:
            debug("error: Cannot create folder %s" % str(e))
            end(400, "oups, cannot create directory '%s'." % (str(e)))

    if not os.path.isfile(filepath):
        debug("Storing file %s on system." % (filepath))
        filesize = -1
        try:
            filesize = int(request.headers.get('Content-Length'))
        except TypeError as e:
            debug("error: %s" % str(e))
            end(400, "missing file size in the request!")
        except Exception as e:
            debug("error: %s" % str(e))
            end(400, "missing file size in the request!")

        upfile = request.data

        # save file
        debug("upfile path: '%s'." % (filepath))
        with open(filepath, "wb") as fo:
            fo.write(upfile)

        # check file size in request against written file size
        if filesize != os.stat(filepath).st_size:
            debug("error: file sizes do not match '%s' <> '%s'." % (filesize, os.stat(filepath).st_size))
            end(411, "file sizes do not match!")

        return ('', 201)

    else:
        warn("file " + filepath + " already exists")
        end(400, "file already exist!")
Example #48
 def test_calling_frame(self):
     handler = logbook.TestHandler()
     with handler:
         logbook.warn('test')
     self.assertEqual(handler.records[0].calling_frame, sys._getframe())