Beispiel #1
0
def compose(defs, target):
    '''Build and assemble through the defs tree until target exists.

    Returns the component's cache key on success, False when no cache
    key can be computed, and None when the component targets a
    different architecture.
    '''

    component = defs.get(target)
    if app.config.get('log-verbose'):
        app.log(target, "Composing", component['name'])

    # No cache key means the component cannot be created at all.
    if cache_key(defs, component) is False:
        return False

    # Already present in the local cache -- nothing left to do.
    if get_cache(defs, component):
        return cache_key(defs, component)

    # A remote artifact server (kbas) may already hold this component.
    if app.config.get('kbas-url'):
        with claim(defs, component):
            if get_remote(defs, component):
                app.config['counter'].increment()
                return cache_key(defs, component)

    wrong_arch = (component.get('arch') and
                  component['arch'] != app.config['arch'])
    if wrong_arch:
        return None

    with sandbox.setup(component):
        assemble(defs, component)
        needs_build = ('systems' not in component and
                       not get_cache(defs, component))
        if needs_build:
            install_dependencies(defs, component)
            build(defs, component)

    return cache_key(defs, component)
Beispiel #2
0
            def get_torrent_info(self, torrent):
                '''Return torrent info, preferring the cache.

                If this torrent was previously seen to time out (recorded
                in the cache), its last status change is pushed far into
                the past so it is treated as timed out immediately.
                '''
                # Dirty hack - make torrent timeout if we have seen it timeout in the past (cache)
                torrent_timeout_id = '%s_timeout' % torrent.hash
                if get_cache(torrent_timeout_id):
                    from datetime import timedelta
                    log.info('Torrent timeout found in cache for %s', torrent.hash)
                    # 100 weeks back guarantees any timeout threshold is exceeded.
                    torrent.last_status_change -= timedelta(weeks=100)
                    torrent.save()

                # torrent_info retrieval/cache
                cached_content = get_cache(torrent.hash)
                if cached_content:
                    log.info('Torrent info found in cache for %s', torrent.hash)
                    return cached_content
                else:
                    torrent_bt = default_get_torrent_info(self, torrent)
                    # Only cache once we've got the metadata
                    if torrent_bt.has_metadata:
                        log.info('Adding torrent info in cache for %s', torrent.hash)
                        set_cache(torrent.hash, torrent_bt)
                    # Or if the torrent is timeout
                    elif torrent.is_timeout(settings.BITTORRENT_METADATA_TIMEOUT):
                        log.info('Recording torrent timeout in cache for %s', torrent.hash)
                        set_cache(torrent_timeout_id, True)
                    return torrent_bt
Beispiel #3
0
def invited(token):
    '''Accept a game invitation identified by *token* and start the game.

    Looks up the cached invite, resolves the inviting user (if any),
    creates a game with the current request user playing black, and
    returns the new game token plus game info; an error response is
    returned when the invite does not exist.
    '''
    try:
        enemy_token, game_type, game_limit = get_cache(
            'invite_{}'.format(token))
    # BUGFIX: was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt; a missing/expired invite makes the unpacking
    # above raise an ordinary Exception, so catch that instead.
    except Exception:
        return send_error('game not found')
    enemy_user = None
    user_id = get_cache('user_{}'.format(enemy_token))
    if user_id:
        try:
            enemy_user = User.get(pk=user_id)
        except User.DoesNotExist:
            # TODO: if user not found game will be created with None as white player
            pass
    user_token = generate_token(True)
    game = Game.new_game(enemy_token,
                         user_token,
                         game_type,
                         game_limit,
                         white_user=enemy_user,
                         black_user=request.user)
    delete_cache('wait_{}'.format(enemy_token))
    result = {'game': user_token}
    result.update(game.get_info(consts.BLACK))
    return send_data(result)
Beispiel #4
0
 def downloadData(self, airport_icao, flight_number=None):
   '''Fetch flight data for an airport (optionally one flight), with a
   5-minute cache; each flight row is tagged with its cache provenance.'''
   infra = Infraero.Harvester()
   if flight_number:
       # Single-flight lookup: cache key is ICAO code + numeric flight number.
       key = airport_icao+str(int(flight_number))
       flight = get_cache(key)
       if not flight:
           flights = infra.request_flight(airport_icao, flight_number)
           set_cache(key, flights[0], 60*5)
           flights[0].append( ('from_cache','False') )
       else:
           flights = [ flight ]
           flights[0].append( ('from_cache','True') )
   else:
       # Whole-airport lookup, cached under the ICAO code alone.
       key = airport_icao
       flights = get_cache(key)

       if not flights:
           flights = infra.request_airport(airport_icao)
           set_cache(key, flights, 60*5)
           for flight in flights:
               # Also cache each flight under its own composite key;
               # assumes flight[1][1] holds the flight number -- TODO confirm.
               set_cache(key+str(int(flight[1][1])), flight, 60*5)
               flight.append( ('from_cache','False') )
       else:
           for flight in flights:
               flight.append( ('from_cache','True') )
   return flights
Beispiel #5
0
 def test_logout_2(self):
     '''Login then logout: the auth token is cached while logged in and
     cleared from the cache after logout.'''
     credentials = self.add_user('user1', 'password', 'user1@fakemail')
     auth_token = self.login(*credentials)
     self.assertIsNotNone(get_cache(auth_token))
     response = self.client.get(self.url('logout/'))
     payload = self.load_data(response)
     self.assertIn('message', payload)
     self.assertIsNone(get_cache(auth_token))
Beispiel #6
0
def assemble(defs, target):
    '''Assemble dependencies and contents recursively until target exists.

    Returns the component's cache key on success (including when the
    artifact was already cached locally or fetched from a remote kbas
    server), False when no cache key can be computed, and None when the
    component targets a different architecture.
    '''

    component = defs.get(target)

    # Without a cache key the component cannot be identified or built.
    if cache_key(defs, component) is False:
        return False

    # Already in the local artifact cache -- nothing to do.
    if get_cache(defs, component):
        return cache_key(defs, component)

    # Try fetching a pre-built artifact from the remote kbas server.
    if app.config.get('kbas-url'):
        with claim(defs, component):
            if get_remote(defs, component):
                app.config['counter'].increment()
                return cache_key(defs, component)

    # Reseed before the shuffles below -- presumably so parallel builder
    # instances pick different work orders; TODO confirm intent.
    random.seed(datetime.datetime.now())

    if component.get('arch') and component['arch'] != app.config['arch']:
        return None

    sandbox.setup(component)

    # Recursively assemble any (sub)systems first, in random order.
    systems = component.get('systems', [])
    random.shuffle(systems)
    for system in systems:
        assemble(defs, system['path'])
        for subsystem in system.get('subsystems', []):
            assemble(defs, subsystem)

    # Stage build dependencies into the sandbox.
    dependencies = component.get('build-depends', [])
    for it in dependencies:
        preinstall(defs, component, it)

    # Stage contents (except bootstrap-mode components), in random order.
    contents = component.get('contents', [])
    random.shuffle(contents)
    for it in contents:
        subcomponent = defs.get(it)
        if subcomponent.get('build-mode', 'staging') != 'bootstrap':
            preinstall(defs, component, subcomponent)

    # Leaf components (no 'systems' key) are built here unless another
    # instance produced the artifact in the meantime.
    if 'systems' not in component and not get_cache(defs, component):
        if app.config.get('instances', 1) > 1:
            with claim(defs, component):
                # in here, exceptions get eaten
                do_build(defs, component)
        else:
            # in here, exceptions do not get eaten
            do_build(defs, component)

    app.remove_dir(component['sandbox'])

    return cache_key(defs, component)
Beispiel #7
0
def assemble(defs, target):
    '''Assemble dependencies and contents recursively until target exists.

    Result: the cache key when the artifact exists or was built/fetched,
    False when the cache key cannot be computed, None for a component
    built for another architecture.
    '''

    component = defs.get(target)

    # A component with no computable cache key cannot be created.
    if cache_key(defs, component) is False:
        return False

    # Fast path: artifact already in the local cache.
    if get_cache(defs, component):
        return cache_key(defs, component)

    # Optionally pull a pre-built artifact from the remote kbas server.
    if app.config.get('kbas-url'):
        with claim(defs, component):
            if get_remote(defs, component):
                app.config['counter'].increment()
                return cache_key(defs, component)

    # Reseed so the random.shuffle calls below vary between runs --
    # NOTE(review): presumably to spread concurrent builders; confirm.
    random.seed(datetime.datetime.now())

    if component.get('arch') and component['arch'] != app.config['arch']:
        return None

    sandbox.setup(component)

    # (Sub)systems are assembled first, in randomised order.
    systems = component.get('systems', [])
    random.shuffle(systems)
    for system in systems:
        assemble(defs, system['path'])
        for subsystem in system.get('subsystems', []):
            assemble(defs, subsystem)

    # Then build dependencies are staged into the sandbox.
    dependencies = component.get('build-depends', [])
    for it in dependencies:
        preinstall(defs, component, it)

    # Then contents, skipping bootstrap-mode components.
    contents = component.get('contents', [])
    random.shuffle(contents)
    for it in contents:
        subcomponent = defs.get(it)
        if subcomponent.get('build-mode', 'staging') != 'bootstrap':
            preinstall(defs, component, subcomponent)

    # Build leaf components unless the artifact appeared meanwhile.
    if 'systems' not in component and not get_cache(defs, component):
        if app.config.get('instances', 1) > 1:
            with claim(defs, component):
                # in here, exceptions get eaten
                do_build(defs, component)
        else:
            # in here, exceptions do not get eaten
            do_build(defs, component)

    app.remove_dir(component['sandbox'])

    return cache_key(defs, component)
Beispiel #8
0
 def test_draw_refuse_2(self):
     '''White offers a draw (flag cached, opponent notified); black then
     refuses, which must clear both cached draw flags.'''
     with patch('game.Game.send_ws') as ws_mock:
         self.game.draw_accept(WHITE)
         ws_mock.assert_called_once_with('opponent offered draw',
                                         WS_DRAW_REQUEST, BLACK)
     self.assertFalse(get_cache(self.game._get_draw_name(BLACK)))
     self.assertTrue(get_cache(self.game._get_draw_name(WHITE)))
     # Refusal by black clears both sides' draw flags.
     self.game.draw_refuse(BLACK)
     self.assertFalse(get_cache(self.game._get_draw_name(BLACK)))
     self.assertFalse(get_cache(self.game._get_draw_name(WHITE)))
Beispiel #9
0
def install(dn, component):
    '''Populate dn['sandbox'] with the unpacked artifact files of component.

    No-op when the component's .meta marker already exists in the
    sandbox. System artifacts are copied; everything else is hardlinked.
    '''
    if os.path.exists(os.path.join(dn['sandbox'], 'baserock',
                                   component['name'] + '.meta')):
        return
    app.log(dn, 'Sandbox: installing %s' % component['cache'], verbose=True)
    if cache.get_cache(component) is False:
        # exit=True makes app.log terminate the program here.
        app.log(dn, 'Unable to get cache for', component['name'], exit=True)
    unpackdir = cache.get_cache(component) + '.unpacked'
    # BUGFIX: `is 'system'` compared object identity of str literals,
    # which depends on interning; compare by value.
    if dn.get('kind') == 'system':
        utils.copy_all_files(unpackdir, dn['sandbox'])
    else:
        utils.hardlink_all_files(unpackdir, dn['sandbox'])
Beispiel #10
0
def install(defs, this, component):
    '''Populate this['sandbox'] with the artifact files from component.

    Skips work if the component's .meta marker is already present.
    Systems are copied into the sandbox; other kinds are hardlinked.
    '''
    if os.path.exists(os.path.join(this["sandbox"], "baserock", component["name"] + ".meta")):
        return
    if app.config.get("log-verbose"):
        app.log(this, "Installing %s" % component["cache"])
    if cache.get_cache(defs, component) is False:
        app.exit(this, "ERROR: unable to get cache for", component["name"])
    unpackdir = cache.get_cache(defs, component) + ".unpacked"
    # BUGFIX: `is "system"` compared object identity of str literals,
    # which is implementation-dependent; compare by value.
    if this.get("kind") == "system":
        utils.copy_all_files(unpackdir, this["sandbox"])
    else:
        utils.hardlink_all_files(unpackdir, this["sandbox"])
Beispiel #11
0
def assemble(defs, target):
    '''Assemble dependencies and contents recursively until target exists.

    Returns the cache key when the artifact exists (locally or remotely),
    or None when the component is for a different architecture. Raises
    when another instance is already building the component.
    '''

    component = defs.get(target)
    if get_cache(defs, component) or get_remote(defs, component):
        return cache_key(defs, component)

    # Reseed so the shuffles below vary between runs -- NOTE(review):
    # presumably to spread parallel builders over different work.
    random.seed(datetime.datetime.now())

    if component.get('arch') and component['arch'] != app.config['arch']:
        app.log(target, 'Skipping assembly for', component.get('arch'))
        return None

    sandbox.setup(component)

    # Assemble (sub)systems first, in random order.
    systems = component.get('systems', [])
    random.shuffle(systems)
    for system in systems:
        assemble(defs, system['path'])
        for subsystem in system.get('subsystems', []):
            assemble(defs, subsystem)

    # Stage build dependencies into the sandbox.
    dependencies = component.get('build-depends', [])
    for it in dependencies:
        preinstall(defs, component, it)

    # Stage contents, skipping bootstrap-mode components.
    contents = component.get('contents', [])
    random.shuffle(contents)
    for it in contents:
        subcomponent = defs.get(it)
        if subcomponent.get('build-mode', 'staging') != 'bootstrap':
            preinstall(defs, component, subcomponent)

    if 'systems' not in component:
        # Another instance is building this component: back off briefly
        # and raise so the caller can retry.
        if is_building(defs, component):
            import time
            time.sleep(10)
            raise Exception

        app.config['counter'] += 1
        if not get_cache(defs, component):
            with app.timer(component, 'build of %s' % component['cache']):
                with claim(defs, component):
                    build(defs, component)

    # Record the manifest and store the finished artifact.
    with app.timer(component, 'artifact creation'):
        do_manifest(component)
        cache(defs, component)
    sandbox.remove(component)

    return cache_key(defs, component)
Beispiel #12
0
 def check_draw(self):
     '''End the game as a draw when both players have offered one.

     Returns True if the game was ended here, False otherwise.
     '''
     if self.model.ended:
         return False
     white_flag = self._get_draw_name(consts.WHITE)
     black_flag = self._get_draw_name(consts.BLACK)
     if not (get_cache(white_flag) and get_cache(black_flag)):
         return False
     self.model.game_over(consts.END_DRAW)
     delete_cache(white_flag)
     delete_cache(black_flag)
     info = self.get_info()
     self.send_ws(info, consts.WS_DRAW, consts.WHITE)
     self.send_ws(info, consts.WS_DRAW, consts.BLACK)
     self.onMove()
     return True
Beispiel #13
0
def install(dn, component):
    '''Populate dn['sandbox'] with the artifact files from component.

    No-op when the component's .meta marker already exists. System
    artifacts are copied; everything else is hardlinked.
    '''
    if os.path.exists(
            os.path.join(dn['sandbox'], 'baserock',
                         component['name'] + '.meta')):
        return
    app.log(dn, 'Sandbox: installing %s' % component['cache'], verbose=True)
    if cache.get_cache(component) is False:
        # exit=True causes app.log to terminate the program.
        app.log(dn, 'Unable to get cache for', component['name'], exit=True)
    unpackdir = cache.get_cache(component) + '.unpacked'
    # BUGFIX: `is 'system'` relied on string interning; use equality.
    if dn.get('kind') == 'system':
        utils.copy_all_files(unpackdir, dn['sandbox'])
    else:
        utils.hardlink_all_files(unpackdir, dn['sandbox'])
Beispiel #14
0
def deploy_system(system_spec, parent_location=''):
    '''Deploy a system and subsystems recursively.

    Takes a system spec (i.e. an entry in the "systems" list in a cluster
    definition), and optionally a path to a parent system tree. If
    `parent_location` is given then the `location` given in the cluster
    definition for the subsystem is appended to `parent_location`, with
    the result being used as the location for the deployment extensions.

    NOTE(review): `.iteritems()` and `dict(a.items() + b.items())` below
    are Python 2 only.
    '''
    system = app.defs.get(system_spec['path'])
    if not cache.get_cache(system):
        # exit=True makes app.log terminate the program here.
        app.log('DEPLOY',
                'System is not built, cannot deploy:\n',
                system,
                exit=True)
    deploy_defaults = system_spec.get('deploy-defaults')

    with sandbox.setup(system):
        # Unpack the built system artifact into the sandbox.
        app.log(system, 'Extracting system artifact into', system['sandbox'])
        with open(cache.get_cache(system), 'r') as artifact:
            call(['tar', 'x', '--directory', system['sandbox']],
                 stdin=artifact)

        # Recurse into subsystems, merging deploy-defaults into each spec.
        for subsystem in system_spec.get('subsystems', []):
            if deploy_defaults:
                subsystem = dict(deploy_defaults.items() + subsystem.items())
            deploy_system(subsystem, parent_location=system['sandbox'])

        for name, deployment in system_spec.get('deploy', {}).iteritems():
            method = deployment.get('type') or deployment.get('upgrade-type')
            method = os.path.basename(method)
            if deploy_defaults:
                deployment = dict(deploy_defaults.items() + deployment.items())
            do_deployment_manifest(system, deployment)
            # Re-anchor (upgrade-)location under the parent system tree.
            if parent_location:
                for l in ['location', 'upgrade-location']:
                    if l in deployment:
                        dn = deployment[l].lstrip('/')
                        deployment[l] = os.path.join(parent_location, dn)
            try:
                sandbox.run_extension(system, deployment, 'check', method)
            except KeyError:
                # A missing check extension is logged but not fatal.
                app.log(system, "Couldn't find a check extension for", method)

            for ext in system.get('configuration-extensions', []):
                sandbox.run_extension(system, deployment, 'configure',
                                      os.path.basename(ext))
            os.chmod(system['sandbox'], 0o755)
            sandbox.run_extension(system, deployment, 'write', method)
Beispiel #15
0
def install(defs, this, component):
    '''Populate this['sandbox'] with the artifact files from component.

    Skips installation when the component's .meta marker already exists.
    System artifacts are copied; everything else is hardlinked.
    '''
    if os.path.exists(os.path.join(this['sandbox'], 'baserock',
                                   component['name'] + '.meta')):
        return
    if app.config.get('log-verbose'):
        app.log(this, 'Sandbox: installing %s' % component['cache'])
    if cache.get_cache(defs, component) is False:
        app.exit(this, 'ERROR: unable to get cache for', component['name'])
    unpackdir = cache.get_cache(defs, component) + '.unpacked'
    # BUGFIX: `is 'system'` compared str identity (interning-dependent);
    # compare by value.
    if this.get('kind') == 'system':
        utils.copy_all_files(unpackdir, this['sandbox'])
    else:
        utils.hardlink_all_files(unpackdir, this['sandbox'])
Beispiel #16
0
def install(defs, this, component):
    '''Populate this['sandbox'] with the artifact files from component.

    No-op when the component's .meta marker already exists. Systems are
    copied into the sandbox; other kinds are hardlinked.
    '''
    if os.path.exists(
            os.path.join(this['sandbox'], 'baserock',
                         component['name'] + '.meta')):
        return
    if app.config.get('log-verbose'):
        app.log(this, 'Installing %s' % component['cache'])
    if cache.get_cache(defs, component) is False:
        app.exit(this, 'ERROR: unable to get cache for', component['name'])
    unpackdir = cache.get_cache(defs, component) + '.unpacked'
    # BUGFIX: `is 'system'` compared object identity of str literals;
    # compare by value.
    if this.get('kind') == 'system':
        utils.copy_all_files(unpackdir, this['sandbox'])
    else:
        utils.hardlink_all_files(unpackdir, this['sandbox'])
Beispiel #17
0
def deploy_system(system_spec, parent_location=''):
    '''Deploy a system and subsystems recursively.

    Takes a system spec (i.e. an entry in the "systems" list in a cluster
    definition), and optionally a path to a parent system tree. If
    `parent_location` is given then the `location` given in the cluster
    definition for the subsystem is appended to `parent_location`, with
    the result being used as the location for the deployment extensions.

    NOTE(review): `.iteritems()` and `dict(a.items() + b.items())` are
    Python 2 constructs.
    '''
    system = app.defs.get(system_spec['path'])
    if not cache.get_cache(system):
        # exit=True terminates the program from within app.log.
        app.log('DEPLOY', 'System is not built, cannot deploy:\n', system,
                exit=True)
    deploy_defaults = system_spec.get('deploy-defaults')

    with sandbox.setup(system):
        # Unpack the built artifact into the sandbox tree.
        app.log(system, 'Extracting system artifact into', system['sandbox'])
        with open(cache.get_cache(system), 'r') as artifact:
            call(['tar', 'x', '--directory', system['sandbox']],
                 stdin=artifact)

        # Recurse into subsystems, folding deploy-defaults into each spec.
        for subsystem in system_spec.get('subsystems', []):
            if deploy_defaults:
                subsystem = dict(deploy_defaults.items() + subsystem.items())
            deploy_system(subsystem, parent_location=system['sandbox'])

        for name, deployment in system_spec.get('deploy', {}).iteritems():
            method = deployment.get('type') or deployment.get('upgrade-type')
            method = os.path.basename(method)
            if deploy_defaults:
                deployment = dict(deploy_defaults.items() + deployment.items())
            do_deployment_manifest(system, deployment)
            # Re-anchor (upgrade-)location paths under the parent tree.
            if parent_location:
                for l in ['location', 'upgrade-location']:
                    if l in deployment:
                        dn = deployment[l].lstrip('/')
                        deployment[l] = os.path.join(parent_location, dn)
            try:
                sandbox.run_extension(system, deployment, 'check', method)
            except KeyError:
                # Missing check extension: logged, not fatal.
                app.log(system, "Couldn't find a check extension for", method)

            for ext in system.get('configuration-extensions', []):
                sandbox.run_extension(system, deployment, 'configure',
                                      os.path.basename(ext))
            os.chmod(system['sandbox'], 0o755)
            sandbox.run_extension(system, deployment, 'write', method)
Beispiel #18
0
def deploy(target):
    '''Deploy systems and subsystems recursively'''

    defs = Definitions()
    # Accept either a spec dict directly or a name to look up in defs.
    deployment = target if type(target) is dict else defs.get(target)

    with app.timer(deployment, 'Starting deployment'):
        # Deploy nested systems/subsystems before this one.
        for system in deployment.get('systems', []):
            deploy(system)
            for subsystem in system.get('subsystems', []):
                deploy(subsystem)

        system = defs.get(deployment['path'])
        if system.get('arch') and system['arch'] != app.settings['arch']:
            app.log(target, 'Skipping deployment for', system['arch'])
            return None

        sandbox.setup(system)
        # NOTE(review): the loop variable rebinds `deployment`, shadowing
        # the spec dict above; `.iteritems()` means this is Python 2.
        for name, deployment in deployment.get('deploy', {}).iteritems():
            method = os.path.basename(deployment['type'])
            sandbox.run_extension(system, deployment, 'check', method)
            app.log(system, "Extracting system artifact")
            with open(cache.get_cache(system), "r") as artifact:
                call(['tar', 'x', '--directory', system['sandbox']],
                     stdin=artifact)

            for ext in system.get('configuration-extensions', []):
                sandbox.run_extension(system, deployment, 'configure',
                                      os.path.basename(ext))

            os.chmod(system['sandbox'], 0o755)
            sandbox.run_extension(system, deployment, 'write', method)
        sandbox.remove(system)
Beispiel #19
0
def install_contents(dn, contents=None):
    ''' Install contents (recursively) into dn['sandbox'] '''

    if contents is None:
        contents = dn.get('contents', [])

    log(dn, 'Installing contents\n', contents, verbose=True)

    # NOTE: shuffle mutates the caller's list; order is randomised,
    # presumably so parallel builders start on different items -- confirm.
    shuffle(contents)
    for it in contents:
        item = app.defs.get(it)
        if os.path.exists(os.path.join(dn['sandbox'],
                                       'baserock', item['name'] + '.meta')):
            # content has already been installed
            log(dn, 'Already installed', item['name'], verbose=True)
            continue

        # Install nested contents first, one item at a time.
        for i in item.get('contents', []):
            install_contents(dn, [i])

        if item.get('build-mode', 'staging') != 'bootstrap':
            # Build the item on demand when it is not cached yet.
            if not get_cache(item):
                compose(item)
            sandbox.install(dn, item)

    if config.get('log-verbose'):
        log(dn, 'Added contents\n', contents)
        sandbox.list_files(dn)
Beispiel #20
0
def watch(src, fn, cache_name):
  '''
  Watch for file name patterns and execute a function when updates occur.

  src is a list of glob patterns; fn is a callback that receives the list
  of updated (or removed) file paths. Modification times are persisted in
  a pickle file so changes are detected across invocations.
  '''
  cache = {}
  cache_path = get_cache(cache_name)
  if os.path.exists(cache_path):
    with open(cache_path, 'r') as stored:
      cache = pickle.load(stored)

  fpaths = chain(*[glob(patt) for patt in src])
  removed = cache.copy()
  updated = []
  for path in fpaths:
    removed.pop(path, None)
    lmtime = cache.get(path, None)
    mtime = datetime.datetime.fromtimestamp(os.path.getmtime(path))
    # New file (no cached mtime) or modified since the last run.
    if lmtime is None or lmtime < mtime:
      cache[path] = mtime
      updated.append(path)

  # Anything left in `removed` was cached but no longer matches any
  # pattern: report it as an update and forget it.
  for path in removed:
    updated.append(path)
    del cache[path]

  if updated:
    fn(updated)
    with open(cache_path, 'w') as store:
      pickle.dump(cache, store)
Beispiel #21
0
def store_in_mongo():
    BASE  = '../cache/'
    FILES = [os.path.join(BASE,fn) for fn in next(os.walk(BASE))[2]]

    conn = pymongo.MongoClient("localhost", 27017)
    db   = conn.twitter
    coll = db.data

    for f in FILES:

        fn = f.split('/')[2]

        if fn[0] == '.': continue

        uid, types = fn.split('_')
        uid = int(uid)
        ctype, ftype = types.split('.')

        print uid, ctype, ftype

        data = get_cache(uid, ctype, ftype)

        if ctype == 'tweets':
            for i in range(len(data)):
                data[i] = traverse_dict(data[i])

        if ctype in ('info'):
            data = traverse_dict(data)

        coll.insert({'id': uid, 'type': ctype, 'data': data})
Beispiel #22
0
def store_in_mongo():
    BASE = '../cache/'
    FILES = [os.path.join(BASE, fn) for fn in next(os.walk(BASE))[2]]

    conn = pymongo.MongoClient("localhost", 27017)
    db = conn.twitter
    coll = db.data

    for f in FILES:

        fn = f.split('/')[2]

        if fn[0] == '.': continue

        uid, types = fn.split('_')
        uid = int(uid)
        ctype, ftype = types.split('.')

        print uid, ctype, ftype

        data = get_cache(uid, ctype, ftype)

        if ctype == 'tweets':
            for i in range(len(data)):
                data[i] = traverse_dict(data[i])

        if ctype in ('info'):
            data = traverse_dict(data)

        coll.insert({'id': uid, 'type': ctype, 'data': data})
Beispiel #23
0
def assemble(target):
    '''Assemble dependencies and contents recursively until target exists.'''
    # Already built and cached -- nothing to do.
    if cache.get_cache(target):
        return

    defs = Definitions()
    this = defs.get(target)

    with app.timer(this, 'Starting assembly'):
        with sandbox.setup(this):
            # Build-dependencies are assembled and installed first.
            for it in this.get('build-depends', []):
                dependency = defs.get(it)
                assemble(dependency)
                sandbox.install(this, dependency)

            # Then the contents, skipping bootstrap-mode components.
            for it in this.get('contents', []):
                component = defs.get(it)
                if component.get('build-mode') == 'bootstrap':
                    continue
                assemble(component)
                sandbox.install(this, component)

            if this.get('build-mode') != 'bootstrap':
                sandbox.ldconfig(this)
            else:
                app.log(this, "No ldconfig because bootstrap mode is engaged")

            build(this)

            if this.get('devices'):
                sandbox.create_devices(this)
            # Store the finished artifact in the cache.
            cache.cache(this)
Beispiel #24
0
def get_messages(ticker, data_folder):
    '''Return the latest StockTwits messages for *ticker*, via a file cache.'''
    def response():
        return requests.get(f'https://api.stocktwits.com/api/2/streams/symbol/{ticker}.json')

    file_cache = cache.get_cache('STOCK_TWITS', data_folder)
    resp = file_cache.get(key=ticker, createfunc=response)
    return resp.json()['messages']
Beispiel #25
0
 def load_game(cls, token):
     '''Load a game by token and refresh its time/draw/castle state.

     Raises GameNotStartedError when the token maps to a still-waiting
     invite; any other failure is reported as GameNotFoundError.
     '''
     try:
         try:
             game_model = models.Game.get_game(token)
         except models.Game.DoesNotExist:
             # Not a started game -- maybe an invite still waiting.
             data = get_cache('wait_{}'.format(token))
             if data:
                 raise errors.GameNotStartedError(*data)
             raise errors.GameNotFoundError
         game = cls(game_model.white, game_model.black)
         game.model = game_model
         game.game = engine.Game(game.model.state, game.model.next_color,
                                 game.model.cut)
         game._loaded_by = game_model._loaded_by
         # Declare win/loss immediately when a player's clock ran out.
         if game.model.is_time_over():
             winner = game.model.winner
             loser = invert_color(winner)
             game.send_ws(game.get_info(loser), consts.WS_LOSE, loser)
             game.send_ws(game.get_info(winner), consts.WS_WIN, winner)
         if not game.model.ended:
             game.check_draw()
             game.check_castles()
     except errors.GameNotStartedError:
         raise
     except:
         # Deliberate catch-all: any loading failure is surfaced to the
         # caller as "game not found".
         raise errors.GameNotFoundError
     return game
Beispiel #26
0
def install_contents(dn, contents=None):
    ''' Install contents (recursively) into dn['sandbox'] '''

    if contents is None:
        contents = dn.get('contents', [])

    log(dn, 'Installing contents\n', contents, verbose=True)

    # NOTE: shuffle mutates the caller's list; randomised order --
    # presumably so parallel builders start on different items; confirm.
    shuffle(contents)
    for it in contents:
        item = app.defs.get(it)
        if os.path.exists(
                os.path.join(dn['sandbox'], 'baserock',
                             item['name'] + '.meta')):
            # content has already been installed
            log(dn, 'Already installed', item['name'], verbose=True)
            continue

        # Install nested contents first, one item at a time.
        for i in item.get('contents', []):
            install_contents(dn, [i])

        if item.get('build-mode', 'staging') != 'bootstrap':
            # Build the item on demand when it is not cached yet.
            if not get_cache(item):
                compose(item)
            sandbox.install(dn, item)

    if config.get('log-verbose'):
        log(dn, 'Added contents\n', contents)
        sandbox.list_files(dn)
Beispiel #27
0
def load_manifest(defs, target):
    '''Load the .meta manifest for target and record it in defs.

    Marks the definition as '_loaded' (and stores '_artifacts' when the
    metadata lists products). Returns None when the metadata file cannot
    be read or parsed.
    '''
    cachepath, cachedir = os.path.split(cache.get_cache(defs, target))
    metafile = cachepath + ".meta"
    metadata = None
    definition = defs.get(target)
    name = definition['name']

    # A target may be referenced by path (str) or by definition dict.
    path = target if type(target) is str else target['name']

    try:
        with open(metafile, "r") as f:
            metadata = yaml.safe_load(f)
    # BUGFIX: was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt; catch Exception instead.
    except Exception:
        app.log(name, 'WARNING: problem loading metadata', metafile)
        return None

    if metadata:
        app.log(name, 'loaded metadata for', path)
        defs.set_member(path, '_loaded', True)
        if metadata.get('products'):
            defs.set_member(path, '_artifacts', metadata['products'])
def newepisodes(episodes, cachefile, name):
    '''Check the episodes against the pickle cache and return only the
    ones we have not seen before.'''

    # Read the cache of previously-seen episode names.
    seen = get_cache(cachefile)

    fresh = {}
    for episode in episodes:
        # The numeric token in the episode record is its episode number.
        for token in episode:
            if token.isdigit():
                epnum = token
                episodename = name + epnum

        # Keep the episode unless its name already appears in the cache.
        if not any(episodename in entry for entry in seen):
            fresh[epnum] = episode

    return list(fresh.values())
Beispiel #29
0
    def _build(self, env, output_path, force, no_filters, parent_filters=None):
        """Internal recursive build method.

        Resolves this bundle's contents, applies the combined input
        filters (this bundle's plus the parent's) to each leaf, recurses
        into nested bundles, and returns the merged hunk with output
        filters applied (unless ``no_filters`` is set).
        """
        # BUGFIX: the previous default `parent_filters=[]` was a mutable
        # default shared between all calls; use None as the sentinel.
        if parent_filters is None:
            parent_filters = []

        # TODO: We could support a nested bundle downgrading it's debug
        # setting from "filters" to "merge only", i.e. enabling
        # ``no_filters``. We cannot support downgrading to
        # "full debug/no merge" (debug=True), of course.
        #
        # Right now we simply use the debug setting of the root bundle
        # we build, und it overrides all the nested bundles. If we
        # allow nested bundles to overwrite the debug value of parent
        # bundles, as described above, then we should also deal with
        # a child bundle enabling debug=True during a merge, i.e.
        # raising an error rather than ignoring it as we do now.
        resolved_contents = self.resolve_contents(env)
        if not resolved_contents:
            raise BuildError('empty bundle cannot be built')

        # Ensure that the filters are ready
        for filter in self.filters:
            filter.set_environment(env)

        # Apply input filters to all the contents. Note that we use
        # both this bundle's filters as well as those given to us by
        # the parent. We ONLY do those this for the input filters,
        # because we need them to be applied before the apply our own
        # output filters.
        # TODO: Note that merge_filters() removes duplicates. Is this
        # really the right thing to do, or does it just confuse things
        # due to there now being different kinds of behavior...
        combined_filters = merge_filters(self.filters, parent_filters)
        cache = get_cache(env)
        hunks = []
        for c in resolved_contents:
            if isinstance(c, Bundle):
                hunk = c._build(env, output_path, force, no_filters,
                                combined_filters)
                hunks.append(hunk)
            else:
                if is_url(c):
                    hunk = UrlHunk(c)
                else:
                    hunk = FileHunk(env.abspath(c))
                if no_filters:
                    hunks.append(hunk)
                else:
                    hunks.append(
                        apply_filters(hunk,
                                      combined_filters,
                                      'input',
                                      cache,
                                      output_path=output_path))

        # Return all source hunks as one, with output filters applied
        final = merge(hunks)
        if no_filters:
            return final
        else:
            return apply_filters(final, self.filters, 'output', cache)
Beispiel #30
0
def build_profile(ticker, data_folder):
    """Fetch a finviz snapshot for *ticker* (file-cached under
    *data_folder*) and flatten it into a profile dict."""
    def fetch():
        return finviz.get_stock(ticker)

    store = cache.get_cache('STOCK', data_folder)
    snapshot = store.get(key=ticker, createfunc=fetch)

    # profile key -> finviz column name
    columns = {
        'industry': 'Industry',
        'name': 'Company',
        'pe': 'P/E',
        'eps': 'EPS (ttm)',
        'eps_q': 'EPS next Q',
        'mc': 'Market Cap',
        'forward_pe': 'Forward P/E',
        'income': 'Income',
        'sales': 'Sales',
        '52w': '52W Range',
        'roe': 'ROE',
        'avg_vol': 'Avg Volume',
        'vol': 'Volume',
        'shares': 'Shs Outstand',
        'debt_eq': 'Debt/Eq',
        'short_perc': 'Short Float',
        'earnings': 'Earnings',
    }
    profile = {key: snapshot[column] for key, column in columns.items()}
    profile['ticker'] = ticker
    return profile
Beispiel #31
0
 def get_by_token(cls, token):
     """Resolve a cached auth *token* to a user instance.

     Returns None when the token is unknown or the cached user id no
     longer exists.
     """
     uid = get_cache(token)
     if uid:
         try:
             return cls.get(pk=uid)
         except cls.DoesNotExist:
             pass
     # Unknown token or stale cache entry: fall through to None.
Beispiel #32
0
    def __init__(self, domain):
        """Bind this client to *domain* and attach the shared cache."""
        self.domain = domain
        self.data = cache.get_cache()

        # OAuth-style credentials; filled in later by the caller.
        self.client_id = None
        self.client_secret = None

        # Default request timeout value used by this client.
        self.timeout = 20
Beispiel #33
0
def assemble(defs, target):
    '''Assemble dependencies and contents recursively until target exists.

    Returns the cache key of the assembled artifact, None when the
    component targets a different architecture, and recurses into
    systems, build-depends and contents before building leaf components.
    '''

    # Fast path: artifact already cached; just reload its manifest.
    if cache.get_cache(defs, target):
        # needed for artifact splitting
        load_manifest(defs, target)
        return cache.cache_key(defs, target)

    # Re-seed from wall-clock time, presumably so concurrent builders
    # shuffle their work lists differently — TODO confirm.
    random.seed(datetime.datetime.now())
    component = defs.get(target)

    # Components pinned to a different architecture are skipped entirely.
    if component.get('arch') and component['arch'] != app.config['arch']:
        app.log(target, 'Skipping assembly for', component.get('arch'))
        return None

    def assemble_system_recursively(system):
        # Assemble the system itself, then all of its subsystems.
        assemble(defs, system['path'])

        for subsystem in system.get('subsystems', []):
            assemble_system_recursively(subsystem)

    with app.timer(component, 'assembly'):
        sandbox.setup(component)

        # The three work lists below are shuffled, presumably so that
        # parallel builders start on different items — TODO confirm.
        systems = component.get('systems', [])
        random.shuffle(systems)
        for system in systems:
            assemble_system_recursively(system)

        dependencies = component.get('build-depends', [])
        random.shuffle(dependencies)
        for it in dependencies:
            dependency = defs.get(it)
            assemble(defs, dependency)
            sandbox.install(defs, component, dependency)

        contents = component.get('contents', [])
        random.shuffle(contents)
        for it in contents:
            subcomponent = defs.get(it)
            # Bootstrap-mode components are not installed into the sandbox.
            if subcomponent.get('build-mode') != 'bootstrap':
                assemble(defs, subcomponent)
                splits = None
                # Systems may install only selected artifact splits.
                if component.get('kind') == 'system':
                    splits = subcomponent.get('artifacts')
                sandbox.install(defs, component, subcomponent, splits)

        app.config['counter'] += 1
        # Systems are pure compositions; only leaf components run a build.
        if 'systems' not in component:
            with app.timer(component, 'build'):
                build(defs, component)
        with app.timer(component, 'artifact creation'):
            do_manifest(defs, component)
            cache.cache(defs, component,
                        full_root=component.get('kind') == "system")
        sandbox.remove(component)

    return cache.cache_key(defs, component)
Beispiel #34
0
    def __init__(self, config: Configuration):
        """Open a Redis connection from *config* and attach the shared cache."""
        super().__init__('RedisCassandra')

        host = config.get('redis.host')
        port = config.get('redis.port')
        password = config.get('redis.password')

        # TODO: change to Sentinel so we can handle multiple hosts
        self.redis_conn = StrictRedis(host=host, port=port,
                                      password=password)
        self.cache = get_cache()
Beispiel #35
0
    def _build(self, env, output_path, force, no_filters, parent_filters=[]):
        """Internal recursive build method.

        Recursively merges this bundle's resolved contents into a single
        hunk, applying 'input' filters per source hunk and 'output'
        filters to the merged result; both stages are skipped when
        ``no_filters`` is true.

        NOTE(review): ``parent_filters=[]`` is a mutable default
        argument; it is only read here, but ``None`` would be the safer
        idiom. ``force`` is not used in this method body — confirm it is
        consumed elsewhere.
        """

        # TODO: We could support a nested bundle downgrading it's debug
        # setting from "filters" to "merge only", i.e. enabling
        # ``no_filters``. We cannot support downgrading to
        # "full debug/no merge" (debug=True), of course.
        #
        # Right now we simply use the debug setting of the root bundle
        # we build, und it overrides all the nested bundles. If we
        # allow nested bundles to overwrite the debug value of parent
        # bundles, as described above, then we should also deal with
        # a child bundle enabling debug=True during a merge, i.e.
        # raising an error rather than ignoring it as we do now.
        resolved_contents = self.resolve_contents(env)
        if not resolved_contents:
            raise BuildError('empty bundle cannot be built')

        # Ensure that the filters are ready
        for filter in self.filters:
            filter.set_environment(env)

        # Apply input filters to all the contents. Note that we use
        # both this bundle's filters as well as those given to us by
        # the parent. We ONLY do those this for the input filters,
        # because we need them to be applied before the apply our own
        # output filters.
        # TODO: Note that merge_filters() removes duplicates. Is this
        # really the right thing to do, or does it just confuse things
        # due to there now being different kinds of behavior...
        combined_filters = merge_filters(self.filters, parent_filters)
        cache = get_cache(env)
        hunks = []
        for c in resolved_contents:
            if isinstance(c, Bundle):
                # Nested bundles build recursively and inherit our filters.
                hunk = c._build(env, output_path, force, no_filters,
                                combined_filters)
                hunks.append(hunk)
            else:
                # Plain contents: wrap as a URL or local-file hunk.
                if is_url(c):
                    hunk = UrlHunk(c)
                else:
                    hunk = FileHunk(env.abspath(c))
                if no_filters:
                    hunks.append(hunk)
                else:
                    hunks.append(apply_filters(
                        hunk, combined_filters, 'input', cache,
                        output_path=output_path))

        # Return all source hunks as one, with output filters applied
        final = merge(hunks)
        if no_filters:
            return final
        else:
            return apply_filters(final, self.filters, 'output', cache)
Beispiel #36
0
def register_to_rates(code, start=None, end=None, field=None, observer=None):
    '''
    Register *observer* for updates on a single rate.

    code is a string that may also encode the field; splitcode() splits
    it. Silently does nothing when the cache has no rate holder for the
    code.

    NOTE(review): the *field* argument is overwritten by splitcode()'s
    result — confirm that is intended.
    '''
    cache = get_cache()
    code, field = splitcode(code)
    rateholder = cache.get_rate_holder(code)
    # PEP 8: test for None by identity (`is not None`), not `!= None`.
    if rateholder is not None:
        rateholder.register_to_rates(start, end, field, observer=observer)
Beispiel #37
0
 def get_url(url):
     """Return the content for *url*, serving from the cache when possible."""
     # Cache keys must not contain path separators.
     key = url.replace('/', '_')
     cached = get_cache(key)
     if cached:
         return cached
     content = default_get_url(url)
     set_cache(key, content)
     return content
Beispiel #38
0
def register_to_rates(code, start=None, end=None, field=None, observer=None):
    '''
    Register *observer* for updates on a single rate.

    code may also encode the field; splitcode() splits it. Does nothing
    when no rate holder exists for the code.

    NOTE(review): the *field* argument is overwritten by splitcode()'s
    result — confirm that is intended.
    '''
    cache = get_cache()
    code, field = splitcode(code)
    rateholder = cache.get_rate_holder(code)
    # PEP 8: test for None by identity (`is not None`), not `!= None`.
    if rateholder is not None:
        rateholder.register_to_rates(start, end, field, observer=observer)
Beispiel #39
0
 def decorator(*args, **kwargs):
     """Memoizing wrapper: serve a cached result when one exists."""
     label = name if name else '.'.join([f.__module__, f.__name__])
     key = get_cache_func_name(label, *args, **kwargs)
     cached = get_cache(key)
     if cached is not None:
         return cached
     value = f(*args, **kwargs)
     set_cache(key, value, time)
     return value
Beispiel #40
0
def install(defs, this, component):
    '''Unpack *component*'s cached artifact into this['sandbox'].

    A component counts as already installed when its .meta file exists
    under the sandbox's baserock directory, in which case nothing is done.
    '''
    if os.path.exists(os.path.join(this['sandbox'], 'baserock',
                                   component['name'] + '.meta')):
        return
    if app.config.get('log-verbose'):
        app.log(this, 'Installing %s' % component['cache'])
    unpackdir = cache.get_cache(defs, component) + '.unpacked'
    # Bug fix: `is 'system'` compared string *identity*, which is
    # implementation-dependent (and usually False); equality is intended.
    if this.get('kind') == 'system':
        # Real copies for systems; presumably so the installed tree is
        # independent of the unpacked cache — TODO confirm.
        utils.copy_all_files(unpackdir, this['sandbox'])
    else:
        utils.hardlink_all_files(unpackdir, this['sandbox'])
def add_description_to_profile(stock, ticker, api_key, data_folder):
    """Attach Alpha Vantage's company description to *stock*, if available.

    The OVERVIEW response is memoized per ticker in the file cache under
    *data_folder*. Returns the (possibly updated) *stock* dict.
    """
    def response():
        return requests.get(
            f'https://www.alphavantage.co/query?function=OVERVIEW'
            f'&symbol={ticker}&interval=5min&apikey={api_key}')

    fileCache = cache.get_cache('OVERVIEW', data_folder)
    resp = fileCache.get(key=ticker, createfunc=response)

    # Parse the body once instead of calling resp.json() twice, and drop
    # the redundant `.keys()` from the membership test.
    payload = resp.json()
    if 'Description' in payload:
        stock['description'] = payload['Description']

    return stock
Beispiel #42
0
 def __init__(self, app):
     """Set up the server-side session store from the app config.

     Reads the backing cache name and key prefix from app.config. A
     ``permanent_session_lifetime`` of 0 selects a fixed 24-hour,
     non-permanent session; otherwise the configured lifetime is used
     and sessions are marked permanent.
     """
     super(_ServerSessionInterface, self).__init__()
     self._cache = cache.get_cache(app.config.get('SESSION_CACHE_NAME'))
     self._cache_key_prefix = app.config.get('SESSION_CACHE_KEY',
                                             'session.')
     if app.permanent_session_lifetime == 0:
         # Lifetime 0: fall back to a fixed 24h window, not permanent.
         self._session_timeout = 24 * 60 * 60
         self._permanent = False
     else:
         self._session_timeout = sessions.total_seconds(
             app.permanent_session_lifetime)
         self._permanent = True
     # Cache entries live twice as long as the session itself,
     # presumably to allow a renewal grace period — TODO confirm.
     self._cache_timeout = self._session_timeout * 2
Beispiel #43
0
def get_rate(code, dte=None):
    '''
    Fetch a single rate.

    code is a string which specifies the data_id, the field (optional) and
    the vendor (optional). Returns None when the cache has no rate holder
    for the data_id.
    '''
    cache = get_cache()
    dte = cache.get_livedate(dte)
    code, field, vendor = splitcode(code)
    rateholder = cache.get_rate_holder(code)
    # PEP 8: test for None by identity (`is not None`), not `!= None`.
    if rateholder is not None:
        return rateholder.get_or_make(dte, field, vendor)
    return None
Beispiel #44
0
def get_rate(code, dte=None):
    '''
    Fetch a single rate.

    code is a string which specifies the data_id, the field (optional) and
    the vendor (optional). Returns None when the cache has no rate holder
    for the data_id.
    '''
    cache = get_cache()
    dte = cache.get_livedate(dte)
    code, field, vendor = splitcode(code)
    rateholder = cache.get_rate_holder(code)
    # PEP 8: test for None by identity (`is not None`), not `!= None`.
    if rateholder is not None:
        return rateholder.get_or_make(dte, field, vendor)
    return None
Beispiel #45
0
def deploy_system(defs, system_spec, parent_location=''):
    '''Deploy a system and subsystems recursively.

    Takes a system spec (i.e. an entry in the "systems" list in a cluster
    definition), and optionally a path to a parent system tree. If
    `parent_location` is given then the `location` given in the cluster
    definition for the subsystem is appended to `parent_location`, with
    the result being used as the location for the deployment extensions.

    NOTE(review): uses dict.iteritems() and items()-concatenation, so
    this code targets Python 2.

    '''
    system = defs.get(system_spec['path'])
    deploy_defaults = system_spec.get('deploy-defaults')

    # Only deploy systems built for the configured architecture.
    if system.get('arch') and system['arch'] != app.settings['arch']:
        app.log(system, 'Skipping deployment for', system['arch'])
        return None

    sandbox.setup(system)
    app.log(system, 'Extracting system artifact into', system['sandbox'])
    with open(cache.get_cache(defs, system), 'r') as artifact:
        call(['tar', 'x', '--directory', system['sandbox']],
              stdin=artifact)

    # Subsystems deploy into this system's sandbox tree.
    for subsystem_spec in system_spec.get('subsystems', []):
        if deploy_defaults:
            # Merge defaults with the subsystem's own settings; the
            # subsystem's entries win on key clashes (later pairs win).
            subsystem_spec = dict(deploy_defaults.items()
                                  + subsystem_spec.items())
        deploy_system(defs, subsystem_spec, parent_location=system['sandbox'])

    for name, deployment in system_spec.get('deploy', {}).iteritems():
        # Extension method name comes from the deployment 'type' basename.
        method = os.path.basename(deployment['type'])
        if deploy_defaults:
            deployment = dict(deploy_defaults.items()
                              + deployment.items())
        do_deployment_manifest(system, deployment)
        if parent_location:
            # Nested deployments land inside the parent system's tree.
            deployment['location'] = os.path.join(
                parent_location, deployment['location'].lstrip('/'))
        try:
            sandbox.run_extension(system, deployment, 'check', method)
        except KeyError:
            # A missing check extension is logged, not fatal.
            app.log(system, "Couldn't find a check extension for",
                    method)
        for ext in system.get('configuration-extensions', []):
            sandbox.run_extension(system, deployment, 'configure',
                                  os.path.basename(ext))
        os.chmod(system['sandbox'], 0o755)
        sandbox.run_extension(system, deployment, 'write', method)
    sandbox.remove(system)
Beispiel #46
0
    def call_api(self,
                 api,
                 data=None,
                 method="GET",
                 last_checked=None,
                 filter=None,
                 data_type="application/json"):
        """Make an authenticated REST call via make_api_call.

        Raises AuthorizationError when no auth token is present in the
        cache.
        """
        cache_data = cache.get_cache()
        # Idiom fix: `"token" not in x` instead of `not "token" in x`.
        if "token" not in cache_data:
            raise AuthorizationError(
                "You are not signed in. Please sign in before making REST calls."
            )

        return self.make_api_call(api, data, method, last_checked, filter,
                                  data_type)
Beispiel #47
0
def get_user_following(api, user_id):
    '''
    api: twitter.Api oauth login object
    user_id: twitter user id as integer

    return: List of integer ids (friend IDs) for user_id
    '''
    following = get_cache(user_id, 'following', 'csv')

    # PEP 8: compare against None with `is`, not `==`; only hit the API
    # on a cache miss.
    if following is None:
        following = api.GetFriendIDs(user_id=user_id)
        make_cache(following, user_id, 'following', 'csv')

    return following
Beispiel #48
0
 def __init__(self, auth_data):
     """Build a custom-key auth handler from *auth_data*.

     Dynamically creates the handler named in the auth data via
     create_auth_service, checks it is a CustomKeyHandler instance, and
     binds the configured response method. Raises MiddleTierException
     on misconfiguration.

     NOTE(review): getattr() without a default raises AttributeError for
     a missing method before the falsy check below runs — confirm that
     is the intended failure mode.
     """
     super().__init__(auth_data)
     self.auth_data = auth_data
     auth_data_custom = self.auth_data.get("data", {})
     # A cache is only attached when a cache_time is configured.
     self.cache_time = self.auth_data.get('cache_time')
     if self.cache_time:
         self.cache = get_cache()
     self.module_name = auth_data_custom["function_source_uri"]
     # "Clazz.method" -> class name and method name.
     self.clazz, self.method = auth_data_custom["response_function_name"].split(".")
     self.virtual_handler_clazz = self.create_auth_service(self.auth_data, self.module_name, self.clazz)
     if not isinstance(self.virtual_handler_clazz, CustomKeyHandler):
         raise MiddleTierException("Security handler should be inherited from security.custom.CustomKeyHandler")
     self.virtual_handler = getattr(self.virtual_handler_clazz, self.method)
     if not self.virtual_handler:
         raise MiddleTierException("Can't load method: {} {} {}".format(self.module_name, self.clazz, self.method))
Beispiel #49
0
def build(defs, component):
    '''Create an artifact for a single component and add it to the cache'''

    # Nothing to do when an artifact is already cached.
    if get_cache(defs, component):
        return

    with claim(defs, component):
        # Only chunks need their build-dependencies staged first.
        kind = component.get('kind', 'chunk')
        if kind == 'chunk':
            install_dependencies(defs, component)

        with timer(component, 'build of %s' % component['cache']):
            run_build(defs, component)

        with timer(component, 'artifact creation'):
            write_metadata(defs, component)
            cache(defs, component)
Beispiel #50
0
def build(defs, component):
    '''Create an artifact for a single component and add it to the cache'''

    if get_cache(defs, component):
        return  # artifact already present

    with claim(defs, component):
        if component.get('kind', 'chunk') == 'chunk':
            # Chunks need their build-dependencies staged first.
            install_dependencies(defs, component)
        build_label = 'build of %s' % component['cache']
        with timer(component, build_label):
            run_build(defs, component)
        with timer(component, 'artifact creation'):
            write_metadata(defs, component)
            cache(defs, component)
Beispiel #51
0
 def check_authorization(self):
     """Populate request.user / request.auth from an 'auth' token taken
     from the JSON body, the query/form values, or the cookies."""
     token = (request.json or {}).get('auth') or \
         request.values.get('auth') or \
         request.cookies.get('auth')
     request.user = None
     request.auth = None
     if token is None:
         return
     user_id = get_cache(token)
     if not user_id:
         return
     try:
         matched = User.get(pk=user_id)
     except User.DoesNotExist:
         return
     request.user = matched
     request.auth = token
Beispiel #52
0
    def sign_in(self, username, password):
        """Obtain an OAuth token for *username* and cache it on success.

        Returns the raw token-endpoint response dict; on success the
        access token and token type are stored in the cache.
        """
        # Don't pass a stale user token while requesting a new one.
        self.invalidate_token()

        # Security fix: URL-encode the credentials. The previous "%s"
        # interpolation corrupted the request (and allowed parameter
        # injection) whenever a username or password contained
        # characters such as '&', '=' or '%'.
        from urllib.parse import urlencode
        body = urlencode({
            "grant_type": "password",
            "username": username,
            "password": password,
        })
        result = self.make_api_call(
            "/o/token/",
            data=body,
            method="POST",
            data_type="application/x-www-form-urlencoded")

        if "access_token" in result:
            cache_data = cache.get_cache()
            cache_data["token"] = result["access_token"]
            cache_data["token_type"] = result["token_type"]
            cache.save_cache()
        return result
Beispiel #53
0
def get_user_tweets(api, user_id):
    '''
    api: twitter.Api oauth login object
    user_id: twitter user id as integer

    return: list of tweet dicts for user_id (served from cache when
    available)
    '''
    tweets = get_cache(user_id, 'tweets', 'jsonlist')

    # PEP 8: compare against None with `is`, not `==`; only hit the API
    # on a cache miss.
    if tweets is None:
        tweets = api.GetUserTimeline(user_id=user_id,
                                     count=200,
                                     trim_user=True)
        tweets = [tweet.AsDict() for tweet in tweets]
        make_cache(tweets, user_id, 'tweets', 'jsonlist')

    return tweets
Beispiel #54
0
def get_user_lists(api, user_id=None, screen_name=None):
    '''
    api: twitter.Api oauth login object
    user_id: twitter user id as integer (Optional)
    screen_name: twitter screen name as string (Optional)

    return: list of user-list dicts (served from cache when available)

    NOTE(review): screen_name is accepted but never used below — confirm
    whether it should be forwarded to GetListsList.
    '''
    user_lists = get_cache(user_id, 'list', 'jsonlist')

    # PEP 8: compare against None with `is`, not `==`; only hit the API
    # on a cache miss.
    if user_lists is None:
        user_lists = api.GetListsList(None, user_id=user_id)
        user_lists = [ulist.AsDict() for ulist in user_lists]
        make_cache(user_lists, user_id, 'list', 'jsonlist')

    return user_lists
Beispiel #55
0
def assemble(defs, target):
    '''Assemble dependencies and contents recursively until target exists.

    Returns the cache key of the assembled artifact, or None when the
    component targets a different architecture.
    '''

    # Fast path: artifact already in the cache.
    if cache.get_cache(defs, target):
        return cache.cache_key(defs, target)

    component = defs.get(target)

    # Components pinned to a different architecture are skipped entirely.
    if component.get('arch') and component['arch'] != app.settings['arch']:
        app.log(target, 'Skipping assembly for', component.get('arch'))
        return None

    def assemble_system_recursively(system):
        # Assemble the system itself, then all of its subsystems.
        assemble(defs, system['path'])
        for subsystem in system.get('subsystems', []):
            assemble_system_recursively(subsystem)

    with app.timer(component, 'Starting assembly'):
        sandbox.setup(component)
        for system_spec in component.get('systems', []):
            assemble_system_recursively(system_spec)

        # Work lists are shuffled, presumably so concurrent builders
        # start on different items — TODO confirm.
        dependencies = component.get('build-depends', [])
        random.shuffle(dependencies)
        for it in dependencies:
            dependency = defs.get(it)
            assemble(defs, dependency)
            sandbox.install(defs, component, dependency)

        contents = component.get('contents', [])
        random.shuffle(contents)
        for it in contents:
            subcomponent = defs.get(it)
            # Bootstrap-mode components are not installed into the sandbox.
            if subcomponent.get('build-mode') != 'bootstrap':
                assemble(defs, subcomponent)
                sandbox.install(defs, component, subcomponent)

        app.settings['counter'] += 1
        # Systems are compositions; only leaf components run a build.
        if 'systems' not in component:
            build(defs, component)
        do_manifest(component)
        cache.cache(defs, component,
                    full_root=component.get('kind') == "system")
        sandbox.remove(component)

    return cache.cache_key(defs, component)
Beispiel #56
0
 def decorator(*args, **kwargs):
     """Attach the authenticated user (if any) to the request, then
     invoke the wrapped function *f*."""
     token = (request.json or {}).get('auth') or \
         request.values.get('auth') or \
         request.cookies.get('auth')
     request.user = None
     request.auth = None
     if token is not None:
         uid = get_cache(token)
         if uid:
             try:
                 matched = User.get(pk=uid)
             except User.DoesNotExist:
                 # Stale cache entry: leave request anonymous.
                 pass
             else:
                 request.user = matched
                 request.auth = token
     return f(*args, **kwargs)
Beispiel #57
0
def compose(dn):
    '''Work through defs tree, building and assembling until target exists.

    *dn* may be a definition dict or a name to look up. Returns the
    cache key on success, False when no cache key can be computed, and
    None for components targeting a different architecture.
    '''

    # Accept either a definition dict or a name to look up.
    if type(dn) is not dict:
        dn = app.defs.get(dn)

    # if we can't calculate cache key, we can't create this component
    if cache_key(dn) is False:
        # Log the skip only once per definition.
        if 'tried' not in dn:
            log(dn, 'No cache_key, so skipping compose')
            dn['tried'] = True
        return False

    # if dn is already cached, we're done
    if get_cache(dn):
        return cache_key(dn)

    log(dn, "Composing", dn['name'], verbose=True)

    # if we have a kbas, look there to see if this component exists
    if config.get('kbas-url') and not config.get('reproduce'):
        with claim(dn):
            if get_remote(dn):
                config['counter'].increment()
                return cache_key(dn)

    # we only work with user-specified arch
    if 'arch' in dn and dn['arch'] != config['arch']:
        return None

    # Create composite components (strata, systems, clusters).
    # Shuffled, presumably so concurrent builders start on different
    # systems — TODO confirm.
    systems = dn.get('systems', [])
    shuffle(systems)
    for system in systems:
        for s in system.get('subsystems', []):
            subsystem = app.defs.get(s['path'])
            compose(subsystem)
        compose(system['path'])

    with sandbox.setup(dn):
        install_contents(dn)
        build(dn)     # bring in 'build-depends', and run make

    return cache_key(dn)
Beispiel #58
0
def build(dn):
    '''Create an artifact for a single component and add it to the cache'''

    if get_cache(dn):
        return

    with claim(dn):
        if dn.get('kind', 'chunk') == 'chunk':
            install_dependencies(dn)
        with timer(dn, 'build of %s' % dn['cache']):
            run_build(dn)

        with timer(dn, 'artifact creation'):

            if dn.get('kind', 'chunk') == 'system':
                install_split_artifacts(dn)

            write_metadata(dn)
            cache(dn)