Example 1
    def __init__(self, id: str, app) -> None:
        self.id = id
        self.authenticated = False
        self.active = True
        self.anonymous = False
        self.client = None
        self.food = Resource(user=self, name='food', activity_name='hunting')
        self.wood = Resource(user=self, name='wood', activity_name='chopping')
        self.coal = Resource(user=self, name='coal', activity_name='coal_mining')
        self.metal = Resource(user=self, name='metal', activity_name='metal_mining')
        self.resources = [self.food, self.wood, self.coal, self.metal]

        self.mongo_client = pymongo.MongoClient(
            f"mongodb+srv://admin:{app.config['MONGO_PASSWD']}@cluster0.soder.mongodb.net/admin?retryWrites=true&w=majority"
        )
        self.db = self.mongo_client.app
        self.user_collection = self.db.users

        if not self.user_collection.count_documents({'id': self.id}):
            user = {
                "id": self.id,
            }
            self.user_collection.insert_one(user)
            for resource in self.resources:
                resource.set_amount(0)
                resource.set_speed(0)
                resource.set_level(0)

        users[id] = self
Example 2
def test_resource_one_saves_another_loads(df, tmpfile):
    one = Resource(tmpfile)
    another = Resource(tmpfile)
    one.save(df)
    loaded_df = another.load()

    assert (df == loaded_df).values.all()
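
A minimal sketch of the file-backed Resource API this test assumes (hypothetical; the real class may differ):

import pandas as pd

class Resource:
    def __init__(self, path):
        self._path = path

    def save(self, df):
        # Persist the DataFrame to the backing file.
        df.to_pickle(self._path)

    def load(self):
        # Read the DataFrame back from the same path.
        return pd.read_pickle(self._path)

With this sketch, two instances pointing at the same tmpfile share state through the file, which is exactly what the test checks.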
Example 3
    def test_resource_override(self):
        """Verifies that Resources in the datastore override files on disk."""
        # Should render normally.
        doc = self.go('/haiti/create')
        assert 'xyz' not in doc.content

        # This Resource should override the create.html.template file.
        bundle = ResourceBundle(key_name='1')
        key1 = Resource(parent=bundle,
                        key_name='create.html.template',
                        content='xyz{{env.repo}}xyz').put()
        doc = self.go('/haiti/create')
        assert 'xyzhaitixyz' not in doc.content  # old template is still cached

        # The new template should take effect after 1 second.
        self.advance_utcnow(seconds=1.1)
        doc = self.go('/haiti/create')
        assert 'xyzhaitixyz' in doc.content

        # A plain .html Resource should override the .html.template Resource.
        key2 = Resource(parent=bundle,
                        key_name='create.html',
                        content='xyzxyzxyz').put()
        self.advance_utcnow(seconds=1.1)
        doc = self.go('/haiti/create')
        assert 'xyzxyzxyz' in doc.content

        # After removing both Resources, should fall back to the original file.
        db.delete([key1, key2])
        self.advance_utcnow(seconds=1.1)
        doc = self.go('/haiti/create')
        assert 'xyz' not in doc.content
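
The 1.1-second waits work because served Resources are kept in a short-lived cache. A minimal sketch of that kind of time-based expiry (hypothetical; names are illustrative, not the app's API):

import time

_cache = {}  # name -> (expiry timestamp, value)

def get_cached(name, fetch, cache_seconds=1.0):
    now = time.time()
    hit = _cache.get(name)
    if hit is not None and hit[0] > now:
        return hit[1]  # still fresh: serve the cached copy
    value = fetch(name)  # re-read from datastore or disk
    _cache[name] = (now + cache_seconds, value)
    return value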
Example 4
 def parse(self):
     SECTION_ID = '#sectionId'
     USAGES = '#usages'
     resource_set = ResourceSet()
     soup = BeautifulSoup(self._html_stream, self.__class__.DEFAULT_PARSER)
     divs = soup.find_all('div', {'class': 'partTile'})
     prev_url = None
     for div in divs:
         a = div.find('a')
         a_href = a['href']
         if USAGES in a_href:
             continue
         if SECTION_ID in a_href:
             url = a_href.split('#')[0]
             # Add each section URL once, skipping consecutive duplicates.
             if prev_url != url:
                 resource = Resource(url=url)
                 resource_set.add(resource)
                 prev_url = url
             continue
         resource = Resource(url=a_href)
         resource_set.add(resource)
     return resource_set
Example 5
    def load_training_from_file_list(self, filelist):
        import logging
        import os
        import traceback

        import numpy as np
        import progressbar
        if os.path.exists(self.preproc_cachefile):
            print(
                'preproc index already exists, not loading training features...'
            )
            allfeats = np.load(self.trainfeat_cachefile)
        elif os.path.exists(self.trainfeat_cachefile):
            print('found cached training features, loading..')
            allfeats = np.load(self.trainfeat_cachefile)
        else:
            # if os.path.exists(cache_dir):
            #     print("Loading premade training file...")
            #     return np.load(cache_dir)
            if self.train_size == -1:
                self.train_size = 50000
            if self.kp_per_file == -1:
                fname = os.path.join(filelist[0])
                if self.nonBinarizedFeatures:
                    features = Resource('', np.load(fname),
                                        'application/octet-stream')
                else:
                    features = Resource('', np.fromfile(fname, 'float32'),
                                        'application/octet-stream')
                a = self.deserializeFeatures(features)
                # Presumably intended: infer the per-file keypoint count
                # from the first file; otherwise the sample-size
                # computation below divides by -1.
                self.kp_per_file = a.shape[0]
            randomKeys = np.random.choice(len(filelist),
                                          int(self.train_size * 1.0 /
                                              self.kp_per_file),
                                          replace=False)
            print("found " + str(len(filelist)) + " keys")
            allfeats = []
            print("Loading needed files for training...")
            bar = progressbar.ProgressBar()
            for idx in bar(randomKeys):
                key = filelist[idx]
                # print(key)
                if self.nonBinarizedFeatures:
                    featureResource = Resource(key, np.load(key),
                                               'application/octet-stream')
                else:
                    featureResource = Resource(key,
                                               np.fromfile(key, 'float32'),
                                               'application/octet-stream')

                file = os.path.join(key)
                # print("loading" + file)
                try:
                    features = self.deserializeFeatures(featureResource)
                    # features = np.load(file)
                    allfeats.append(features)
                except Exception as e:
                    logging.error(traceback.format_exc())
                    print("WARNING: Couldn't load a file")
            print("Concatenating training files...")
            allfeats = np.concatenate(allfeats)
            print('training size: ', allfeats.shape[0])
            np.save(self.trainfeat_cachefile, allfeats)
        return allfeats
Example 6
 def __init__(self, player_id, player_name):
     self.planets = {}
     self.ships = []
     self.resources = {
         'materials': Resource('materials', 50),
         'cybernetics': Resource('cybernetics', 10),
         'food': Resource('food', 25)
     }
     self.name = player_name
     self.id = player_id
Example 7
 def localization_output(mask, maskThreshold=None, maskOptOut=None):
     localization = {}
     if mask is not None:
         localization['mask'] = Resource('mask', serialize_image(mask),
                                         'image/png')
     if maskThreshold is not None:
         localization['mask_threshold'] = maskThreshold
     if maskOptOut is not None:
         localization['mask_optout'] = Resource(
             'maskOptOut', serialize_image(maskOptOut), 'image/png')
     return localization
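
A hypothetical call, assuming serialize_image encodes a numpy array as PNG bytes:

import numpy as np

mask = np.zeros((64, 64), dtype=np.uint8)
out = localization_output(mask, maskThreshold=128)
# out['mask'] is a Resource wrapping the PNG bytes,
# out['mask_threshold'] == 128, and there is no opt-out
# entry because maskOptOut was left as None.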
Example 8
def test_cache_many_arg_func(df, tmpfile):
    resource1 = Resource(tmpfile + '1')
    resource2 = Resource(tmpfile + '2')

    @cache(resource1, resource2)
    def process_df(df):
        df2 = 10 * df
        df3 = 20 * df
        return df2, df3

    df2, df3 = process_df(df)
    df2_from_cache, df3_from_cache = process_df(df)

    _compare_dfs(df2, df2_from_cache)
    _compare_dfs(df3, df3_from_cache)
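
A minimal sketch of a @cache decorator that takes one Resource per return value, consistent with how the tests use it (hypothetical; assumes each Resource exposes exists(), save(), and load()):

import functools

def cache(*resources):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Serve every output from cache when all backing files exist.
            if all(r.exists() for r in resources):
                loaded = tuple(r.load() for r in resources)
                return loaded[0] if len(loaded) == 1 else loaded
            result = func(*args, **kwargs)
            outputs = (result,) if len(resources) == 1 else result
            for resource, output in zip(resources, outputs):
                resource.save(output)
            return result
        return wrapper
    return decorator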
Example 9
 def __init__(self,
              port,
              address=socket.gethostname(),
              sock=None,
              indexFile=None,
              index=None,
              preproc=None,
              map=None,
              id=None):
     # Thread.__init__(self)
     if sock is None:
         self.server_socket = socket.socket(socket.AF_INET,
                                            socket.SOCK_STREAM)
         self.server_socket.setsockopt(socket.SOL_SOCKET,
                                       socket.SO_REUSEADDR, 1)
         self.server_socket.bind((address, port))
     else:
         self.server_socket = sock
     self.ex = featureExtraction()
     self.indexIsLoaded = False
     self.queryIndexObject = None
     self.port = port
     print('bound socket to ', address, ' on port ', port)
     if indexFile is not None:
         with open(indexFile, 'rb') as findex:
             indexResource = Resource('index', findex.read(),
                                      'application/octet-stream')
         self.queryIndexObject = queryIndex(indexResource, id=id)
         self.indexIsLoaded = True
         print('index loaded!')
Example 10
 def loadIndex(self, data):
     indexResource = Resource('index', data, 'application/octet-stream')
     print('starting up index...')
     # try:
     self.queryIndexObject = queryIndex(indexResource, id=self.port)
     print('index loaded!')
     self.indexIsLoaded = True
Example 11
def zpool_sync_resources(dispatcher, name, datasets=False):
    res_name = 'zpool:{0}'.format(name)

    def iter_dataset(ds):
        dispatcher.register_resource(Resource('zfs:{0}'.format(ds.name)),
                                     parents=[res_name])

        for i in ds.children:
            iter_dataset(i)

    try:
        zfs = libzfs.ZFS()
        pool = zfs.get(name)
    except libzfs.ZFSException:
        dispatcher.unregister_resource(res_name)
        return

    if dispatcher.resource_exists(res_name):
        dispatcher.update_resource(res_name,
                                   new_parents=get_disk_names(
                                       dispatcher, pool))
    else:
        dispatcher.register_resource(Resource(res_name),
                                     parents=get_disk_names(dispatcher, pool))

    if datasets:
        iter_dataset(pool.root_dataset)
Example 12
    def processImage(self, worldImage, flip=False, downsize=False):

        #extract data
        filename = worldImage.key
        imagedata = worldImage._data
        image = self.deserialize_image(imagedata)
        if downsize:
            image = self.downSize(image)
        if image is not None:
            if flip:
                image = cv2.flip(image, 0)
            #extract your features, in this example color histograms
            featuresStruct = featureExtractor.local_feature_detection_and_description(
                filename,
                self.detetype,
                self.desctype,
                self.kmax,
                image,
                mask=None,
                dense_descriptor=False,
                default_params=True)
            features = featuresStruct[1]

            #serialize your features into a Resource object that can be written to a file
            #The resource key should be the input filename
            if features is not None and len(features) > 0:
                totalFeatures = features.shape[1]
            else:
                features = np.zeros((0, 0), dtype='float32')
                totalFeatures = 0
            featureResource = Resource(filename,
                                       self.serializeFeature(features),
                                       'application/octet-stream')
            return self.createOutput(featureResource)
        return None
Example 13
def test_cache_formats_paths_from_object_properties(df, tmpfile):
    # Create resource template (path contains substitutions).
    add = 29
    mult = 7

    # Define class with a parameter.
    class Processor:
        def __init__(self, add):
            self._add = add

        # Define a method with a cacher parametrized by class's property.
        @cache(Resource(tmpfile + '_{mult}_{self._add}'))
        def process_df(self, df, mult):
            df2 = mult * df + self._add
            return df2

    processor = Processor(add)

    # Cacher loads data from the parametrized cache.
    df2 = processor.process_df(df, mult)
    df2_from_cache = processor.process_df(df, mult)
    _compare_dfs(df2, df2_from_cache)

    # Handcrafted resource opens and contains the same data.
    resource = Resource(tmpfile + '_%s_%s' % (mult, add))
    df2_from_resource = resource.load()
    _compare_dfs(df2, df2_from_resource)
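
A hypothetical sketch of how the cacher could expand placeholders such as '{mult}' and '{self._add}' in the path template, resolving dotted names against the call's bound arguments:

import inspect
import re

def format_path(template, func, args, kwargs):
    bound = inspect.signature(func).bind(*args, **kwargs)
    bound.apply_defaults()
    ns = dict(bound.arguments)  # includes 'self' for methods

    def resolve(match):
        name, _, attr = match.group(1).partition('.')
        value = ns[name]
        return str(getattr(value, attr) if attr else value)

    return re.sub(r'\{([^}]+)\}', resolve, template)

With the test's values, format_path(tmpfile + '_{mult}_{self._add}', Processor.process_df, (processor, df, 7), {}) yields tmpfile + '_7_29', matching the handcrafted resource path.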
Example 14
    def on_ready(args):
        for svc in dispatcher.datastore.query('service_definitions'):
            logger.debug('Loading service {0}'.format(svc['name']))
            enb = svc.get('builtin')

            if svc.get('auto_enable'):
                enb = False

            if dispatcher.configstore.get('service.{0}.enable'.format(
                    svc['name'])):
                enb = True

            if 'launchd' in svc and enb:
                try:
                    load_job(dispatcher, svc)
                except RpcException as err:
                    logger.error('Cannot load service {0}: {1}'.format(
                        svc['name'], err))
                    continue

            plugin.register_resource(Resource('service:{0}'.format(
                svc['name'])),
                                     parents=['system'])

        dispatcher.emit_event('service.ready', {})
Example 15
 def put_resource(self, bundle_name, name, cache_seconds, content):
     """Puts a Resource in the datastore for testing, and tracks it to
     be cleaned up in test teardown."""
     bundle = ResourceBundle(key_name=bundle_name)
     key = Resource(parent=bundle, key_name=name, content=content,
                    cache_seconds=float(cache_seconds)).put()
     self.temp_entity_keys.append(key)
Example 16
 def on_dataset_create(args):
     guid = args['guid']
     plugin.register_resource(Resource('zfs:{0}'.format(args['ds'])),
                              parents=['zpool:{0}'.format(args['pool'])])
     dispatcher.dispatch_event('zfs.pool.changed', {
         'operation': 'create',
         'ids': [guid]
     })
Example 17
    def show_resource(self, bundle_name, key_name, name, lang, editable):
        """Displays a single resource, optionally for editing."""
        resource = Resource.get(key_name, bundle_name) or Resource()
        content = resource.content or ''
        self.write(
            '''
<form method="post" class="%(class)s" enctype="multipart/form-data">
  <input type="hidden" name="xsrf_token" value="%(xsrf_token)s" />
  <input type="hidden" name="operation" value="put_resource">
  <input type="hidden" name="resource_bundle" value="%(bundle_name)s">
  <input type="hidden" name="resource_name" value="%(name)s">
  <input type="hidden" name="resource_lang" value="%(lang)s">
  <table cellpadding=0 cellspacing=0>
    <tr>
      <td class="warning hide-when-editable">
        This bundle cannot be edited while it is set as default.
      </td>
    </tr>
    <tr><td colspan=2>%(content_html)s</td></tr>
    <tr>
      <td style="position: relative">
        <button style="position: absolute">Replace with a file</button>
        <input type="file" name="file" class="hide-when-readonly"
            onchange="document.forms[0].submit()"
            style="position: absolute; opacity: 0; z-index: 1">
      </td>
      <td style="text-align: right">
        Cache seconds: <input %(maybe_readonly)s size=4
            name="cache_seconds" value="%(cache_seconds).1f">
      </td>
    </tr>
    <tr class="hide-when-readonly">
      <td>
        <button onclick="delete_resource()">Delete resource</button>
      </td>
      <td style="text-align: right">
        <input type="submit" name="save_content" value="Save resource">
      </td>
    </tr>
  </table>
</form>
<script>
function delete_resource() {
  if (confirm('Really delete %(name)s?')) {
    document.forms[0].operation.value = 'delete_resource';
    document.forms[0].submit();
  }
}
</script>''' % {
                'class': 'editable' if editable else 'readonly',
                'bundle_name': bundle_name,
                'name': name,
                'lang': lang,
                'content_html': format_content_html(content, name, editable),
                'cache_seconds': resource.cache_seconds,
                'maybe_readonly': '' if editable else 'readonly',
                'xsrf_token': self._get_xsrf_token()
            })
Example 18
 def queryFeatures(self,
                   queryFeatures,
                   numberOfResultsToRetrieve,
                   ignoreIDs=None):
     # Guard against the shared-mutable-default pitfall.
     if ignoreIDs is None:
         ignoreIDs = []
     allresults = []
     # for i in queryImages:
     #    allresults.append(filteringResults())
     # TODO: Feature concatenation for faster query batches
     # concatenate features
     # allFeats = []
     # featureExtractor = featureExtraction()
     # for feature in queryFeatures:
     #     allFeats.append(self.deserializeFeatures(feature))
     # allFeats = np.concatenate(allFeats,axis=0)
     # allFeatsResource = featureExtractor.createOutput(Resource("", featureExtractor.serializeFeature(allFeats), 'application/octet-stream'))
     # allFeatsResource = ['supplemental_information']['value']
     if self.useServers:
         # NOT FOR USE WITH SCALABLE API, FOR USE ONLY ON LOCAL NOTRE DAME SERVERS. PLEASE SEE ELSE STATEMENT.
         port = 8000
         for feature in queryFeatures:
             # print('running image ')
             if self.serversAndPorts:
                 allIndexResults = Parallel(
                     n_jobs=len(self.serversAndPorts))(
                         delayed(runQuery)(feature._data,
                                           numberOfResultsToRetrieve, port,
                                           'Features', address)
                         for port, address in self.serversAndPorts)
             else:
                 allIndexResults = Parallel(n_jobs=len(self.indexFiles))(
                     delayed(
                         runQuery)(feature._data, numberOfResultsToRetrieve,
                                   port, 'Features', self.defaultServerName)
                     for port in range(port, port + len(self.indexFiles)))
             result = filteringResults()
             bar = progressbar.ProgressBar()
             for r in bar(allIndexResults):
                 result.mergeScores(r, ignoreIDs=ignoreIDs)
                 # print(result.scores)
             allresults.append(result)
     else:
         for i in queryFeatures:
             allresults.append(filteringResults())
         for index in self.indexFiles:
             with open(index, 'rb') as indexfile:
                 indexResource = Resource('index', indexfile.read(),
                                          'application/octet-stream')
             # curQuery = queryIndex(indexResource)
             if not self.isTest and self.curQuery is None:
                 print('initializing a new query index....')
                 self.curQuery = queryIndex(indexResource)
             for c, feature in enumerate(queryFeatures):
                 result = self.curQuery.queryFeatures(
                     feature, numberOfResultsToRetrieve)
                 allresults[c].mergeScores(result, ignoreIDs=ignoreIDs)
     return allresults
Example 19
    def dance(self, smburl, depth=0):
        print("[%d] %s" % (self.ident, smburl))
        if depth > 10:  # maximum recursion depth
            return

        try:
            entries = self.ctx.opendir(smburl).getdents()
        except Exception:
            return

        for e in entries:
            try:
                if e.smbc_type < 0 or e.name.startswith('.'):
                    continue
            except Exception:
                continue

            # smbc type codes: 3 = file share, 7 = directory
            if e.smbc_type == smbc.FILE_SHARE or e.smbc_type == 7:
                try:
                    r = Resource()
                    r.uri = smburl
                    r.server = self.target
                    r.comment = e.comment
                    r.filetype = "directory"
                    try:
                        self.silos.addRes(r)
                    except Exception:
                        print(r)
                    self.dance(smburl + "/" + e.name, depth + 1)
                except Exception:
                    pass
                    # raise
            elif e.smbc_type == 8:
                try:
                    r = Resource()
                    r.uri = smburl + "/" + e.name
                    r.server = self.target
                    r.filesize = self.ctx.stat(r.uri)[6]
                    try:
                        self.silos.addRes(r)
                    except Exception:
                        print(r)
                except Exception:
                    pass
Example 20
    class Processor:
        def __init__(self, add):
            self._add = add

        # Define a method with a cacher parametrized by class's property.
        @cache(Resource(tmpfile + '_{mult}_{self._add}'))
        def process_df(self, df, mult):
            df2 = mult * df + self._add
            return df2
Example 21
    def test_resource_caching(self):
        """Verifies that Resources are cached properly."""
        # There's no file here.
        self.go('/global/foo.txt')
        assert self.s.status == 404
        self.go('/global/foo.txt?lang=fr')
        assert self.s.status == 404

        # Add a Resource to be served as the static file.
        bundle = ResourceBundle(key_name='1')
        Resource(parent=bundle, key_name='static/foo.txt',
                 content='hello').put()
        doc = self.go('/global/foo.txt?lang=fr')
        assert doc.content_bytes == 'hello'

        # Add a localized Resource.
        fr_key = Resource(parent=bundle,
                          key_name='static/foo.txt:fr',
                          content='bonjour').put()
        doc = self.go('/global/foo.txt?lang=fr')
        assert doc.content_bytes == 'hello'  # original Resource remains cached

        # The cached version should expire after 1 second.
        self.advance_utcnow(seconds=1.1)
        doc = self.go('/global/foo.txt?lang=fr')
        assert doc.content_bytes == 'bonjour'

        # Change the non-localized Resource.
        Resource(parent=bundle, key_name='static/foo.txt',
                 content='goodbye').put()
        doc = self.go('/global/foo.txt?lang=fr')
        assert doc.content_bytes == 'bonjour'
        # no effect on the localized Resource

        # Remove the localized Resource.
        db.delete(fr_key)
        doc = self.go('/global/foo.txt?lang=fr')
        assert doc.content_bytes == 'bonjour'
        # localized Resource remains cached

        # The cached version should expire after 1 second.
        self.advance_utcnow(seconds=1.1)
        doc = self.go('/global/foo.txt?lang=fr')
        assert doc.content_bytes == 'goodbye'
Example 22
def concatFeatures(self, r1, r2):
    featureExtractor = featureExtraction()
    cat = np.vstack(
        (self.deserializeFeatures(r1['supplemental_information']['value']),
         self.deserializeFeatures(r2['supplemental_information']['value'])))
    filename = r1['supplemental_information']['value'].key
    featureResource = Resource(filename,
                               featureExtractor.serializeFeature(cat),
                               'application/octet-stream')
    return featureExtractor.createOutput(featureResource)
Example 23
def _init(dispatcher, plugin):
    last_sysds_name = ''

    def on_volumes_changed(args):
        if args['operation'] == 'create':
            pass

    def on_datasets_changed(args):
        nonlocal last_sysds_name

        if args['operation'] != 'update':
            return

        for i in args['entities']:
            if ('.system-' in i['id'] and i['mountpoint'] == SYSTEM_DIR
                    and i['id'] != last_sysds_name):
                dispatcher.update_resource(
                    'system-dataset',
                    new_parents=['zfs:{0}'.format(i['id'])])
                last_sysds_name = i['id']
                return

    def volume_pre_destroy(args):
        # Evacuate .system dataset from the pool
        if dispatcher.configstore.get('system.dataset.pool') == args['name']:
            dispatcher.call_task_sync('system_dataset.migrate', 'freenas-boot')

        return True

    if not dispatcher.configstore.get('system.dataset.id'):
        dsid = uuid.uuid4().hex[:8]
        dispatcher.configstore.set('system.dataset.id', dsid)
        logger.info('New system dataset ID: {0}'.format(dsid))

    pool = dispatcher.configstore.get('system.dataset.pool')
    dsid = dispatcher.configstore.get('system.dataset.id')
    dispatcher.register_resource(
        Resource('system-dataset'),
        parents=['zfs:{0}/.system-{1}'.format(pool, dsid)])
    last_sysds_name = '{0}/.system-{1}'.format(pool, dsid)

    plugin.register_event_handler('volume.changed', on_volumes_changed)
    plugin.register_event_handler('entity-subscriber.zfs.dataset.changed',
                                  on_datasets_changed)
    plugin.attach_hook('volume.pre_destroy', volume_pre_destroy)
    plugin.attach_hook('volume.pre_detach', volume_pre_destroy)
    plugin.attach_hook('volume.pre_rename', volume_pre_destroy)
    plugin.register_provider('system_dataset', SystemDatasetProvider)
    plugin.register_task_handler('system_dataset.migrate',
                                 SystemDatasetConfigure)
    plugin.register_task_handler('system_dataset.import', SystemDatasetImport)

    plugin.register_hook('system_dataset.pre_detach')
    plugin.register_hook('system_dataset.pre_attach')
Example 24
 def work_on(self, dt: float, new_type: TileType) -> bool:
     """Returns if the job is done."""
     self.hp -= dt
     if self.hp <= 0:
         # Spawn resources
         if self.type == TileType.WATER:
             self._world.resources.add(
                 Resource(ResourceType.WATER, self.x // 16, self.y // 16))
         elif self.type == TileType.TREE:
             self._world.resources.add(
                 Resource(ResourceType.WOOD, self.x // 16, self.y // 16))
         elif self.type == TileType.STONE:
             self._world.resources.add(
                 Resource(ResourceType.STONE, self.x // 16, self.y // 16))
         # Switch to a new type
         self.type = new_type
         self.hp = TILE_COSTS[self.type]
         self.image = TILE_IMAGES[self.type]
         return True
     return False
Example 25
    def gain_costs(self, cost):
        msg = ''
        for res in cost:
            if res not in self.resources:
                # Store the Resource directly; wrapping it in a dict would
                # break the .amount update below.
                self.resources[res] = Resource(res, cost[res])
            else:
                self.resources[res].amount += cost[res]

            msg = msg + str(cost[res]) + resource_types[res][1]
        return msg
Example 26
    def on_device_attached(args):
        path = args['path']
        if re.match(r'^/dev/(da|ada|vtbd|multipath/multipath)[0-9]+$', path):
            if not dispatcher.resource_exists('disk:{0}'.format(path)):
                dispatcher.register_resource(Resource('disk:{0}'.format(path)))

        if re.match(r'^/dev/(da|ada|vtbd)[0-9]+$', path):
            # Regenerate disk cache
            logger.info("New disk attached: {0}".format(path))
            with dispatcher.get_lock('diskcache:{0}'.format(path)):
                generate_disk_cache(dispatcher, path)
Example 27
 def __init__(self, indexDir, outputImageDir):
     self.indexFiles = []
     for filename in os.listdir(indexDir):
         self.indexFiles.append(os.path.join(indexDir, filename))
     self.imageDirectory = outputImageDir
     if self.isTest:
         with open(self.indexFiles[0], 'rb') as indexfile:
             indexResource = Resource('index', indexfile.read(),
                                      'application/octet-stream')
         self.curQuery = queryIndex(indexResource)
         self.currentQueryFeatureResource = None
Example 28
 def trainIndex(self, featurelistfile):
     print('train index')
     filelist = []
     with open(featurelistfile) as featurelist:
         for f in featurelist:
             filelist.append(f.rstrip())
     xt = self.load_training_from_file_list(filelist)
     trainedParams = self.trainIndexWithFeatures(xt)
     data = self.zipBinaryTrainingParams(trainedParams[0], trainedParams[1],
                                         trainedParams[2])
     print('finished training')
     return Resource("Parameters", data, 'application/octet-stream')
Example 29
def test_cache_saves_and_loads(df, tmpfile):
    resource = Resource(tmpfile)

    @cache(resource)
    def process_df(df):
        df2 = 10 * df
        return df2

    df2 = process_df(df)
    df2_from_cache = process_df(df)

    _compare_dfs(df2, df2_from_cache)
Example 30
    def dance(self, ftpurl, depth=0):
        print(ftpurl)
        if not self.ftp:
            return

        if depth > 10:  # maximum recursion depth
            return

        m = re.search(".*://[^/]*(/.*)", ftpurl)
        if m and len(m.group(1)) > 0:  # the rest of the uri
            currentdir = m.group(1)
        else:
            currentdir = "/"

        entries = []
        try:
            self.ftp.cwd(currentdir)
            self.ftp.retrlines('LIST', lambda s: entries.append(Entry(s)))
        except Exception:
            return

        for e in entries:
            if e.name.startswith('.'):
                continue
            if e.isdir:
                r = Resource()
                r.uri = ftpurl
                r.server = self.target
                # try:
                #     self.silos.addRes(r)
                # except:
                self.dance(ftpurl + "/" + e.name, depth + 1)
            else:
                r = Resource()
                r.uri = ftpurl + "/" + e.name
                r.server = self.target
                try:
                    self.silos.addRes(r)
                except Exception:
                    pass