def impl_test_apply(self, data_name, test_name):
    """Build two collections from test-data images, apply the second's
    snapshot to the first, and assert both end up with identical music
    folders, picture stores, and snapshot files.
    """
    work_dir = posixpath.join(work_root, test_name)
    # Start from a clean per-test working directory.
    if posixpath.isdir(work_dir):
        shutil.rmtree(work_dir)
    os.makedirs(work_dir)
    test_data = posixpath.join(test_data_root, data_name)
    music1_dir = posixpath.join(work_dir, 'music1')
    result1_dir = posixpath.join(work_dir, 'result1')
    music2_dir = posixpath.join(work_dir, 'music2')
    result2_dir = posixpath.join(work_dir, 'result2')
    # Materialize both collections from their JSON image descriptions.
    create_collection(posixpath.join(test_data, 'image1.json'), result1_dir, music1_dir, 'data.json')
    create_collection(posixpath.join(test_data, 'image2.json'), result2_dir, music2_dir, 'data.json')
    snapshots1 = Snapshots(result1_dir)
    collection1 = Collection(snapshots1, music1_dir)
    snapshots2 = Snapshots(result2_dir)
    # NOTE(review): result is discarded and snapshots1 is reused with
    # music2_dir — looks like it may have been meant to read
    # Collection(snapshots2, music2_dir); confirm whether only the
    # constructor's side effects are relied upon here.
    Collection(snapshots1, music2_dir)
    collection1.apply_snapshot(snapshots2.load('data.json'))
    collection1.remove_unused_pictures()
    snapshots1.save(collection1.state, 'data.json')
    # After applying snapshot 2, collection 1 must mirror collection 2.
    self.folders_equal(music1_dir, music2_dir)
    self.folders_equal(posixpath.join(result1_dir, 'pictures'), posixpath.join(result2_dir, 'pictures'))
    self.snapshots_equal(posixpath.join(result1_dir, 'data.json'), posixpath.join(result2_dir, 'data.json'))
def upload(self):
    """Upload the collection data plus the grid buffer to the GPU.

    Does nothing when the collection is clean.  The grid buffer is
    uploaded to texture unit 2 as an RGBA32F 2D texture whose rows are
    the gbuffer items (1024 float4 texels per row).
    """
    if not self._dirty:
        return
    Collection.upload(self)
    gl.glActiveTexture(gl.GL_TEXTURE2)
    data = self._gbuffer.data.view(np.float32)
    # rows = number of gbuffer items, columns = 4*1024 floats (1024 texels)
    shape = len(self._gbuffer), 4 * 1024
    if not self._gbuffer_id:
        self._gbuffer_id = gl.glGenTextures(1)
    gl.glBindTexture(gl.GL_TEXTURE_2D, self._gbuffer_id)
    gl.glPixelStorei(gl.GL_UNPACK_ALIGNMENT, 1)
    gl.glPixelStorei(gl.GL_PACK_ALIGNMENT, 1)
    # Nearest filtering / clamped edges: this texture is a data table,
    # not an image, so no interpolation must occur.
    gl.glTexParameterf(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, gl.GL_NEAREST)
    gl.glTexParameterf(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, gl.GL_NEAREST)
    gl.glTexParameterf(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_S, gl.GL_CLAMP_TO_EDGE)
    gl.glTexParameterf(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_T, gl.GL_CLAMP_TO_EDGE)
    gl.glTexParameterf(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_BASE_LEVEL, 0)
    gl.glTexParameterf(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAX_LEVEL, 0)
    # FIX: the original activated/bound/uploaded this exact texture twice
    # in a row; the second glActiveTexture/glBindTexture/glTexImage2D
    # sequence was a verbatim duplicate and has been removed.
    gl.glTexImage2D(gl.GL_TEXTURE_2D, 0, gl.GL_RGBA32F,
                    shape[1] // 4, shape[0], 0,
                    gl.GL_RGBA, gl.GL_FLOAT, data)
    self._dirty = False
def add_child(self, name, child):
    """Add *child* under *name* and keep the persisted name order in sync."""
    Collection.add_child(self, name, child)
    ordered = self.get_ordered_names()
    if ordered is None:
        return
    # Record the new child at the end of the explicit ordering and persist.
    ordered.append(name)
    self._ordered_names = ordered
    self.save()
def __init__(self):
    """Create a dashed-line collection (3D `a_curr` positions).

    Sets up vertex/uniform dtypes, derives `a_prev`/`a_next` attributes
    that alias `a_curr` at shifted offsets (so the shader sees each
    vertex's neighbours), and compiles the dash-lines shader pair.
    """
    self.vtype = np.dtype([('a_curr', 'f4', 3), ('a_texcoord', 'f4', 2)])
    self.utype = np.dtype([('fg_color', 'f4', 4),
                           ('length', 'f4', 1),
                           ('linewidth', 'f4', 1),
                           ('antialias', 'f4', 1),
                           ('dash_phase', 'f4', 1),
                           ('dash_period', 'f4', 1),
                           ('dash_index', 'f4', 1),
                           ('dash_caps', 'f4', 2)])
    Collection.__init__(self, self.vtype, self.utype)
    self.dash_atlas = DashAtlas()
    dsize = self.vbuffer._dsize
    a_curr = self.vbuffer.attribute('a_curr')
    a_texcoord = self.vbuffer.attribute('a_texcoord')
    a_index = self.vbuffer.attribute('a_index')
    # a_prev/a_next reuse a_curr's layout; the offset shifts below make
    # them point at the previous/next vertex in the same buffer.
    a_next = VertexAttribute('a_next', a_curr.count, a_curr.gltype,
                             a_curr.stride, a_curr.offset)
    a_prev = VertexAttribute('a_prev', a_curr.count, a_curr.gltype,
                             a_curr.stride, a_curr.offset)
    self.attributes.extend([a_prev, a_next])
    a_index.offset += 2 * dsize
    a_curr.offset += 2 * dsize
    a_texcoord.offset += 2 * dsize
    a_next.offset += 4 * dsize
    shaders = os.path.join(os.path.dirname(__file__), '.')
    vertex_shader = os.path.join(shaders, 'dash-lines.vert')
    fragment_shader = os.path.join(shaders, 'dash-lines.frag')
    # FIX: use context managers so the shader source files are closed
    # (the originals were opened and never closed).
    with open(vertex_shader) as vert, open(fragment_shader) as frag:
        self.shader = Shader(vert.read(), frag.read())
def append(self, centers=(0, 0, 0), radius=3.0, fg_color=(0, 0, 0, 1),
           bg_color=(1, 1, 1, 1), linewidth=1.5, antialias=1.5,
           translate=(0, 0, 0), scale=1.0, rotate=0.0):
    """Append one screen-aligned quad (4 vertices, 2 triangles) per center.

    Every vertex of a quad carries the quad's center; the texcoord marks
    which corner it is.  Uniforms are broadcast across all n items.
    """
    centers = np.atleast_2d(np.array(centers))
    n = len(centers)
    V = np.zeros(4 * n, self.vtype)
    U = np.zeros(n, self.utype)
    # All four corners of a quad share that quad's center.
    for corner in range(4):
        V['a_center'][corner::4] = centers
    # Corner coordinates in [-1,+1] quad space.
    for corner, uv in enumerate([(-1, -1), (-1, +1), (+1, -1), (+1, +1)]):
        V['a_texcoord'][corner::4] = uv
    for field, value in (('fg_color', fg_color), ('bg_color', bg_color),
                         ('radius', radius), ('scale', scale),
                         ('linewidth', linewidth), ('antialias', antialias)):
        U[field][:] = value
    # Two triangles per quad: (0,1,2) and (1,2,3), repeated per item.
    I = np.resize(np.array([0, 1, 2, 1, 2, 3], dtype=np.uint32), n * (2 * 3))
    Collection.append(self, V, I, U, (4, 6))
def __init__(self, base):
    """Wrap *base* as a collection of Artist children and populate it."""
    Collection.__init__(self, Artist)
    self.base = base
    # Mirror the base's model so children can reach it through us.
    self.model = base.model
    self.populate()
def Initialize(credentials="persistent", opt_url=None):
    """Initialize the EE library.

    Called automatically by object constructors if not done explicitly.
    Calling again with a different URL does not un-initialize previously
    loaded Algorithms; it overwrites them to point at the new servers.

    Args:
      credentials: OAuth2 credentials.  The default 'persistent' loads
        credentials already stored in the filesystem (or raises an
        explanatory exception guiding the user to create them).
      opt_url: The base url for the EarthEngine REST API to connect to.
    """
    if credentials == "persistent":
        credentials = _GetPersistentCredentials()
    api_url = opt_url + "/api" if opt_url else None
    data.initialize(credentials, api_url, opt_url)
    # Initialize the dynamically loaded functions on the objects that want them.
    for ee_class in (ApiFunction, Element, Image, Feature, Collection,
                     ImageCollection, FeatureCollection, Filter, Geometry,
                     List, Number, String, Date, Dictionary, Terrain):
        ee_class.initialize()
    _InitializeGeneratedClasses()
    _InitializeUnboundMethods()
def run_pygame() -> None:
    """Set up the game objects and run the main loop (never returns)."""
    variables = Variables()
    # Game window sized from the configured display dimensions.
    screen = pygame.display.set_mode(
        variables.display.dimensions.width_height())
    # Player ship plus the two sprite groups it interacts with.
    ship = Ship(screen)
    bullets = Collection(screen)
    aliens = Collection(screen)
    game.create_fleet(variables, screen, aliens)
    # Main loop: poll events, advance state, redraw.
    while True:
        game.check_events(ship, bullets)
        game.update_objects(ship, aliens, bullets)
        game.update_screen(screen, ship, aliens, bullets)
def __init__(self, dash_atlas=None):
    """Create a dashed-circle collection.

    Args:
        dash_atlas: optional DashAtlas to share; a fresh one is created
            when None.
    """
    # FIX: dropped the redundant `self.dash_atlas = dash_atlas` that was
    # immediately overwritten by the if/else below.
    self.vtype = np.dtype([('a_center', 'f4', 2),
                           ('a_texcoord', 'f4', 2)])
    self.utype = np.dtype([('fg_color', 'f4', 4),
                           ('bg_color', 'f4', 4),
                           ('translate', 'f4', 2),
                           ('scale', 'f4', 1),
                           ('rotate', 'f4', 1),
                           ('radius', 'f4', 1),
                           ('linewidth', 'f4', 1),
                           ('antialias', 'f4', 1),
                           ('dash_phase', 'f4', 1),
                           ('dash_period', 'f4', 1),
                           ('dash_index', 'f4', 1),
                           ('dash_caps', 'f4', 2)])
    Collection.__init__(self, self.vtype, self.utype)
    if dash_atlas is None:
        self.dash_atlas = DashAtlas()
    else:
        self.dash_atlas = dash_atlas
    shaders = os.path.join(os.path.dirname(__file__), 'shaders')
    vertex_shader = os.path.join(shaders, 'circles.vert')
    fragment_shader = os.path.join(shaders, 'circles.frag')
    # FIX: close the shader source files (previously leaked handles).
    with open(vertex_shader) as vert, open(fragment_shader) as frag:
        self.shader = Shader(vert.read(), frag.read())
def __init__(self):
    """Create a solid-line collection (3D `a_curr` positions).

    Derives `a_prev`/`a_next` attributes aliasing `a_curr` at shifted
    offsets so the shader can see each vertex's neighbours, then
    compiles the line shader pair.
    """
    self.vtype = np.dtype([('a_curr', 'f4', 3), ('a_texcoord', 'f4', 2)])
    self.utype = np.dtype([('fg_color', 'f4', 4),
                           ('length', 'f4', 1),
                           ('linewidth', 'f4', 1),
                           ('antialias', 'f4', 1),
                           ('caps', 'f4', 2)])
    Collection.__init__(self, self.vtype, self.utype)
    dsize = self.vbuffer._dsize
    a_curr = self.vbuffer.attribute('a_curr')
    a_texcoord = self.vbuffer.attribute('a_texcoord')
    a_index = self.vbuffer.attribute('a_index')
    # prev/next share a_curr's layout; the offset shifts below make them
    # address the neighbouring vertices in the same buffer.
    a_next = VertexAttribute('a_next', a_curr.count, a_curr.gltype,
                             a_curr.stride, a_curr.offset)
    a_prev = VertexAttribute('a_prev', a_curr.count, a_curr.gltype,
                             a_curr.stride, a_curr.offset)
    self.attributes.extend([a_prev, a_next])
    a_index.offset += 2 * dsize
    a_curr.offset += 2 * dsize
    a_texcoord.offset += 2 * dsize
    a_next.offset += 4 * dsize
    shaders = os.path.join(os.path.dirname(__file__), '.')
    vertex_shader = os.path.join(shaders, 'lines.vert')
    fragment_shader = os.path.join(shaders, 'lines.frag')
    # FIX: close the shader source files (previously leaked handles).
    with open(vertex_shader) as vert, open(fragment_shader) as frag:
        self.shader = Shader(vert.read(), frag.read())
def __init__(self, dash_atlas=None):
    """Create a 2D dashed-line collection with full join/cap uniforms.

    Args:
        dash_atlas: optional DashAtlas to share; a fresh one is created
            when None.
    """
    self.vtype = np.dtype([('a_position', 'f4', 2),
                           ('a_segment', 'f4', 2),
                           ('a_angles', 'f4', 2),
                           ('a_tangents', 'f4', 4),
                           ('a_texcoord', 'f4', 2)])
    self.utype = np.dtype([('color', 'f4', 4),
                           ('translate', 'f4', 2),
                           ('scale', 'f4', 1),
                           ('rotate', 'f4', 1),
                           ('linewidth', 'f4', 1),
                           ('antialias', 'f4', 1),
                           ('linecaps', 'f4', 2),
                           ('linejoin', 'f4', 1),
                           ('miter_limit', 'f4', 1),
                           ('length', 'f4', 1),
                           ('dash_phase', 'f4', 1),
                           ('dash_period', 'f4', 1),
                           ('dash_index', 'f4', 1),
                           ('dash_caps', 'f4', 2),
                           ('closed', 'f4', 1)])
    Collection.__init__(self, self.vtype, self.utype)
    shaders = os.path.join(os.path.dirname(__file__), 'shaders')
    vertex_shader = os.path.join(shaders, 'dash-lines-2D.vert')
    fragment_shader = os.path.join(shaders, 'dash-lines-2D.frag')
    if dash_atlas is None:
        self.dash_atlas = DashAtlas()
    else:
        self.dash_atlas = dash_atlas
    # FIX: close the shader source files (previously leaked handles);
    # also removed a commented-out test.frag leftover.
    with open(vertex_shader) as vert, open(fragment_shader) as frag:
        self.shader = Shader(vert.read(), frag.read())
def Initialize(credentials=None, opt_url=None):
    """Initialize the EE library.

    Called automatically by object constructors if not done explicitly.
    Calling again with a different URL does not un-initialize previously
    loaded Algorithms; it overwrites them to point at the new servers.

    Args:
      credentials: OAuth2 credentials.
      opt_url: The base url for the EarthEngine REST API to connect to.
    """
    api_url = opt_url + '/api' if opt_url else None
    data.initialize(credentials, api_url, opt_url)
    # Initialize the dynamically loaded functions on the objects that want them.
    for ee_class in (ApiFunction, Element, Image, Feature, Collection,
                     ImageCollection, FeatureCollection, Filter, Geometry,
                     List, Number, String, Date, Dictionary):
        ee_class.initialize()
    _InitializeGeneratedClasses()
    _InitializeUnboundMethods()
def append(self, points, fg_color=(0, 0, 0, 1), linewidth=1.0, antialias=1.0, caps = ('round','round')):
    """Append one polyline as a doubled-vertex strip.

    Each point becomes two vertices (above/below the line); a phantom
    pair is added before the first and after the last point so the
    shader can compute end tangents.
    """
    P = np.array(points).astype(np.float32)
    n = len(P)
    # 2 vertices per point plus the leading/trailing phantom pairs.
    V = np.zeros(2*(n+2), self.vtype)
    U = np.zeros(1, self.utype)
    # Triangle indices: 2*(n-1) triangles, each a (0,1,2) pattern shifted
    # by its starting vertex.
    I = (np.ones((2*n-2,3),dtype=np.uint32)*[0,1,2]).ravel()
    I += np.repeat(np.arange(2*n-2),3)
    I = I.ravel()
    # Cumulative arc length along the polyline (used for texturing caps).
    D = ((P[:-1]-P[1:])**2).sum(axis=1)
    D = np.sqrt(D).cumsum().astype(np.float32)
    U['fg_color'] = fg_color
    U['linewidth'] = linewidth
    U['antialias'] = antialias
    U['length'] = D[-1]
    # NOTE(review): the .get() fallback is the *string* 'round', while a
    # hit returns whatever self.caps maps to (presumably a numeric cap
    # code) — confirm the fallback was not meant to be self.caps['round'].
    U['caps'] = (self.caps.get(caps[0], 'round'), self.caps.get(caps[1], 'round'))
    # Interior vertices: each point duplicated into the two strip rows.
    V['a_curr'][2:2+2*n:2] = P
    V['a_curr'][3:3+2*n:2] = P
    # Phantom end pairs extrapolate the first/last segment.
    V['a_curr'][:2] = P[0] - (P[ 1] - P[ 0])
    V['a_curr'][-2:] = P[-1] + (P[-1] - P[-2])
    # Arc length in texcoord.x, side (+/-1) in texcoord.y.
    V['a_texcoord'][4:4+2*(n-1):2,0] = D
    V['a_texcoord'][5:5+2*(n-1):2,0] = D
    V['a_texcoord'][0::2,1] = -1
    V['a_texcoord'][1::2,1] = +1
    Collection.append(self, V, I, U )
def main():
    """Demonstrate the bridge pattern with two Collection implementations."""
    # FIX: Python-2-only `print` statements replaced with `print(...)`
    # calls, which parse identically on both Python 2 and 3 for a single
    # argument.
    print('Creating a List-Implemented Collection...')
    collection = Collection(ListCollectionImp)
    example(collection)
    print('Creating a Dict-Implemented Collection...')
    collection = Collection(DictCollectionImp)
    example(collection)
def setUp(self):
    """Fetch the sample weather JSON and build two Collections:
    one restricted to a getlist (including a missing key) and one default.
    """
    self.data = requests.get(
        "http://samples.openweathermap.org/data/2.5/weather?zip=94040,us&appid=b6907d289e10d714a6e88b30761fae22"
    ).json()
    # Dotted paths select nested fields; "doesntExist" exercises misses.
    wanted_fields = ["weather.main", "main.temp", "clouds.all", "doesntExist"]
    self.coll = Collection(getlist=wanted_fields)
    self.dcoll = Collection()
def __init__(self, docsURLs=None, docsTxts=None, docsColl=None):
    """Build the document collection from, in order of preference:
    a ready-made collection, URLs + texts, or URLs alone.

    FIX: the original used mutable default arguments ([]), which are
    shared across all calls; None sentinels preserve the exact same
    truthiness behavior without that hazard.
    """
    docsURLs = docsURLs if docsURLs is not None else []
    docsTxts = docsTxts if docsTxts is not None else []
    docsColl = docsColl if docsColl is not None else []
    if docsColl:
        self.coll = docsColl
    elif docsURLs:
        if docsTxts:
            self.coll = Collection(docsURLs, docsTxts)
        else:
            self.coll = Collection(docsURLs)
    # NOTE: as in the original, self.coll is never set when neither
    # docsColl nor docsURLs is provided.
def test_that_two_collections_concatenation_contains_the_elements_of_the_two_collections(a_list, another_list):
    """Removing every source element from the concatenation empties it."""
    collection = Collection(a_list)
    another_collection = Collection(another_list)
    new_collection = collection.concat(another_collection)
    # Strip out each element of both inputs, one at a time.
    for source in (a_list, another_list):
        for element in source:
            new_collection = new_collection.remove(element)
    assert len(new_collection) == 0
def __init__(self, args):
    """Store CLI arguments and pre-allocate every container the WNUT
    NER data pipeline fills later (collections, index maps, embeddings,
    length bookkeeping, and the final train/dev/test splits).
    """
    # paths to data files
    self.train_path = args.wnut + "/" + const.TRAIN_FILE
    self.dev_path = args.wnut + "/" + const.DEV_FILE
    self.test_path = args.wnut + "/" + const.TEST_FILE
    self.train_path_orth = args.wnut + "/" + const.ORTH_TRAIN_FILE
    self.dev_path_orth = args.wnut + "/" + const.ORTH_DEV_FILE
    self.test_path_orth = args.wnut + "/" + const.ORTH_TEST_FILE
    self.emb_path = args.emb_path
    # bells and whistles of the model
    self.fine_tune = args.fine_tune
    self.oov = args.oov
    self.emb_to_use = args.emb
    # hyperparameters
    self.orth_word_emb_dim = args.orth_word_emb_dim
    self.word_emb_dim = 0  # filled in once embeddings are loaded
    # objects to store collection of words, orthographic words, and labels
    self.word_collection = Collection("word")
    self.orth_word_collection = Collection("orth_word")
    self.label_collection = Collection("label")
    self.char_collection = Collection("char")
    self.orth_char_collection = Collection("orth_char")
    self.logger = logger
    # vars to store the data from the files (keyed per split)
    self.words = {}
    self.orth_words = {}
    self.labels = {}
    self.word_indices = {}
    self.orth_word_indices = {}
    self.label_indices = {}
    self.char_indices = {}
    self.orth_char_indices = {}
    # store all the embeddings in this dict
    self.emb = {}
    self.emb_table = {}
    # lengths of the sentences
    self.sequence_lengths = {}
    self.word_lengths = {}
    self.max_lengths = {}
    self.max_sentence_length = 0
    self.max_word_length = 0
    self.num_labels = 0
    # for final train, dev and test data
    self.train_data = {}
    self.dev_data = {}
    self.test_data = {}
def display_collection(self, id):
    """Print one collection and its items in table form.

    Raises:
        Exception: if no collection exists with the given id.
    """
    collection = Collection.get_by_id(self.db, id)
    # FIX: identity comparison with None (`is None`), not `== None`;
    # also dropped stray C-style semicolons.
    if collection is None:
        raise Exception("No collection with id {}!".format(id))
    Collection.print_table_header()
    collection.print_for_table()
    items = Item.get_by_collection(self.db, collection.id)
    print("ITEMS")
    Item.print_table_header()
    for item in items:
        item.print_for_table()
def append(self, center=(0, 0), radius=100.0, color=(0, 0, 0, 1),
           linewidth=1.0, antialias=1.0, translate=(0, 0), scale=1.0,
           rotate=0.0):
    """Bake one circle at *center* and append it with its uniforms."""
    V, I, _ = self.bake(center)
    U = np.zeros(1, self.utype)
    # Fill the single uniform record field by field.
    for field, value in (('linewidth', linewidth),
                         ('antialias', antialias),
                         ('color', color),
                         ('translate', translate),
                         ('scale', scale),
                         ('rotate', rotate),
                         ('radius', radius)):
        U[field] = value
    Collection.append(self, V, I, U)
def __init__(self, pid, stats=True, auth=None, def_name=None, cache=False):
    """Load project *pid* — from the local cache file when allowed, else
    from the API — then initialize the Collection base with its
    metagenome ids.

    Args:
        pid: project id.
        stats: forwarded to Collection.__init__.
        auth: API auth token.
        def_name: variable name for display; inferred from the calling
            source line when None.
        cache: when True, read/write the JSON cache file in Ipy.CCH_DIR.
    """
    # set project
    self._cfile = Ipy.CCH_DIR + '/' + pid + '.json'
    project = None
    if cache and os.path.isfile(self._cfile):
        # try load from cache if given (best-effort: failures fall
        # through to the API load)
        try:
            project = json.load(open(self._cfile, 'rU'))
            if Ipy.DEBUG:
                sys.stdout.write(
                    "project %s loaded from cached file (%s)\n" % (pid, self._cfile))
        except:
            pass
    if project is None:
        # load from api
        project = self._get_project(pid, auth)
    if project and cache and os.path.isdir(Ipy.CCH_DIR):
        # save to cache if given (best-effort)
        try:
            json.dump(project, open(self._cfile, 'w'))
            if Ipy.DEBUG:
                # BUG FIX: this message referenced the undefined name
                # `mgid`, raising a NameError that the bare except
                # silently swallowed — the debug line never printed.
                sys.stdout.write(
                    "project %s saved to cached file (%s)\n" % (pid, self._cfile))
        except:
            pass
    if project is not None:
        for key, val in project.iteritems():
            setattr(self, key, val)
    else:
        self.id = pid
        self.name = None
        return
    # hack to get variable name from the calling source line
    if def_name == None:
        try:
            (filename, line_number, function_name, text) = traceback.extract_stack()[-2]
            def_name = text[:text.find('=')].strip()
        except:
            pass
    self.defined_name = def_name
    # call collection init - from cache if given
    Collection.__init__(self, self.mgids(), stats=stats, auth=auth,
                        def_name=self.defined_name, cache=cache)
def __init__(self):
    """Create a raw (undashed) 2D line collection and compile its shaders."""
    self.vtype = np.dtype([('a_position', 'f4', 2),
                           ('a_segment', 'f4', 2),
                           ('a_angles', 'f4', 2),
                           ('a_tangents', 'f4', 4),
                           ('a_texcoord', 'f4', 2)])
    self.utype = np.dtype([('color', 'f4', 4),
                           ('translate', 'f4', 2),
                           ('scale', 'f4', 1),
                           ('rotate', 'f4', 1),
                           ('linewidth', 'f4', 1),
                           ('antialias', 'f4', 1),
                           ('length', 'f4', 1),
                           ('closed', 'f4', 1)])
    Collection.__init__(self, self.vtype, self.utype)
    shaders = os.path.join(os.path.dirname(__file__), 'shaders')
    vertex_shader = os.path.join(shaders, 'raw-lines-2D.vert')
    fragment_shader = os.path.join(shaders, 'raw-lines-2D.frag')
    # FIX: close the shader source files (previously leaked handles).
    with open(vertex_shader) as vert, open(fragment_shader) as frag:
        self.shader = Shader(vert.read(), frag.read())
def main():
    """Smoke-test the functional Collection API end to end."""
    coll = Collection(1, 2, 3)
    # map with the identity must be a no-op
    assert coll == coll.map(lambda value: value)
    assert Collection(2) == coll.filter(lambda value: value % 2 == 0)
    assert 6 == coll.reduce(0, lambda acc, value: acc + value)
    # prefix / suffix slicing
    assert Collection(1, 2) == coll.take(2)
    assert Collection(3) == coll.drop(2)
    zeros_then_one = Collection(0, 0, 0, 1)
    assert Collection(0, 0, 0) == zeros_then_one.take_while(lambda value: value == 0)
    assert Collection(1) == zeros_then_one.drop_while(lambda value: value == 0)
    assert Collection(1).search(1)
    print('tests passed !')
def partitionCollection(K):
    """Partition the collection according to node adjacency.

    The actor numbers will be connected to a collection of property
    numbers, e.g. 0 [1 [4,12] 2 [6,20]], where 0 is the actor number,
    1 and 2 are the property numbers and 4, 12, 6 and 20 are the
    element numbers.  Mutates *K* in place and sets its type to
    'partition'.
    """
    sel = getCollection(K)
    if len(sel) == 0:
        print("Nothing to partition!")
        return
    if K.obj_type == 'actor':
        # Rebuild K so each actor maps to its full element range.
        actor_numbers = K.get(-1,[])
        K.clear()
        for i in actor_numbers:
            K.add(range(sel[int(i)].nelems()),i)
    # Property numbers are assigned consecutively across actors,
    # starting at 1; each actor consumes as many as it has parts.
    prop = 1
    j = 0
    for i in K.keys():
        p = sel[j].partitionByConnection() + prop
        print("Actor %s partitioned in %s parts" % (i,p.max()-p.min()+1))
        # Pair each element with its property number inside a fresh
        # sub-collection stored back under the actor key.
        C = Collection()
        C.set(transpose(asarray([p,K[i]])))
        K[i] = C
        prop += p.max()-p.min()+1
        j += 1
    K.setType('partition')
def test_get_latest(self):
    """get_latest must return the comic with the highest number, even
    when a lower-numbered comic was inserted last."""
    db_path = "./{}.sqlite".format(str(uuid.uuid4()))
    db = Collection(db_path)
    latest = 10
    # Insert comics 1..latest in order, then an out-of-order number 0.
    for num in list(range(1, latest + 1)) + [0]:
        db.add_comic({
            "number": num,
            "img_url": "https://www.google.com",
            "title": "A Title",
            "alt": "Some alt-text",
            "transcript": "Hoi hoi"
        })
    comic = db.get_latest()
    self.assertEqual(comic["number"], latest)
    os.remove(db_path)
def read_data():
    """Load the test collection and return its posting lists as sorted,
    deduplicated uint8 arrays (one per collection entry).
    """
    # Use test data
    test_collection = Collection("test_data/test_collection")
    # FIX: the enumerate() index was unused — iterate directly, and build
    # the list with a comprehension instead of a manual append loop.
    return [np.unique(np.array(pl[0], dtype=np.uint8))
            for pl in test_collection]
def __init__(self):
    """Create a scatter-marker collection and compile its shaders."""
    self.vtype = np.dtype([('a_center', 'f4', 3),
                           ('a_texcoord', 'f4', 2)])
    self.utype = np.dtype([('fg_color', 'f4', 4),
                           ('bg_color', 'f4', 4),
                           ('translate', 'f4', 3),
                           ('scale', 'f4', 1),
                           ('radius', 'f4', 1),
                           ('linewidth', 'f4', 1),
                           ('antialias', 'f4', 1)])
    Collection.__init__(self, self.vtype, self.utype)
    shaders = os.path.join(os.path.dirname(__file__), '.')
    vertex_shader = os.path.join(shaders, 'scatter.vert')
    fragment_shader = os.path.join(shaders, 'scatter.frag')
    # FIX: close the shader source files (previously leaked handles).
    with open(vertex_shader) as vert, open(fragment_shader) as frag:
        self.shader = Shader(vert.read(), frag.read())
def test_text_splitter_dict():
    """A Collection built with split_lines + sep behaves like a dict:
    values/keys/items, len, membership, indexing, and KeyError on a miss.
    """
    # FIX: dropped the dead `transfers` alias — only `t` was ever used.
    t = '''
        # January
        bob = $1,000      # from Bob
        ted = -$500       # to Ted

        # February
        carol = $750      # from Carol
        alice = -$1250    # to Alice
    '''
    T = Collection(t, split_lines, cull=True, strip=True, comment='#', sep='=')
    d = dict(bob='$1,000', ted='-$500', carol='$750', alice='-$1250')
    # iteration and the dict-style accessors
    assert list(m for m in T) == list(d.values())
    assert T.values() == list(d.values())
    assert T.keys() == list(d.keys())
    assert T.items() == list((k, v) for k, v in d.items())
    assert len(T) == len(d)
    # membership is by value
    assert '$1,000' in T
    assert '-$500' in T
    assert '$750' in T
    assert '-$1250' in T
    assert 'upsilon' not in T
    # indexing is by key
    assert T['bob'] == '$1,000'
    assert T['ted'] == '-$500'
    assert T['carol'] == '$750'
    assert T['alice'] == '-$1250'
    with pytest.raises(KeyError) as exception:
        T['jeff']
    assert str(exception.value) == "'jeff'"
def __init__(self, collection=None, records=None, dm=None, metric='geo', **kwargs):
    """Initialise from a Collection or raw records, with an optional
    distance matrix.

    A fully-formed Collection object can be given, or a list of records
    (from which a Collection is built, forwarding **kwargs).  If no
    distance matrix *dm* is supplied, one is computed with *metric*
    (default: geodesic).
    """
    self.collection = Collection(records, **kwargs) if records else collection
    self.dm = dm if dm is not None else self.calc_dm(metric)
    self._warnings()
def test_generator():
    """A Collection over a range iterates, indexes and sizes like the
    equivalent list, and raises IndexError past the end."""
    expected = list(range(5))
    L = Collection(range(5))
    # iteration and the list-style accessors
    assert list(L) == expected
    assert [member for member in L] == expected
    assert L.values() == expected
    assert L.keys() == list(range(len(expected)))
    assert L.items() == list(zip(range(len(expected)), expected))
    assert len(L) == len(expected)
    # membership for each element, plus one miss
    for member in expected:
        assert member in L
    assert 5 not in L
    # positive and negative indexing agree with the list
    for index in range(5):
        assert L[index] == index
        assert L[index - 5] == index
    with pytest.raises(IndexError) as exception:
        L[5]
    assert str(exception.value) == 'range object index out of range'
def Reset():
    """Reset the library. Useful for re-initializing to a different server."""
    data.reset()
    for ee_class in (ApiFunction, Image, Feature, Collection,
                     ImageCollection, FeatureCollection, Filter, Geometry,
                     Number, String):
        ee_class.reset()
    _ResetGeneratedClasses()
    global Algorithms
    Algorithms = _AlgorithmsContainer()
def test_append_several_item_2(self):
    """Appending 40 vertices / 10 uniform items with itemsize (4, 1)
    yields 10 items whose indices are offset by 4 per item.
    """
    C = Collection(vtype, utype)
    C.append(np.zeros(40, dtype=vtype),
             np.zeros(10, dtype=itype),
             itemsize=(4, 1))
    # FIX: `xrange` is Python-2-only; `range` behaves identically here.
    for i in range(10):
        assert np.allclose(C[i].indices, 4 * i)
def test_insert_one_item(self):
    """Inserting at position 0 shifts the pre-existing item's indices by
    one itemsize (4)."""
    C = Collection(vtype, utype)
    C.append(vertices, indices, uniforms)
    C.insert(0, vertices, indices, uniforms)
    assert len(C) == 2
    # item 0 is the inserted copy; item 1 is the original, now offset.
    for position, expected in enumerate([indices, 4 + indices]):
        assert np.allclose(C[position].indices, expected)
def __init__(self, home=None):
    """Construct the Collector singleton: wire up config, plug-in and
    collection managers.

    Args:
        home: optional home directory override passed to the config.

    Raises:
        Exception: if a Collector instance already exists.
    """
    # Enforce the singleton: only one Collector may ever be built.
    if Collector._instance is not None:
        raise Exception("Called more than once")
    Collector._instance = self
    super(Collector, self).__init__()
    # Configuration
    config = Config.get_instance()
    self.add_manager('config', config)
    if home is not None:
        config.set_home(home)
    if self.conf('build_user_dir'):
        config.build_data_directory()
    # Plug-ins: user plug-ins live under the app-data directory.
    sys_plugin_path = config.get_appdata_path()
    sys_plugin_path = os.path.join(sys_plugin_path, 'user_plugins')
    # System plug-ins
    from collector.plugins import get_sys_plugins
    plugins = get_sys_plugins()
    # >= python 2.7 (dict comprehension)
    sys_plugins = {plugin.get_id(): plugin for plugin in plugins}
    plugin_manager = PluginManager.get_instance(
        self.conf('plugins_enabled'), sys_plugins, paths=[sys_plugin_path])
    self.add_manager('plugin', plugin_manager)
    self.add_manager('collection', Collection.get_instance(True))
def test_list():
    """A Collection over a list iterates, indexes and sizes like the
    underlying list, and raises IndexError past the end."""
    source = ['alpha', 'beta', 'gamma', 'delta', 'epsilon']
    L = Collection(source)
    # iteration and the list-style accessors
    assert list(L) == source
    assert [member for member in L] == source
    assert L.values() == source
    assert L.keys() == list(range(len(source)))
    assert L.items() == list(zip(range(len(source)), source))
    assert len(L) == len(source)
    # membership for each element, plus one miss
    for member in source:
        assert member in L
    assert 'upsilon' not in L
    # positive and negative indexing agree with the list
    for index, member in enumerate(source):
        assert L[index] == member
        assert L[index - len(source)] == member
    with pytest.raises(IndexError) as exception:
        L[5]
    assert str(exception.value) == 'list index out of range'
def create_child(self, id):
    """Create a child source; id -1 means the currently playing source."""
    if id != -1:
        return Collection.create_child(self, id)
    # Special case: wrap whatever the player is currently playing.
    src = self.shell.get_player().props.source
    return Source(self, id, src, src.props.query_model)
def __init__(self, shell):
    """Collect the shell's library source and every playlist as Sources,
    and track playlists added later."""
    Collection.__init__(self, Source)
    self.shell = shell
    self.sources = {}
    self.sources_rev = {}
    self.source_id = 0
    # The main library first, then every existing playlist.
    self.add_source(self.shell.props.library_source)
    manager = self.shell.get_playlist_manager()
    for playlist in manager.get_playlists():
        self.add_source(playlist)
    # Keep tracking playlists created after startup.
    manager.connect('playlist-added', self.on_playlist_added)
def get_child(self, name):
    """Return a synthesized Group for system group names; otherwise defer
    to the Collection lookup."""
    if name not in self._system_groups:
        return Collection.get_child(self, name)
    # System groups are materialized on demand and parented to us.
    group = Group(self.request)
    group.__name__ = name
    group.__parent__ = self
    return group
def __init__(self, datacube=None, collection=None):
    """Set up the ingester object.

    datacube: A datacube instance (which has a database connection and
        tile_type and band dictionaries). If this is None the Ingester
        will create its own datacube instance using the arguments
        returned by self.parse_args().

    collection: The datacube collection which will accept the ingest.
        If this is None the Ingester will set up its own collection
        using self.datacube.
    """
    self.args = self.parse_args()
    if self.args.debug:
        # Set DEBUG level on the root logger
        logging.getLogger().setLevel(logging.DEBUG)
    self.datacube = datacube if datacube is not None else IngesterDataCube(self.args)
    self.agdc_root = self.datacube.agdc_root
    self.collection = collection if collection is not None else Collection(self.datacube)
def restrict_to_ready_deliverable_works(
    self,
    query,
    work_model,
    edition_model=None,
    collection_ids=None,
    show_suppressed=False,
):
    """Restrict a query to show only presentation-ready works present in
    an appropriate collection which the default client can fulfill.

    Assumes the query already has an active join against LicensePool.

    :param query: The query to restrict.
    :param work_model: Either Work or one of the MaterializedWork
        materialized view classes.
    :param collection_ids: Only include titles in the given collections;
        defaults to all of this object's collections.
    :param show_suppressed: Include titles that have nothing but
        suppressed LicensePools.
    """
    from collection import Collection
    if not collection_ids:
        collection_ids = [collection.id for collection in self.all_collections]
    return Collection.restrict_to_ready_deliverable_works(
        query,
        work_model,
        edition_model,
        collection_ids=collection_ids,
        show_suppressed=show_suppressed,
        allow_holds=self.allow_holds,
    )
def veto_add_child(self, name, child, unique=True):
    """Return an error message (string) if the child can't be added with
    the given name; return None when the add is allowed."""
    if child._object_type not in self._allowed_child_types:
        return "This %s does not allow child objects of type %s." % (self._object_type, child._object_type)
    # Delegate remaining checks; normalize any falsy result to None.
    return Collection.veto_add_child(self, name, child, unique=unique) or None
def __init__(self, dash_atlas=None):
    """Create a grid collection (major/minor grid lines and ticks).

    Besides the usual vertex/uniform buffers, a dynamic gbuffer of
    1024-texel float4 rows holds per-grid data for upload as a texture.

    Args:
        dash_atlas: optional DashAtlas to share; a fresh one is created
            when None.
    """
    self.vtype = np.dtype([('a_texcoord', 'f4', 2)])
    self.utype = np.dtype([('translate', 'f4', 2),
                           ('scale', 'f4', 1),
                           ('rotate', 'f4', 1),
                           ('major_grid', 'f4', 2),
                           ('minor_grid', 'f4', 2),
                           ('major_tick_size', 'f4', 2),
                           ('minor_tick_size', 'f4', 2),
                           ('major_grid_color', 'f4', 4),
                           ('minor_grid_color', 'f4', 4),
                           ('major_tick_color', 'f4', 4),
                           ('minor_tick_color', 'f4', 4),
                           ('major_grid_width', 'f4', 1),
                           ('minor_grid_width', 'f4', 1),
                           ('major_tick_width', 'f4', 1),
                           ('minor_tick_width', 'f4', 1),
                           ('size', 'f4', 2),
                           ('offset', 'f4', 2),
                           ('zoom', 'f4', 1),
                           ('antialias', 'f4', 1),
                           ('major_dash_phase', 'f4', 1),
                           ('minor_dash_phase', 'f4', 1),
                           ('major_dash_index', 'f4', 1),
                           ('major_dash_period', 'f4', 1),
                           ('major_dash_caps', 'f4', 2),
                           ('minor_dash_period', 'f4', 1),
                           ('minor_dash_index', 'f4', 1),
                           ('minor_dash_caps', 'f4', 2)])
    self.gtype = np.dtype([('name', 'f4', (1024, 4))])
    Collection.__init__(self, self.vtype, self.utype)
    if dash_atlas is None:
        self.dash_atlas = DashAtlas()
    else:
        self.dash_atlas = dash_atlas
    shaders = os.path.join(os.path.dirname(__file__), 'shaders')
    vertex_shader = os.path.join(shaders, 'grid.vert')
    fragment_shader = os.path.join(shaders, 'grid.frag')
    # FIX: close the shader source files (previously leaked handles).
    with open(vertex_shader) as vert, open(fragment_shader) as frag:
        self.shader = Shader(vert.read(), frag.read())
    self._gbuffer = DynamicBuffer(self.gtype)
    self._gbuffer_shape = [0, 4 * 1024]
    self._gbuffer_id = 0
def __init__(self, pid, metadata=True, stats=True, auth=None, def_name=None, cache=False, reset_cache=False):
    """Load project *pid* — from an on-disk cache directory when allowed,
    else from the API — then initialize the Collection base with its
    metagenome ids.

    Args:
        pid: project id.
        metadata, stats: forwarded to the project fetch / Collection init.
        auth: API auth token.
        def_name: variable name for display; inferred from the calling
            source line when None.
        cache: when True, use/create the per-project cache directory.
        reset_cache: when True, delete any existing cache dir first.
    """
    # set project; cache dir is per-project under the notebook dir
    self.cache = Ipy.NB_DIR+'/'+pid if cache else None
    project = None
    # hack to get variable name from the calling source line
    if def_name == None:
        try:
            (filename,line_number,function_name,text)=traceback.extract_stack()[-2]
            def_name = text[:text.find('=')].strip()
        except:
            pass
    self.defined_name = def_name
    # reset cache if asked
    if reset_cache and os.path.isdir(self.cache):
        shutil.rmtree(self.cache)
    # make cache dir
    if self.cache and (not os.path.isdir(self.cache)):
        os.mkdir(self.cache)
    # try load from cached (best-effort: any failure falls through to
    # the API load below)
    if self.cache and os.path.isdir(self.cache) and os.path.isfile(self.cache+'/'+pid+'.json'):
        try:
            project = json.load(open(self.cache+'/'+pid+'.json', 'rU'))
            sys.stdout.write("project '%s' loaded from cache %s\n"%(self.defined_name, pid))
        except:
            pass
    # load from api
    if project is None:
        project = self._get_project(pid, metadata, auth)
        if project and self.cache and os.path.isdir(self.cache):
            # cache it if dir given and not loaded from file (best-effort)
            try:
                json.dump(project, open(self.cache+'/'+pid+'.json', 'w'))
                sys.stdout.write("project '%s' saved to cache %s\n"%(self.defined_name, pid))
            except:
                pass
    # fetch failed: leave a stub object with just the id
    if project is None:
        self.id = pid
        self.name = None
        return
    # NOTE: iteritems() — this block is Python-2 code.
    for key, val in project.iteritems():
        setattr(self, key, val)
    # call collection init - from cache if given
    Collection.__init__(self, self.mgids(), metadata=metadata, stats=stats, auth=auth, def_name=self.defined_name, cache=self.cache)
def rename_child(self, name, newname, _validate=True):
    """Rename a child and keep the persisted name order in sync.

    Returns 1 on success, 0 when the underlying rename failed.
    """
    if not Collection.rename_child(self, name, newname, _validate=_validate):
        return 0
    ordered = self.get_ordered_names()
    if ordered is not None:
        # Swap the old name for the new one in place and persist.
        ordered[ordered.index(name)] = newname
        self._ordered_names = ordered
        self.save()
    return 1
def __init__(self, pid, auth=None, def_name=None, cache=False):
    """Load project *pid* — from the local cache file when allowed, else
    from the API — then initialize the Collection base with its
    metagenome ids.

    Args:
        pid: project id.
        auth: API auth token.
        def_name: variable name for display; inferred from the calling
            source line when None.
        cache: when True, read/write the JSON cache file in Ipy.CCH_DIR.
    """
    # set project
    self._cfile = Ipy.CCH_DIR+'/'+pid+'.json'
    project = None
    if cache and os.path.isfile(self._cfile):
        # try load from cache if given (best-effort: failures fall
        # through to the API load)
        try:
            project = json.load(open(self._cfile, 'rU'))
            if Ipy.DEBUG:
                sys.stdout.write("project %s loaded from cached file (%s)\n"%(pid, self._cfile))
        except:
            pass
    if project is None:
        # load from api
        project = self._get_project(pid, auth)
    if project and cache and os.path.isdir(Ipy.CCH_DIR):
        # save to cache if given (best-effort)
        try:
            json.dump(project, open(self._cfile, 'w'))
            if Ipy.DEBUG:
                sys.stdout.write("project %s saved to cached file (%s)\n"%(pid, self._cfile))
        except:
            pass
    if project is not None:
        # NOTE: iteritems() — this block is Python-2 code.
        for key, val in project.iteritems():
            setattr(self, key, val)
    else:
        # fetch failed: leave a stub object with just the id
        self.id = pid
        self.name = None
        return
    # hack to get variable name from the calling source line
    if def_name == None:
        try:
            (filename,line_number,function_name,text)=traceback.extract_stack()[-2]
            def_name = text[:text.find('=')].strip()
        except:
            pass
    self.defined_name = def_name
    # call collection init - from cache if given
    Collection.__init__(self, self.mgids(), auth=auth, def_name=self.defined_name, cache=cache)
def show_plugin():
    """web2py controller: run a collectd *action* for one *plugin* on the
    server identified by the `host` request variable, over `timespan`.

    Returns a dict with the action's result for the view.
    """
    # FIX: dropped the dead `result=''` that was immediately overwritten.
    server_id = request.get_vars.host
    action = request.get_vars.action
    timespan = request.get_vars.timespan
    plugin = request.get_vars.plugin
    result = Collection.action_collection(
        Server.get_pathrdd_server(server_id, db), plugin, timespan, action)
    return dict(result=result)
def append(self, center=(0, 0), radius=100.0, fg_color=(0, 0, 0, 1),
           bg_color=(1, 1, 1, 0), linewidth=1.0, antialias=1.0,
           translate=(0, 0), scale=1.0, rotate=0.0,
           dash_pattern='dotted', dash_phase=0.0,
           dash_caps=('round', 'round')):
    """Bake one dashed circle at *center* and append it with its uniforms.

    Dash uniforms are only filled when a dash atlas is attached.
    """
    V, I, _ = self.bake(center)
    U = np.zeros(1, self.utype)
    U['linewidth'] = linewidth
    U['antialias'] = antialias
    U['fg_color'] = fg_color
    U['bg_color'] = bg_color
    U['translate'] = translate
    U['scale'] = scale
    U['rotate'] = rotate
    U['radius'] = radius
    if self.dash_atlas:
        dash_index, dash_period = self.dash_atlas[dash_pattern]
        # BUG FIX: the dash_phase parameter was accepted but ignored —
        # a hard-coded 0 was stored.  The default (0.0) preserves the
        # old behavior for existing callers.
        U['dash_phase'] = dash_phase
        U['dash_index'] = dash_index
        U['dash_period'] = dash_period
        # NOTE(review): the .get() fallback is the *string* 'round',
        # while a hit returns the atlas cap code — confirm the fallback
        # was not meant to be self.caps['round'].
        U['dash_caps'] = (self.caps.get(dash_caps[0], 'round'),
                          self.caps.get(dash_caps[1], 'round'))
    Collection.append(self, V, I, U)
def show_graph():
    """web2py controller: render one collectd RRD graph and stream it
    back as a PNG."""
    params = request.get_vars
    # Resolve the server's RRD path and timezone in one lookup.
    server_info = Server.get_pathrdd_timezone_server(params.server_id, db)
    response.headers['Content-Type'] = "image/png"
    graph = Collection.get_graph(
        params.action, params.plugin, params.type, params.host,
        params.timespan, server_info.f_path_rrd, server_info.f_time_zone,
        params.start, params.end, params.plugin_instance)
    return response.stream(graph)
def process_staging(functions):
    """Interactively move every file in the staging directory into a
    chosen (or newly created) collection and persist an Item for it.
    """
    # Get all files in staging directory.
    # FIX: renamed the loop variable `file` (shadowed the builtin) and
    # dropped the dead collection_id/collection_path initializers.
    staged = [f for f in listdir(functions.stage) if isfile(join(functions.stage, f))]
    for fname in staged:
        # List collections for user, plus the "new collection" option.
        functions.list_collections()
        print("N. New Collection")
        print("File: {}".format(fname))
        val = input("Select collection to use (id): ")
        # Get or create selected collection.
        if val == "N":
            coll = create_collection(functions.library, functions.db)
        else:
            coll = Collection.get_by_id(functions.db, int(val))
        # Assign name and move file.
        item_name = input("Enter a name for {} (if blank filename will be used): ".format(fname))
        if item_name == "":
            item_name = fname
        item = Item(None, coll.id, item_name, fname)
        rename(join(functions.stage, fname), join(functions.library, coll.path, fname))
        # Persist.
        item.insert(functions.db)
    print("Staging processed!")
def has_child(self, name):
    """True when *name* is a system group or a regular Collection child."""
    return name in self._system_groups or Collection.has_child(self, name)
def delete_collection(self, id):
    """Delete the collection with the given id.

    NOTE(review): assumes the id exists — if get_by_id returns None this
    raises AttributeError; confirm callers validate the id first.
    """
    # FIX: dropped stray C-style semicolons.
    collection = Collection.get_by_id(self.db, id)
    collection.delete()
def list_collections(self):
    """Print every collection in the database as a table."""
    all_collections = Collection.get_all(self.db)
    print("COLLECTIONS")
    Collection.print_table_header()
    for entry in all_collections:
        entry.print_for_table()