def test_create_event_from_dict_with_all_fields(self):
    """Creating an event from a complete data dict persists every field."""
    picture_path = local(__file__).dirname + "/../../static/img/team/alja.jpg"
    with open(picture_path) as fp:
        buf = StringIO.StringIO()
        buf.write(fp.read())
    uploaded_picture = InMemoryUploadedFile(
        buf, None, "alja.jpg", "jpeg", buf.len, None)
    uploaded_picture.seek(0)
    event_data = {
        "end_date": datetime.datetime.now(),
        "start_date": datetime.datetime.now(),
        "organizer": "some organizer",
        "creator": User.objects.filter(pk=1)[0],
        "title": "event title",
        "pub_date": datetime.datetime.now(),
        "country": "SI",
        "geoposition": Geoposition(46.05528, 14.51444),
        "location": "Ljubljana",
        "audience": [1],
        "theme": [1],
        "tags": ["tag1", "tag2"],
        "picture": uploaded_picture,
    }
    test_event = create_or_update_event(**event_data)
    # pk 2 — presumably one fixture event already exists; verify fixtures.
    self.assertEqual(2, test_event.pk)
    self.assertEqual("Ljubljana", test_event.location)
    self.assertEqual("46.05528", str(test_event.geoposition.latitude))
    self.assertIn("tag1", test_event.tags.names())
    self.assertIn("tag2", test_event.tags.names())
    assert "event_picture/alja" in test_event.picture.path
def _queue_worker(rc):
    # Multithreaded file puller: consumes (remote, local, item, items_done)
    # tuples from rc._queue, pulls each file over SCP and updates the
    # progress meter after every completed item.
    jenkins_host = composite['jenkins_host']
    client = rc.ssh_client
    client.connect(jenkins_host,
                   username=credentials['jenkins-result']['username'],
                   password=credentials['jenkins-result']['password'],
                   timeout=10,
                   allow_agent=False,
                   look_for_keys=False,
                   gss_auth=False)
    scp = None
    while True:
        source, destination, item, items_done = rc._queue.get()
        destination = local(destination)
        destination_dir = local(destination.dirname)
        destination_dir.ensure(dir=True)
        if not destination.check():
            # Open the SCP channel lazily, on the first actual download.
            if scp is None:
                scp = SCPClient(client.get_transport())
            try:
                scp.get(source, destination.strpath)
            except SCPException:
                # remote destination didn't exist — nothing to pull
                pass
            except (SSHException, socket.timeout):
                # SSH blew up :( — requeue the item so it gets retried
                rc._queue.put((source, destination, item, items_done))
                rc._queue.task_done()
                continue
        rc._progress_update(item, items_done)
        rc._queue.task_done()
def parse_config(self):
    """
    Reads the config data and sets up values
    """
    # Nothing to do when no config was supplied at all.
    if not self.config:
        return False
    self.log_dir = local(self.config.get('log_dir', log_path))
    self.log_dir.ensure(dir=True)
    self.artifact_dir = local(self.config.get('artifact_dir', log_path.join('artifacts')))
    self.artifact_dir.ensure(dir=True)
    self.logger = create_logger('artifactor', self.log_dir.join('artifactor.log').strpath)
    self.squash_exceptions = self.config.get('squash_exceptions', False)
    # NOTE(review): these two checks are effectively dead — the get() calls
    # above always supply a default and a py.path local object is truthy,
    # so neither exit branch can fire.  Consider validating the raw config
    # values *before* the directories are created.
    if not self.log_dir:
        print "!!! Log dir must be specified in yaml"
        sys.exit(127)
    if not self.artifact_dir:
        print "!!! Artifact dir must be specified in yaml"
        sys.exit(127)
    # Random localhost port avoids collisions between concurrent runs.
    self.config['zmq_socket_address'] = 'tcp://127.0.0.1:{}'.format(random_port())
    self.setup_plugin_instances()
    self.start_server()
    self.global_data = {
        'artifactor_config': self.config,
        'log_dir': self.log_dir.strpath,
        'artifact_dir': self.artifact_dir.strpath,
        'artifacts': dict(),
        'old_artifacts': dict()
    }
def load_cfme_data(filename=None):
    """Loads the cfme_data YAML from the given filename

    If the filename is omitted or None, attempts will be made to load it
    from its normal location in the parent of the utils directory.

    The cfme_data dict loaded with this method supports value randomization,
    thanks to the RandomizeValues class. See that class for possible options

    Example usage in cfme_data.yaml (quotes are important!):

    top_level:
        list:
            - "{random_str}"
            - "{random_int}"
            - "{random_uuid}"
        random_thing: "{random_string:24}"

    :raises Exception: when no file exists at the resolved path
    """
    if filename is None:
        this_file = os.path.abspath(__file__)
        path = local(this_file).new(basename="../cfme_data.yaml")
    else:
        path = local(filename)
    if path.check():
        # Close the handle deterministically instead of leaking it.
        # NOTE: yaml.load can execute arbitrary tags; the data file is
        # trusted project config, but consider yaml.safe_load.
        with path.open() as cfme_data_fh:
            cfme_data_dict = yaml.load(cfme_data_fh)
        return RandomizeValues.from_dict(cfme_data_dict)
    else:
        # Fixed typo in the error message ("Usable" -> "Unable").
        msg = "Unable to load cfme_data file at %s" % path
        raise Exception(msg)
def parse_config(self):
    """Read the config data, validate it and set up directories/logging.

    Returns False when no config is present; exits with status 127 when a
    required directory setting resolves to an empty value.
    """
    if not self.config:
        return False
    # Validate the raw settings *before* touching the filesystem; the old
    # checks ran after the directories were already ensured and could never
    # fire, because a py.path local object is always truthy.
    log_dir_setting = self.config.get("log_dir", log_path)
    artifact_dir_setting = self.config.get("artifact_dir", log_path.join("artifacts"))
    if not log_dir_setting:
        print("!!! Log dir must be specified in yaml")
        sys.exit(127)
    if not artifact_dir_setting:
        print("!!! Artifact dir must be specified in yaml")
        sys.exit(127)
    self.log_dir = local(log_dir_setting)
    self.log_dir.ensure(dir=True)
    self.artifact_dir = local(artifact_dir_setting)
    self.artifact_dir.ensure(dir=True)
    self.logger = create_logger("artifactor", self.log_dir.join("artifactor.log").strpath)
    self.squash_exceptions = self.config.get("squash_exceptions", False)
    # Random localhost port avoids collisions between concurrent runs.
    self.config["zmq_socket_address"] = "tcp://127.0.0.1:{}".format(random_port())
    self.setup_plugin_instances()
    self.start_server()
    self.global_data = {
        "artifactor_config": self.config,
        "log_dir": self.log_dir.strpath,
        "artifact_dir": self.artifact_dir.strpath,
        "artifacts": dict(),
        "old_artifacts": dict(),
    }
def test_reports(self):
    """Download per-build test-report.json files from Jenkins and parse them.

    :returns: dict mapping build number -> parsed report dict
    """
    print('Collecting test reports to determine best build nodes')
    log_dirs = self.template_log_dirs()
    reports = {}
    c = self.ssh_client
    jenkins_host = composite['jenkins_host']
    c.connect(jenkins_host,
              username=credentials['jenkins-result']['username'],
              password=credentials['jenkins-result']['password'],
              timeout=10,
              allow_agent=False,
              look_for_keys=False,
              gss_auth=False)
    builds_done = {}
    self._progress_update(None, builds_done)
    # Queue every report for the background pull workers.
    for build_number, log_dir in log_dirs:
        build_work_dir = local(self.work_dir.join(str(build_number)))
        build_work_dir.ensure(dir=True)
        _remote = local(log_dir).join('test-report.json').strpath
        _local = build_work_dir.join('test-report.json').strpath
        builds_done[build_number] = False
        self._progress_update(None, builds_done)
        self._queue.put((_remote, _local, build_number, builds_done))
    self._queue.join()
    self._progress_finish()
    for build_number, __ in log_dirs:
        build_work_dir = local(self.work_dir.join(str(build_number)))
        for path in build_work_dir.visit('*/test-report.json'):
            try:
                # Close the handle deterministically; only swallow malformed
                # JSON instead of masking every error with a bare except.
                with path.open() as report_fh:
                    reports[build_number] = json.load(report_fh)
            except ValueError:
                # invalid json, skip this report
                pass
    return reports
def test_participant_picture_rotate_deletes_all_old_files(app, user):
    """Rotating a profile picture removes every stale crop/thumbnail file."""
    pic = ProfilePictureFactory()
    filename = pic.value
    pic.custom_field.meeting.photo_field = pic.custom_field
    cfg = app.config
    upload_dir = local(cfg['UPLOADED_CUSTOM_DEST'])
    crop_dir = local(cfg['UPLOADED_CROP_DEST'] / cfg['PATH_CUSTOM_KEY'])
    thumb_crop_dir = local(cfg['UPLOADED_THUMBNAIL_DEST']
                           / cfg['PATH_CROP_KEY']
                           / cfg['PATH_CUSTOM_KEY'])
    thumb_dir = local(cfg['UPLOADED_THUMBNAIL_DEST'] / cfg['PATH_CUSTOM_KEY'])
    # Materialize the picture and placeholder files for all its derivatives.
    Image.new('RGB', (250, 250), 'red').save(str(upload_dir.join(filename)))
    crop_dir.ensure(filename)
    base, ext = os.path.splitext(filename)
    thumb_full_name = Thumbnail._get_name(base, ext, '200x200', 85)
    thumb_crop_dir.ensure(thumb_full_name)
    thumb_dir.ensure(thumb_full_name)
    with app.test_request_context():
        with app.client.session_transaction() as sess:
            sess['user_id'] = user.id
        url = url_for('meetings.custom_field_rotate',
                      meeting_id=pic.custom_field.meeting.id,
                      participant_id=pic.participant.id,
                      field_slug=pic.custom_field.slug)
        resp = app.client.post(url)
        assert resp.status_code == 200
        # Every file belonging to the old orientation must be gone.
        assert not upload_dir.join(filename).check()
        assert not crop_dir.join(filename).check()
        assert not thumb_crop_dir.join(thumb_full_name).check()
        assert not thumb_dir.join(thumb_full_name).check()
def test_participant_picture_change_deletes_all_old_files(app, user):
    """Uploading a replacement picture removes every file of the old one."""
    pic = ProfilePictureFactory()
    filename = pic.value
    pic.custom_field.meeting.photo_field = pic.custom_field
    cfg = app.config
    upload_dir = local(cfg['UPLOADED_CUSTOM_DEST'])
    crop_dir = local(cfg['UPLOADED_CROP_DEST'] / cfg['PATH_CUSTOM_KEY'])
    thumb_crop_dir = local(cfg['UPLOADED_THUMBNAIL_DEST']
                           / cfg['PATH_CROP_KEY']
                           / cfg['PATH_CUSTOM_KEY'])
    thumb_dir = local(cfg['UPLOADED_THUMBNAIL_DEST'] / cfg['PATH_CUSTOM_KEY'])
    # Touch placeholder files for the picture and all its derivatives.
    upload_dir.ensure(filename)
    crop_dir.ensure(filename)
    base, ext = os.path.splitext(filename)
    thumb_full_name = Thumbnail._get_name(base, ext, '200x200', 85)
    thumb_crop_dir.ensure(thumb_full_name)
    thumb_dir.ensure(thumb_full_name)
    data = {'picture': (StringIO('Test'), 'test_edit.png')}
    with app.test_request_context():
        with app.client.session_transaction() as sess:
            sess['user_id'] = user.id
        resp = app.client.post(
            url_for('meetings.custom_field_upload',
                    meeting_id=pic.custom_field.meeting.id,
                    participant_id=pic.participant.id,
                    field_slug=pic.custom_field.slug),
            data=data)
        assert resp.status_code == 200
        # Every file belonging to the replaced picture must be gone.
        assert not upload_dir.join(filename).check()
        assert not crop_dir.join(filename).check()
        assert not thumb_crop_dir.join(thumb_full_name).check()
        assert not thumb_dir.join(thumb_full_name).check()
def local_path(path=None, *args):
    """Return a py.path.local, expanding environment variables in strings."""
    from os.path import expandvars
    from py.path import local

    if path is None:
        # No base given: build the path purely from *args (cwd when empty).
        result = local(*args)
    elif isinstance(path, str):
        # Expand $VARS and ~user before constructing the path object.
        result = local(expandvars(path), expanduser=True).join(*args)
    else:
        # Already a path-like object: delegate to its own join().
        result = path.join(*args)
    return result
def test_cli_incorrect_param():
    """`convert` invoked on bare src/dest directories exits with code -1."""
    runner = CliRunner()
    with runner.isolated_filesystem():
        src, dest = local('src'), local('dest')
        for directory in (src, dest):
            directory.mkdir()
        result = runner.invoke(envtool.main, ['convert', str(src), str(dest)])
        assert result.exit_code == -1
def test_edit_event_with_all_fields(self):
    """Editing an event replaces its tags and picture with the new ones."""
    # First create a new event
    with open(local(__file__).dirname + '/../../static/img/team/alja.jpg') as fp:
        io = StringIO.StringIO()
        io.write(fp.read())
    uploaded_picture = InMemoryUploadedFile(
        io, None, "alja.jpg", "jpeg", io.len, None)
    uploaded_picture.seek(0)
    event_data = {
        "end_date": datetime.datetime.now(),
        "start_date": datetime.datetime.now(),
        "organizer": "some organizer",
        "creator": User.objects.filter(pk=1)[0],
        "title": "event title",
        "pub_date": datetime.datetime.now(),
        "country": "SI",
        "geoposition": Geoposition(46.05528, 14.51444),
        "location": "Ljubljana",
        "audience": [1],
        "theme": [1],
        "tags": ["tag1", "tag2"],
        "picture": uploaded_picture
    }
    test_event = create_or_update_event(**event_data)
    # Then edit it
    with open(local(__file__).dirname + '/../../static/img/team/ercchy.jpg') as fp:
        io = StringIO.StringIO()
        io.write(fp.read())
    uploaded_picture = InMemoryUploadedFile(
        io, None, "ercchy.jpg", "jpeg", io.len, None)
    uploaded_picture.seek(0)
    event_data = {
        "end_date": datetime.datetime.now(),
        "start_date": datetime.datetime.now(),
        "organizer": "another organiser",
        "creator": User.objects.filter(pk=1)[0],
        "title": "event title - edited",
        "pub_date": datetime.datetime.now(),
        "country": "SI",
        # "geoposition": Geoposition(46.05528,14.51444),
        "location": "Ljubljana",
        "audience": [1],
        "theme": [1],
        "tags": ["tag3", "tag4"],
        "picture": uploaded_picture
    }
    test_event = create_or_update_event(
        event_id=test_event.id, **event_data)
    assert "tag1" not in test_event.tags.names()
    # Bug fix: compare against the file *path*; `in test_event.picture`
    # iterated the FieldFile's content lines instead of checking its name.
    assert 'event_picture/alja' not in test_event.picture.path
    assert 'event_picture/ercchy' in test_event.picture.path
def main():
    """Top-level entry point: dispatch on the name this script ran as."""
    root = local(sys.argv[1])  # argv[0] is the program name, skip it
    pattern = sys.argv[2]
    program_name = local(sys.argv[0]).purebasename
    if program_name == "renamefiles":
        rename_files(root, pattern)
    else:
        rename_dirs(root, pattern)
def test_init_from_path(self):
    """local() passes locals through but converts foreign path objects."""
    p = local()
    # Feeding an existing local back in must return the very same object.
    assert local(p) is p
    wc = py.path.svnwc('.')
    converted = local(wc)
    # An svnwc converts to a *new* local with the same string path and
    # without svn-specific methods such as commit().
    assert converted is not wc
    assert converted.strpath == wc.strpath
    assert not hasattr(converted, 'commit')
def test_compression(tmpdir, experiment):
    "It should compress and decompress experiment without dataloss."
    from leicaexperiment.experiment import decompress
    from PIL import Image
    import numpy as np

    # compress
    pngs = experiment.compress(folder=tmpdir.mkdir('pngs').strpath)

    # reported output is actually written and the same amount
    assert pngs == tmpdir.join('pngs').listdir('*.png', sort=True)
    assert len(pngs) == len(experiment.images)

    # keep data for decompress test
    origs = []
    orig_tags = []

    # check that compression is lossless
    for tif, png in zip(experiment.images, pngs):
        img = Image.open(tif)
        orig = np.array(img)
        origs.append(orig)
        orig_tags.append(img.tag.as_dict())
        compressed = np.array(Image.open(png))
        # is lossless?
        assert np.all(orig == compressed)

    new_tifs = decompress(pngs, folder=tmpdir.mkdir('new_tifs').strpath)

    # reported output is actually written and the same amount as original
    assert new_tifs == tmpdir.join('new_tifs').listdir(sort=True)
    assert len(new_tifs) == len(experiment.images)

    # orig and decompressed images have similar file size
    for orig, new_tif in zip(experiment.images, new_tifs):
        diff = abs(path.local(orig).size() - path.local(new_tif).size())
        assert diff < 1024

    # tags 273/278/279 (StripOffsets/RowsPerStrip/StripByteCounts in the
    # TIFF spec) describe the on-disk layout, not the pixel data, so they
    # may legitimately differ and are excluded from the comparison
    omit_tags = [273, 278, 279]

    # check that decompression is lossless
    for tif, orig, orig_tag in zip(new_tifs, origs, orig_tags):
        img = Image.open(tif)
        decompressed = np.array(img)
        # compress->decompress is lossless?
        assert np.all(orig == decompressed)
        # check if TIFF-tags are intact
        tag = img.tag.as_dict()
        for omit in omit_tags:
            del tag[omit]
            del orig_tag[omit]
        assert tag == orig_tag
def load_credentials(filename=None):
    """Load the credentials YAML file.

    :param filename: optional path; defaults to ../credentials.yaml
        relative to this module
    :returns: dict parsed from the YAML file
    :raises Exception: when the file does not exist
    """
    if filename is None:
        this_file = os.path.abspath(__file__)
        path = local(this_file).new(basename='../credentials.yaml')
    else:
        path = local(filename)
    if path.check():
        # Close the handle deterministically instead of leaking it.
        # NOTE: yaml.load can execute arbitrary tags; the credentials file
        # is trusted local config, but consider yaml.safe_load.
        with path.open() as credentials_fh:
            credentials_dict = yaml.load(credentials_fh)
        return credentials_dict
    else:
        # Fixed typo in the error message ("Usable" -> "Unable").
        msg = 'Unable to load credentials file at %s' % path
        raise Exception(msg)
def project(request):
    """Copy the centodeps sample project into a temp dir, removed on teardown."""
    tmpdir = path.local(tempfile.mkdtemp())
    request.addfinalizer(lambda: tmpdir.remove(True))

    src_setup_py = path.local().join('tests', 'centodeps-setup.py')
    assert src_setup_py.check()

    projdir = tmpdir.join('centodeps')
    projdir.mkdir()
    dst_setup_py = projdir.join('setup.py')
    src_setup_py.copy(dst_setup_py)
    assert dst_setup_py.check()

    return projdir
def server_hierarchy(prefix):
    """Create the salt build tree under *prefix*.

    Symlinks the condiment _states/_modules/_grains directories into
    build/srv/salt and ensures the remaining runtime directories exist.
    All operations are idempotent: existing symlinks are left untouched and
    directories are created via ensure().
    """
    from py.path import local

    build = local(prefix).join('build')
    srv = build.join('srv', 'salt')
    srv.ensure(dir=True)
    # One loop instead of three copy-pasted blocks per extension dir.
    for name in ('_states', '_modules', '_grains'):
        link = srv.join(name)
        if not link.exists():
            link.mksymlinkto(local(condiment_dir).join(name))
    build.join('etc', 'salt').ensure(dir=True)
    build.join('var', 'log', 'salt').ensure(dir=True)
    build.join('var', 'cache', 'salt', 'master').ensure(dir=True)
def pytest_collect_file(path, parent):
    """Collect .py files under the bundled babel tree as doctest modules (PY2 only)."""
    babel_path = local(__file__).dirpath().join('babel')
    config = parent.config
    if not PY2:
        return
    # Only consider files that live inside the babel directory.
    if babel_path.common(path) != babel_path:
        return
    if path.ext == '.py':
        return DoctestModule(path, parent)
def get_config(
    overrides: List[str] = None,
    config_path: str = "conf",
    config_name: str = "config",
    directory: str = None,
    as_config_class: bool = False,
) -> Union[DictConfig, Config]:
    """Compose a hydra config programmatically (e.g. from a notebook).

    :param overrides: list of config overrides
    :param config_path: config directory path, relative to ``directory``
    :param config_name: main config name
    :param directory: base directory; defaults to the current working dir
    :param as_config_class: convert the DictConfig into a Config instance
    :return: DictConfig (or Config) configuration
    """
    initialize_config()
    base_dir = directory if directory is not None else os.getcwd()
    with local(base_dir).as_cwd():
        abs_config_path = os.path.join(base_dir, config_path)
        with initialize_config_dir(abs_config_path):
            cfg = hydra_compose_config(config_name=config_name,
                                       overrides=overrides or [])
        if as_config_class:
            cfg = Config.from_dict_config(cfg)
        return cfg
def test_execve():
    """Exercise the low-level execve implementation via child helper scripts."""
    if os.name != 'posix':
        py.test.skip('posix specific function')
    # Command prefix: interpreter + helper script + pypy root dir + trailing
    # space so a helper name can be appended directly.
    base = " ".join([
        sys.executable,
        execve_tests,
        str(local(pypy.__file__).join('..', '..')),
        ''
    ])

    # Test exit status and code
    result = os.system(base + "execve_true")
    assert os.WIFEXITED(result)
    assert os.WEXITSTATUS(result) == 0
    result = os.system(base + "execve_false")
    assert os.WIFEXITED(result)
    assert os.WEXITSTATUS(result) == 1

    # Test environment: the helper prints KEY=VALUE lines.
    result = os.popen(base + "execve_env").read()
    assert dict([line.split('=') for line in result.splitlines()]) == EXECVE_ENV

    # These won't actually execute anything, so they don't need a child process
    # helper.
    execve = getllimpl(os.execve)

    # If the target does not exist, an OSError should result
    info = py.test.raises(OSError, execve, execve_tests + "-non-existent", [], {})
    assert info.value.errno == errno.ENOENT

    # If the target is not executable, an OSError should result
    info = py.test.raises(OSError, execve, execve_tests, [], {})
    assert info.value.errno == errno.EACCES
def download(
    ngram_len=('n', 1, 'The length of ngrams to be downloaded.'),
    output=('o', 'downloads/google_ngrams/{ngram_len}', 'The destination folder for downloaded files.'),
    verbose=('v', False, 'Be verbose.'),
    lang=(
        'l',
        'eng',
        'Language. [eng|eng-us|eng-gb|eng-fiction|chi-sim|fre|ger|heb|ita|rus|spa]',
    ),
    coverage=(
        'c',
        'all',
        'The coverage / or set of data to download. [all|1M] (the default entire set "all", or the english one million "1M")',
    ),
):
    """Download The Google Books Ngram Viewer dataset version 20120701 or the english one million version 20090715."""
    output = local(output.format(ngram_len=ngram_len))
    output.ensure_dir()
    store = iter_google_store(ngram_len, verbose=verbose, lang=lang, coverage=coverage)
    for fname, url, request in store:
        with output.join(fname).open('wb') as f:
            # Stream in 1 KiB chunks; emit a progress dot roughly every MiB.
            for num, chunk in enumerate(request.iter_content(1024)):
                if verbose and num % 1024 == 0:
                    sys.stderr.write('.')
                    sys.stderr.flush()
                f.write(chunk)
def test_meeting_custom_logos_remove(app, user, brand_dir):
    """Uploaded meeting logos can be deleted again via the logo endpoint."""
    meeting = MeetingFactory()
    right_logo = (StringIO('Right'), 'right.png')
    left_logo = (StringIO('Left'), 'left.jpg')
    upload_dir = local(app.config['UPLOADED_LOGOS_DEST'])
    client = app.test_client()
    with app.test_request_context():
        with client.session_transaction() as sess:
            sess['user_id'] = user.id
        # Exercise upload + delete for both logo slots in turn.
        cases = [
            ('PRODUCT_LOGO', left_logo, Logo('product_logo')),
            ('PRODUCT_SIDE_LOGO', right_logo, Logo('product_side_logo')),
        ]
        for slug, logo_file, logo in cases:
            upload_new_logo(app, user, meeting.id, slug, logo_file)
            logo_filename = logo.filename
            assert upload_dir.join(logo_filename).check()
            resp = client.delete(url_for('meetings.logo_upload',
                                         meeting_id=meeting.id,
                                         logo_slug=slug))
            assert resp.status_code == 200
            assert not upload_dir.join(logo_filename).check()
def test_initialize_reldir(self, path1):
    """A relative name resolves against the cwd when constructing a local."""
    previous = path1.chdir()
    try:
        assert local('samplefile').check()
    finally:
        # Always restore the original working directory.
        previous.chdir()
def outdir():
    """Return the test output directory ``test/out``, creating it if needed.

    Fixes dead logic: ``if not path.ensure(dir=True): path.mkdir()`` could
    never call mkdir() because ensure() already creates the directory and
    returns the (truthy) path object — and mkdir() would have raised on an
    existing directory anyway.
    """
    from py.path import local
    out = local("test").join("out")
    out.ensure(dir=True)
    return out
def write(self, filename, varname=None):
    """Write this field to ``<filename><name>.nc`` as NEMO-convention netCDF.

    :param filename: path prefix for the output file
    :param varname: name of the data variable; defaults to the field name
    """
    filepath = str(path.local('%s%s.nc' % (filename, self.name)))
    if varname is None:
        varname = self.name
    # Derive name of 'depth' variable for NEMO convention
    vname_depth = 'depth%s' % self.name.lower()

    # Create DataArray objects for file I/O
    t, d, x, y = (self.time.size, self.depth.size, self.lon.size, self.lat.size)
    # nav_lon/nav_lat are broadcast to full (y, x) grids as NEMO expects.
    nav_lon = xray.DataArray(self.lon + np.zeros((y, x), dtype=np.float32),
                             coords=[('y', self.lat), ('x', self.lon)])
    nav_lat = xray.DataArray(self.lat.reshape(y, 1) + np.zeros(x, dtype=np.float32),
                             coords=[('y', self.lat), ('x', self.lon)])
    # Data is reshaped to (time, depth, y, x) — assumes self.data holds
    # exactly t*d*y*x elements; TODO confirm against the field constructor.
    vardata = xray.DataArray(self.data.reshape((t, d, y, x)),
                             coords=[('time_counter', self.time),
                                     (vname_depth, self.depth),
                                     ('y', self.lat),
                                     ('x', self.lon)])
    # Create xray Dataset and output to netCDF format
    dset = xray.Dataset({varname: vardata}, coords={
        'nav_lon': nav_lon,
        'nav_lat': nav_lat
    })
    dset.to_netcdf(filepath)
def shorttmpdir():
    """Provides a temporary directory with a shorter file system path than
    the tmpdir fixture.

    The directory is removed on teardown even if the consuming test raises
    (the old version skipped cleanup when the generator was aborted).
    """
    tmpdir = path.local(tempfile.mkdtemp())
    try:
        yield tmpdir
    finally:
        tmpdir.remove(rec=1)
def dictionary(
    pool,
    input_dir=('i', local('./downloads/google_ngrams/1'), 'The path to the directory with the Google unigram files.'),
    output=('o', 'dictionary.h5', 'The output file.'),
    output_key=('', 'dictionary', 'An identifier for the group in the store.')
):
    """Build the dictionary, sorted by frequency.

    The items are sorted by frequency. The output contains two columns
    separated by tab. The first column is the element, the second is its
    frequency.
    """
    # Load every unigram file in parallel and concatenate the counts.
    file_names = sorted(input_dir.listdir())
    pieces = pool.map(load_dictionary, file_names)
    counts = pd.concat(pieces, ignore_index=True)
    # NOTE(review): DataFrame.sort() was deprecated in pandas 0.17 and
    # removed in 0.20; on modern pandas this needs to become
    # sort_values('count', ...). Left as-is to preserve behaviour with the
    # pinned pandas version — confirm before upgrading.
    counts.sort(
        'count',
        inplace=True,
        ascending=False,
    )
    counts.reset_index(drop=True, inplace=True)
    # NOTE(review): debug print left in place; consider removing or using a
    # logger instead.
    print(counts)
    counts.to_hdf(
        output,
        key=output_key,
        mode='w',
        complevel=9,
        complib='zlib',
    )
def test_find_potcars(potcar_walker_cls, temp_data_folder):
    """Make sure the walker finds the right number of POTCAR files."""
    archive = py_path.local(data_path('.')).join('pot_archive')
    walker = potcar_walker_cls(temp_data_folder.strpath)
    walker.walk()
    # Seven POTCARs live in the fixture tree; the archive must stay absent.
    assert len(walker.potcars) == 7
    assert not archive.exists()
def test_upload(fresh_aiida_env, temp_pot_folder):
    """Test uploading a family of POTCAR files."""
    family_name = 'test_family'
    family_desc = 'Test Family'
    potcar_cls = get_data_class('vasp.potcar')
    pot_dir = temp_pot_folder.strpath
    ga_folder = py_path.local(data_path('potcar')).join('Ga')
    assert not ga_folder.exists()

    potcar_cls.upload_potcar_family(pot_dir, family_name, family_desc)
    # All elements become available and the source tree stays untouched.
    for element in ('In', 'As', 'Ga'):
        assert potcar_cls.exists(element=element)
    assert not ga_folder.exists()
    assert [g.name for g in potcar_cls.get_potcar_groups()] == [family_name]
    assert len(potcar_cls.get_potcar_group(family_name).nodes) >= 3

    # Re-uploading the same family with stop_if_existing must fail cleanly.
    with pytest.raises(ValueError):
        potcar_cls.upload_potcar_family(pot_dir, family_name, stop_if_existing=True)
    assert not ga_folder.exists()

    # A second family reuses the stored files: nothing is uploaded again.
    num_files, num_added, num_uploaded = potcar_cls.upload_potcar_family(
        pot_dir, family_name + '_new', family_desc, stop_if_existing=False)
    assert num_files >= 3
    assert num_added >= 3
    assert num_uploaded == 0
    assert not ga_folder.exists()
def test_initialize_reldir(self):
    """A relative name resolves against the cwd when constructing a local."""
    previous = self.root.chdir()
    try:
        assert local('samplefile').check()
    finally:
        # Always restore the original working directory.
        previous.chdir()
def __init__(self, directory=None, prefix='pwscf'):
    """Set up extraction from *directory* with the given file *prefix*."""
    from py.path import local
    super(Extract, self).__init__()
    # Directory where files are to be found; defaults to the cwd.
    self.directory = str(local() if directory is None else directory)
    # Prefix for files and subdirectory.
    self.prefix = prefix
def pike_finder():
    """Fixture: Return PikeFinder object

    :return: PikeFinder rooted two directories above this file
    """
    package_root = local(__file__).dirpath().dirpath()
    return PikeFinder([str(package_root)])
def export(self, path, exclude_keys=None):
    """Export the recorded routes to a semicolon-separated text file.

    :param path: destination file path
    :param exclude_keys: iterable of call keys to omit from the output
    """
    if exclude_keys is None:
        exclude_keys = set()
    out = []
    for k, v in self.routes.items():
        calls = v.get('calls', [])
        upath = v.get('url', k.replace('_', '/'))
        if calls:
            cols = [k for k in v['calls'][0].keys() if k not in exclude_keys]
            txtcols = u';'.join(cols)
            # Bug fix: join the column names into a string — the previous
            # `upath + ';' + cols` concatenated str + list (TypeError) and
            # left txtcols unused.
            out.append(upath + ';' + txtcols)
        else:
            out.append(upath)
        for call in calls:
            out.append(u';' + (u';'.join(repr(x) for k, x in
                                         call.items() if k not in exclude_keys)))
        out.append(u'')
    path = local(path)
    path.write(u'\n'.join(out))
def test_create_event_with_image(admin_user, admin_client, db):
    """Posting the add-event form with a picture creates the event and the
    rendered follow-up page references the uploaded image."""
    with open(local(__file__).dirname + '/../../static/img/team/alja.jpg') as fp:
        io = StringIO.StringIO()
        io.write(fp.read())
    uploaded_picture = InMemoryUploadedFile(io, None, "alja.jpg", "jpeg", io.len, None)
    uploaded_picture.seek(0)
    event_data = {
        'audience': [4, 5],
        'theme': [1, 2],
        'contact_person': u'*****@*****.**',
        'country': u'SI',
        'description': u'Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod\r\ntempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,\r\nquis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo\r\nconsequat. Duis aute irure dolor in reprehenderit in voluptate velit esse\r\ncillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non\r\nproident, sunt in culpa qui officia deserunt mollit anim id est laborum.',
        'event_url': u'',
        'location': u'Ljubljana, Slovenia',
        'organizer': u'Mozilla Slovenija',
        'picture': uploaded_picture,
        'start_date': datetime.datetime.now(),
        'end_date': datetime.datetime.now() + datetime.timedelta(days=3, hours=3),
        'tags': [u'css', u'html', u'web'],
        'title': u'Webmaker Ljubljana',
        'user_email': u'*****@*****.**'
    }
    response = admin_client.post(reverse('web.add_event'), event_data)
    # A successful create redirects to the new event page.
    assert response.status_code == 302
    response = admin_client.get(response.url)
    assert 'event_picture/alja' in response.content
def test_meeting_custom_logos(app, user, brand_dir):
    """Uploaded meeting logos are written to disk and rendered on the
    meeting's logos page; their remove buttons are enabled."""
    meeting = MeetingFactory()
    right_logo = (StringIO('Right'), 'right.png')
    left_logo = (StringIO('Left'), 'left.jpg')
    upload_dir = local(app.config['UPLOADED_LOGOS_DEST'])
    with app.test_request_context():
        # Upload both logo slots and verify the files land in upload_dir.
        resp = upload_new_logo(app, user, meeting.id, 'PRODUCT_LOGO', left_logo)
        product_logo = Logo('product_logo')
        assert product_logo.url in resp.data
        assert upload_dir.join(product_logo.filename).check()
        resp = upload_new_logo(app, user, meeting.id, 'PRODUCT_SIDE_LOGO', right_logo)
        product_side_logo = Logo('product_side_logo')
        assert product_side_logo.url in resp.data
        assert upload_dir.join(product_side_logo.filename).check()
    client = app.test_client()
    with app.test_request_context():
        with client.session_transaction() as sess:
            sess['user_id'] = user.id
        resp = client.get(url_for('meetings.logos', meeting_id=meeting.id))
        assert resp.status_code == 200
        # The rendered page must link to both uploaded logos.
        html = PyQuery(resp.data)
        logo_src = html('#PRODUCT_LOGO a').attr('href')
        assert logo_src == product_logo.url
        side_logo_src = html('#PRODUCT_SIDE_LOGO a').attr('href')
        assert side_logo_src == product_side_logo.url
        # No remove button should be disabled once logos exist.
        remove_buttons = html('.remove-photo.disabled ')
        assert len(remove_buttons) == 0
def init_kwargs(cls, root=None):
    """Build constructor kwargs: every regular file under *root*
    (defaults to ./corpora/CCG_BNC_v1)."""
    if root is None:
        root = os.path.join(getcwd(), 'corpora', 'CCG_BNC_v1')
    file_paths = [
        str(node)
        for node in local(root).visit()
        if node.check(file=True, exists=True)
    ]
    return dict(paths=file_paths)
def test_media_participant_picture_rotate(app, user):
    """Rotating a media participant's picture writes a new file and removes
    the old one."""
    MEDIA = {'media_participant_enabled': True}
    pic = ProfilePictureFactory(participant__category__meeting__settings=MEDIA,
                                participant__participant_type='media',
                                participant__category__category_type='media',
                                custom_field__custom_field_type='media')
    pic.custom_field.meeting.photo_field = pic.custom_field
    upload_dir = local(app.config['UPLOADED_CUSTOM_DEST'])
    old_filename = pic.value
    # Materialize an actual image for the current value.
    Image.new('RGB', (250, 250), 'red').save(str(upload_dir.join(old_filename)))
    client = app.test_client()
    with app.test_request_context():
        with client.session_transaction() as sess:
            sess['user_id'] = user.id
        rotate_url = url_for('meetings.custom_field_rotate',
                             meeting_id=pic.custom_field.meeting.id,
                             participant_id=pic.participant.id,
                             field_slug=pic.custom_field.slug)
        resp = client.post(rotate_url)
        assert resp.status_code == 200
        # The value changed and the file on disk was swapped accordingly.
        assert old_filename != pic.value
        assert not upload_dir.join(old_filename).check()
        assert upload_dir.join(pic.value).check()
def init_kwargs(
    cls,
    root=None,
    workers_count=16,
    lowercase_stem=False,
    limit=None,
):
    """Build constructor kwargs for the ukWaC reader.

    Collects all regular files under *root* (default: ./dep_parsed_ukwac)
    and, when there are more workers than files, schedules several passes
    over each file so every worker has work.

    :raises ValueError: when *root* contains no files (previously this
        crashed with an opaque ZeroDivisionError).
    """
    if root is None:
        root = os.path.join(getcwd(), 'dep_parsed_ukwac')
    paths = [
        str(n) for n in local(root).visit()
        if n.check(file=True, exists=True)
    ]
    if not paths:
        raise ValueError('No input files found under {!r}.'.format(root))
    # With more workers than files, each file is read in several passes,
    # indexed 0..file_passes-1.
    file_passes = max(1, workers_count // len(paths))
    paths = list(
        chain.from_iterable(
            ((i, p) for p in paths) for i in range(file_passes)))
    assert lowercase_stem in ('', 'y', False, True)
    lowercase_stem = bool(lowercase_stem)
    return dict(
        paths=paths,
        file_passes=file_passes,
        lowercase_stem=lowercase_stem,
        limit=limit,
    )
def temp_dir():
    """Temporary directory context manager that deletes the tempdir after use.

    mkdtemp() is called *outside* the try block: previously a failure there
    would reach the finally clause and raise NameError on the unbound
    ``tempdir`` instead of the real error.
    """
    tempdir = tempfile.mkdtemp()
    try:
        yield py_path.local(tempdir)
    finally:
        shutil.rmtree(tempdir)
def crossbar(tmpdir):
    """Spawn a crossbar router configured from .crossbar/config.yaml, wait
    until its realm, guest and coordinator are up, and tear it down after.
    """
    if not find_spec('crossbar'):
        pytest.skip("crossbar not found")
    # Bug fix: resolve the config relative to this *file* — local(__name__)
    # built a path from the module name, not from its location on disk.
    local(__file__).dirpath('.crossbar/config.yaml').copy(
        tmpdir.mkdir('.crossbar'))
    spawn = pexpect.spawn('crossbar start --logformat none', cwd=str(tmpdir))
    try:
        spawn.expect('Realm .* started')
        spawn.expect('Guest .* started')
        spawn.expect('Coordinator ready')
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt isn't swallowed;
        # the captured output is still logged and the error re-raised.
        print("crossbar startup failed with {}".format(spawn.before))
        raise
    yield spawn
    spawn.close(force=True)
    assert not spawn.isalive()
def init_kwargs(
    cls,
    root=None,
    workers_count=16,
    lowercase_stem=False,
    limit=None,
):
    """Collect input files under *root* (default: ./dep_parsed_ukwac) and
    schedule multiple passes per file when workers outnumber files."""
    if root is None:
        root = os.path.join(getcwd(), 'dep_parsed_ukwac')
    paths = [
        str(node)
        for node in local(root).visit()
        if node.check(file=True, exists=True)
    ]
    # At least one pass per file; more when workers outnumber files.
    file_passes = max(1, workers_count // len(paths))
    indexed_paths = []
    for pass_index in range(file_passes):
        for file_path in paths:
            indexed_paths.append((pass_index, file_path))
    assert lowercase_stem in ('', 'y', False, True)
    return dict(
        paths=indexed_paths,
        file_passes=file_passes,
        lowercase_stem=bool(lowercase_stem),
        limit=limit,
    )
def export_file(self, path, dry_run=False):
    """
    Write the contents of the stored POTCAR file to a destination on the local file system.

    :param path: path to the destination file or folder
    :param dry_run: when True, only compute and return the target path
        without writing anything

    When given a folder, the destination file will be created in a
    subdirectory with the name of the symbol. This is for conveniently
    exporting multiple files into the same folder structure as the POTCARs
    are distributed in.

    Examples::

        potcar_file = PotcarFileData.get_or_create(<file>)
        assert potcar_file.symbol == 'Si_d'
        potcar_file.export('./POTCAR.Si') ## writes to ./POTCAR.Si
        potcar_file.export('./potcars/') ## writes to
        ## ./
        ## |-potcars/
        ##   |-Si_d/
        ##     |-POTCAR
    """
    destination = py_path.local(path)
    if destination.isdir():
        # A folder was given: nest as <folder>/<symbol>/POTCAR.
        destination = destination.join(self.symbol, 'POTCAR')
    if not dry_run:
        with destination.open(mode='wb', ensure=True) as dest_fo:
            dest_fo.write(self.get_content())
    return destination
def subprocess_mock(mocker):
    """Patch subprocess.Popen so agent transfers run locally.

    rsync invocations are asserted to copy the labgrid agent script (then
    replaced by a no-op ``true``); ssh invocations are asserted to run the
    agent via python3 and replaced by a *local* run of the real agent so
    coverage data is still collected.
    """
    import subprocess
    original = subprocess.Popen
    agent = local(labgrid.util.agentwrapper.__file__).dirpath('agent.py')

    def run(args, **kwargs):
        assert args[0] in ['rsync', 'ssh']
        if args[0] == 'rsync':
            # The copy source must be the agent script itself...
            src = local(args[-2])
            assert src == agent
            # ...and the destination a host:path spec pointing at a
            # .labgrid_agent file directly in the remote home directory.
            dst = args[-1]
            assert ':' in dst
            dst = dst.split(':', 1)[1]
            assert '/' not in dst
            assert dst.startswith('.labgrid_agent')
            return original(['true'], **kwargs)
        elif args[0] == 'ssh':
            # Only inspect the remote command after the '--' separator.
            assert '--' in args
            args = args[args.index('--') + 1:]
            assert len(args) == 2
            assert args[0] == 'python3'
            assert args[1].startswith('.labgrid_agent')
            # we need to use the original here to get the coverage right
            return original(['python3', str(agent)], **kwargs)

    mocker.patch('subprocess.Popen', run)
def export_family_folder(cls, family_name, path='.', dry_run=False):
    """
    Export a family of POTCAR nodes into a file hierarchy similar to the one POTCARs are distributed in.

    :param family_name: name of the POTCAR family
    :param path: path to a local directory
    :param dry_run: bool, if True, only collect the names of files that
        would otherwise be written.
    :returns: list of paths (under ``path``) that were / would be written

    If ``path`` already exists, everything will be written into a
    subdirectory with the name of the family.
    """
    path = py_path.local(path)
    if path.exists():
        # Don't clobber an existing directory: nest under the family name.
        path = path.join(family_name)
    group = cls.get_potcar_group(family_name)
    all_file_nodes = [potcar.find_file_node() for potcar in group.nodes]
    files_written = []
    # Export into a temporary staging dir first, then copy the finished
    # tree over in one operation (skipped entirely on dry runs).
    with temp_dir() as staging_dir:
        for file_node in all_file_nodes:
            new_file = file_node.export_file(staging_dir, dry_run=dry_run)
            files_written.append(path.join(new_file.relto(staging_dir)))
        if not dry_run:
            staging_dir.copy(path, stat=True)
    return files_written
def test_local(self):
    """A default local() points at an existing path with the local API."""
    cwd = path.local()
    for attr in ('atime', 'setmtime'):
        assert hasattr(cwd, attr)
    assert cwd.check()
    assert cwd.check(local=1)
    self.repr_eval_test(cwd)
def test_meeting_participant_document_delete(app, user):
    """A participant's uploaded document can be deleted again: the file is
    removed from disk and the custom field value row disappears."""
    category = MeetingCategoryFactory()
    meeting = category.meeting
    doc_field = DocumentFieldFactory(meeting=meeting)
    data = ParticipantFactory.attributes()
    data['category_id'] = category.id
    # Attach a fake PDF upload to the document custom field.
    data[doc_field.slug] = (StringIO('Test'), 'test.pdf')
    upload_dir = local(app.config['UPLOADED_CUSTOM_DEST'])
    client = app.test_client()
    with app.test_request_context():
        add_custom_fields_for_meeting(category.meeting)
        populate_participant_form(category.meeting, data)
        with client.session_transaction() as sess:
            sess['user_id'] = user.id
        # Create the participant (redirect on success) with the document.
        resp = client.post(url_for('meetings.participant_edit',
                                   meeting_id=category.meeting.id), data=data)
        assert resp.status_code == 302
        assert Participant.query.current_meeting().participants().first()
        participant = Participant.query.get(1)
        doc_field_value = (participant.custom_field_values
                           .filter_by(custom_field=doc_field).first())
        assert doc_field_value is not None
        assert upload_dir.join(doc_field_value.value).check()
        # Deleting the upload removes both the file and the value row.
        resp = client.delete(url_for('meetings.custom_field_upload',
                                     participant_id=participant.id,
                                     field_slug=doc_field.slug))
        assert resp.status_code == 200
        assert not upload_dir.join(doc_field_value.value).check()
        assert participant.custom_field_values.count() == 0
def cooccurrence(
    pool=None,
    context=('c', 'context.csv', 'The file with context words.'),
    targets=('t', 'targets.csv', 'The file with target words.'),
    input_dir=(
        'i',
        local('./downloads/google_ngrams/5_cooccurrence'),
        'The path to the directory with the co-occurence.',
    ),
    output=('o', 'matrix.h5', 'The output matrix file.'),
):
    """Build the co-occurrence matrix."""
    file_names = input_dir.listdir(sort=True)
    frames = pool.map(
        load_cooccurrence, ((f, targets, context) for f in file_names))

    # Drop frames that came back empty.
    frames = [frame for frame in frames if len(frame)]

    # Pairwise-reduce the frames until a single matrix remains; an odd
    # frame is set aside and rejoined after each round.
    while len(frames) > 1:
        logger.info('Pairs left %s', len(frames))

        leftover = [frames.pop()] if len(frames) % 2 else []
        frames = list(pool.map(group_sum, get_pairs(frames))) + leftover

    matrix, = frames
    write_space(output, context, targets, matrix)
def test_create_event_from_dict_with_all_fields(self):
    """Create an event through create_or_update_event() with every field set
    and verify location, geoposition, tags and picture are persisted."""
    # Open in binary mode: JPEG data is not text, and text mode corrupts
    # it on platforms that translate line endings.
    with open(local(__file__).dirname + '/../../static/img/team/alja.jpg', 'rb') as fp:
        # Renamed from `io` to avoid shadowing the stdlib module name.
        picture_buffer = StringIO.StringIO()
        picture_buffer.write(fp.read())
    uploaded_picture = InMemoryUploadedFile(
        picture_buffer, None, "alja.jpg", "jpeg", picture_buffer.len, None)
    uploaded_picture.seek(0)

    event_data = {
        "end_date": datetime.datetime.now(),
        "start_date": datetime.datetime.now(),
        "organizer": "some organizer",
        "creator": User.objects.filter(pk=1)[0],
        "title": "event title",
        "pub_date": datetime.datetime.now(),
        "country": "SI",
        "geoposition": Geoposition(46.05528, 14.51444),
        "location": "Ljubljana",
        "audience": [1],
        "theme": [1],
        "tags": ["tag1", "tag2"],
        "picture": uploaded_picture,
    }

    test_event = create_or_update_event(**event_data)
    self.assertEqual(2, test_event.pk)
    self.assertEqual("Ljubljana", test_event.location)
    self.assertEqual("46.05528", str(test_event.geoposition.latitude))
    self.assertIn("tag1", test_event.tags.names())
    self.assertIn("tag2", test_event.tags.names())
    assert 'event_picture/alja' in test_event.picture.path
def experiment(tmpdir):
    """Copy the bundled 'experiment' fixture directory into *tmpdir* and
    return a leicaexperiment.Experiment for the copy."""
    from leicaexperiment import Experiment
    source = path.local(__file__).dirpath().join('experiment')
    source.copy(tmpdir.mkdir('experiment'))
    return Experiment(tmpdir.join('experiment').strpath)
def datadir():
    """Return the ``data`` directory next to this test module, creating it
    if it does not exist.

    :returns: a ``py.path.local`` pointing at ``<test dir>/data``.
    """
    from py.path import local
    test_dir = os.path.dirname(__file__)
    # ensure(dir=True) creates the directory (with parents) when missing and
    # returns the path object.  The original `if not path.ensure(...):
    # path.mkdir()` was a bug: ensure() always returns a truthy path, so
    # mkdir() was dead code — and would have raised anyway once the
    # directory existed.
    return local(test_dir).join("data").ensure(dir=True)
def from_nemo(cls, basename, uvar='vozocrtx', vvar='vomecrty', indices=None,
              extra_vars=None, allow_time_extrapolation=False, **kwargs):
    """Initialises grid data from files using NEMO conventions.

    :param basename: Base name of the file(s); may contain
           wildcards to indicate multiple files.
    :param uvar: Name of the U-velocity variable in the files.
    :param vvar: Name of the V-velocity variable in the files.
    :param extra_vars: Extra fields to read beyond U and V
    :param indices: Optional dictionary of indices for each dimension
           to read from file(s), to allow for reading of subset of data.
           Default is to read the full extent of each dimension.
    :param allow_time_extrapolation: Passed through to ``from_netcdf``.
    """
    # BUG FIX: the defaults were mutable dicts ({}), shared across calls,
    # and extra_vars.update() below also mutated the caller's dict.  Use
    # None sentinels and copy the caller's extra_vars before updating.
    indices = {} if indices is None else indices
    extra_vars = dict(extra_vars) if extra_vars is not None else {}

    dimensions = {
        'lon': 'nav_lon',
        'lat': 'nav_lat',
        'depth': 'depth',
        'time': 'time_counter',
    }
    extra_vars.update({'U': uvar, 'V': vvar})
    # One NetCDF file per variable, named "<basename><var>.nc".
    filenames = dict([(v, str(path.local("%s%s.nc" % (basename, v))))
                      for v in extra_vars.keys()])
    return cls.from_netcdf(
        filenames, indices=indices, variables=extra_vars,
        dimensions=dimensions,
        allow_time_extrapolation=allow_time_extrapolation, **kwargs)
def test_create_event_with_image(admin_user, admin_client, db):
    """POST the add-event form with an image attached and verify the event
    page serves the uploaded picture."""
    # Open in binary mode: JPEG data is not text, and text mode corrupts
    # it on platforms that translate line endings.
    with open(local(__file__).dirname + '/../../static/img/team/alja.jpg', 'rb') as fp:
        # Renamed from `io` to avoid shadowing the stdlib module name.
        picture_buffer = StringIO.StringIO()
        picture_buffer.write(fp.read())
    uploaded_picture = InMemoryUploadedFile(
        picture_buffer, None, "alja.jpg", "jpeg", picture_buffer.len, None)
    uploaded_picture.seek(0)

    event_data = {
        'audience': [4, 5],
        'theme': [1, 2],
        'contact_person': u'*****@*****.**',
        'country': u'SI',
        'description': u'Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod\r\ntempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,\r\nquis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo\r\nconsequat. Duis aute irure dolor in reprehenderit in voluptate velit esse\r\ncillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non\r\nproident, sunt in culpa qui officia deserunt mollit anim id est laborum.',
        'event_url': u'',
        'location': u'Ljubljana, Slovenia',
        'organizer': u'Mozilla Slovenija',
        'picture': uploaded_picture,
        'start_date': datetime.datetime.now(),
        'end_date': datetime.datetime.now() + datetime.timedelta(days=3, hours=3),
        'tags': [u'css', u'html', u'web'],
        'title': u'Webmaker Ljubljana',
        'user_email': u'*****@*****.**'
    }

    response = admin_client.post(reverse('web.add_event'), event_data)
    # Successful creation redirects to the new event's page.
    assert response.status_code == 302
    response = admin_client.get(response.url)
    assert 'event_picture/alja' in response.content
def mock_vasp():
    """Verify input files are parseable and copy in output files."""
    from aiida.manage.configuration.settings import AIIDA_CONFIG_FOLDER  # pylint: disable=import-outside-toplevel
    # The scheduler launches this mock inside the calculation folder, so
    # '.' is where INCAR/POTCAR/POSCAR/KPOINTS live and outputs must go.
    pwd = py_path.local('.')
    aiida_path = py_path.local(AIIDA_CONFIG_FOLDER)
    aiida_cfg = aiida_path.join('config.json')
    # Debug breadcrumbs so CI logs show which AiiDA profile was picked up.
    click.echo('DEBUG: AIIDA_PATH = {}'.format(os.environ.get('AIIDA_PATH')))
    click.echo('DEBUG: AIIDA_CONFIG_FOLDER = {}'.format(aiida_path.strpath))
    assert aiida_path.isdir()
    assert aiida_cfg.isfile()
    click.echo(aiida_cfg.read())
    # All four standard VASP input files must be present in the work dir.
    incar = pwd.join('INCAR')
    assert incar.isfile(), 'INCAR input file was not found.'
    potcar = pwd.join('POTCAR')
    assert potcar.isfile(), 'POTCAR input file not found.'
    poscar = pwd.join('POSCAR')
    assert poscar.isfile(), 'POSCAR input file not found.'
    kpoints = pwd.join('KPOINTS')
    assert kpoints.isfile(), 'KPOINTS input file not found.'
    # Each input must parse cleanly with the project's own parsers before
    # any canned outputs are produced.
    incar_parser = IncarParser(file_path=incar.strpath)
    assert incar_parser, 'INCAR could not be parsed.'
    assert PotcarIo(path=potcar.strpath), 'POTCAR could not be parsed.'
    assert PoscarParser(file_path=poscar.strpath), 'POSCAR could not be parsed.'
    assert KpointsParser(file_path=kpoints.strpath), 'KPOINTS could not be parsed.'
    # The INCAR 'system' tag may embed a test-case name after a colon
    # (presumably "<label>: <case>" — the part after the first ':' is used);
    # an IndexError means no colon, i.e. no named test case.
    system = incar_parser.incar.get('system', '')
    try:
        test_case = system.strip().split(':')[1].strip()
    except IndexError:
        test_case = ''
    if not test_case:
        # No named test case: emit the generic canned VASP output files,
        # and echo POSCAR as CONTCAR (relaxation left the structure as-is).
        output_file('outcar', 'OUTCAR').copy(pwd.join('OUTCAR'))
        output_file('vasprun', 'vasprun.xml').copy(pwd.join('vasprun.xml'))
        output_file('chgcar', 'CHGCAR').copy(pwd.join('CHGCAR'))
        output_file('wavecar', 'WAVECAR').copy(pwd.join('WAVECAR'))
        output_file('eigenval', 'EIGENVAL').copy(pwd.join('EIGENVAL'))
        output_file('doscar', 'DOSCAR').copy(pwd.join('DOSCAR'))
        poscar.copy(pwd.join('CONTCAR'))
    else:
        # Named test case: copy that case's pre-recorded 'out' directory
        # wholesale into the working directory.
        test_data_path = data_path(test_case, 'out')
        for out_file in py_path.local(test_data_path).listdir():
            out_file.copy(pwd)
def _execute(self, options, args):
    """Commit the site's source files to the VCS working directory.

    First commits any files the VCS reports as removed, then collects all
    post/page sources, configured folders, themes, plugins and conf.py,
    adds them, and commits the lot.
    """
    logger = get_logger('vcs', self.site.loghandlers)
    self.site.scan_posts()
    repo_path = local('.')
    wd = workdir.open(repo_path)

    # See if anything got deleted
    del_paths = []
    flag = False
    for s in wd.status():
        if s.state == 'removed':
            if not flag:
                logger.info('Found deleted files')
                flag = True
            logger.info('DEL => {}', s.relpath)
            del_paths.append(s.relpath)
    if flag:
        logger.info('Marking as deleted')
        wd.remove(paths=del_paths)
        wd.commit(message='Deleted Files', paths=del_paths)

    # Collect all paths that should be kept under control
    paths = []
    # Post and page sources
    for lang in self.site.config['TRANSLATIONS']:
        for p in self.site.timeline:
            paths.extend(p.fragment_deps(lang))
    # Files in general (only the folder keys are needed, not the values)
    for k in self.site.config['FILES_FOLDERS']:
        paths.extend(get_path_list(k))
    for k in self.site.config['LISTINGS_FOLDERS']:
        paths.extend(get_path_list(k))
    for k in self.site.config['GALLERY_FOLDERS']:
        paths.extend(get_path_list(k))
    # Themes and plugins
    for p in ['plugins', 'themes']:
        paths.extend(get_path_list(p))
    # The configuration.
    # BUG FIX: extend('conf.py') iterated the string and added the
    # characters 'c', 'o', 'n', ... as separate paths; append adds the
    # actual filename.
    paths.append('conf.py')

    # Add them to the VCS
    paths = list(set(paths))
    wd.add(paths=paths)
    flag = False
    for s in wd.status():
        if s.state == 'added':
            if not flag:
                logger.info('Found new files')
                flag = True
            logger.info('NEW => {}', s.relpath)
    logger.info('Committing changes')
    wd.commit(message='Updated files')
def __init__(self, tmpdir_factory):
    """Lay out the temp directory tree, config file and expected-state
    fixture used by the accept_batch tests."""
    self.resources_path = local('tests/resources')
    self.root_dir = tmpdir_factory.mktemp('accept_batch')
    self.sub_root = self.root_dir.ensure_dir('sub_root')
    self.asp_root = self.root_dir.ensure_dir('asp_root')
    self.input_batch_dir = (
        self.resources_path / 'accept_batch/topmed/phase3/biome/01/24a')
    self.output_batch_dir = self.sub_root / 'topmed/phase3/biome/01/24a'
    self.dest_dir = self.asp_root / 'BioMe/BioMe_batch24a'

    # Main config file
    self.config_file = self.root_dir.join('config.yaml')
    settings = {
        'asp_root': str(self.asp_root),
        'sub_root': str(self.sub_root),
    }
    self.config_file.write_text(
        yaml.dump(settings, default_flow_style=False), 'ascii')

    # Expected contents state_00.yaml
    self.state_00_contents = (
        local('tests/resources/state_00.yaml').read_text('ascii'))
def test_chdir(self, tmpdir):
    """chdir() returns the previous directory and actually changes the cwd;
    restore the original cwd afterwards."""
    previous = local()
    try:
        returned = tmpdir.chdir()
        assert str(returned) == str(previous)
        assert py.std.os.getcwd() == str(tmpdir)
    finally:
        previous.chdir()