def s3_inventory_data_file():
    return (
        URL(INVENTORY_FOLDER)
        / URL(INVENTORY_BUCKET_NAME)
        / URL("data")
        / INVENTORY_DATA_FILE
    )

def __init__( self, fs, url, mode="rb", block_size=None, cache_type="bytes", cache_options=None, asynchronous=False, session=None, loop=None, **kwargs ): path = fs._strip_protocol(url) url = URL(fs.webdav_url) / path self.url = url.as_uri() self.asynchronous = asynchronous self.session = session self.loop = loop if mode not in {"rb", "wb"}: raise ValueError super(HTTPFile, self).__init__( fs=fs, path=path, mode=mode, block_size=block_size, cache_type=cache_type, cache_options=cache_options, **kwargs )
def s3_inventory_manifest_file():
    return (
        URL(INVENTORY_FOLDER)
        / URL(INVENTORY_BUCKET_NAME)
        / "2021-09-17T00-00Z"
        / INVENTORY_MANIFEST_FILE
    )

def __init__( self, fs, url, mode="rb", asynchronous=False, session=None, loop=None, **kwargs ): path = fs._strip_protocol(url) url = URL(fs.webdav_url) / path self.url = url.as_uri() self.details = {"name": self.url, "size": None} self.asynchronous = asynchronous self.session = session self.loop = loop super(HTTPStreamFile, self).__init__( fs=fs, path=path, mode=mode, block_size=0, cache_type="none", cache_options={}, **kwargs) if self.mode == "rb": self.r = sync(self.loop, self.session.get, self.url, **self.kwargs) elif self.mode == "wb": pass else: raise ValueError
def gen_url_option(str_opt,
                   set_site=set_site,
                   set_runcontrol=set_runcontrol,
                   set_initcond=set_initcond,
                   source='docs'):
    '''Construct a URL for an option based on the given source.

    :param str_opt: option name, defaults to ''
    :type str_opt: str, optional
    :param source: URL source: 'docs' for readthedocs.org; 'github' for the GitHub repo, defaults to 'docs'
    :type source: str, optional
    :return: a valid URL pointing to the option-related resources
    :rtype: urlpath.URL
    '''
    dict_base = {
        'docs': URL('https://suews-docs.readthedocs.io/en/latest/input_files/'),
        'github': URL('https://github.com/Urban-Meteorology-Reading/SUEWS-Docs/raw/master/docs/source/input_files/'),
    }
    url_base = dict_base[source]
    url_page = choose_page(str_opt, set_site, set_runcontrol, set_initcond, source=source)
    # print('str_opt', str_opt, url_base, url_page)
    str_opt_x = form_option(str_opt)
    url_opt = url_base / (url_page + str_opt_x)
    return url_opt

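# A minimal standalone sketch of the base / (page + anchor) join pattern used in
# gen_url_option above, assuming the urlpath package. The page and anchor strings
# below are hypothetical placeholders, not actual SUEWS documentation targets.
from urlpath import URL

url_base = URL('https://suews-docs.readthedocs.io/en/latest/input_files/')
url_page = 'RunControl/RunControl.html'        # hypothetical page
str_opt_x = '#cmdoption-arg-exampleoption'     # hypothetical anchor
url_opt = url_base / (url_page + str_opt_x)
# Expected to render roughly as
# https://suews-docs.readthedocs.io/en/latest/input_files/RunControl/RunControl.html#cmdoption-arg-exampleoption
print(url_opt)
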
def fetch_orbit_spec_version_no(api_base_url):
    """Get the Orbit API spec version number from the given source."""
    spec_version_url = URL(api_base_url) / "About/Version"
    spec_version_response = spec_version_url.get()
    spec_version = spec_version_response.json().get("version")
    if spec_version is None:
        raise LookupError("Failed to fetch OrbitAPI spec version number.")
    else:
        return spec_version

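# Hypothetical usage sketch for the helper above. The base URL is a placeholder,
# and the call issues a real HTTP GET through the URL object's requests-style
# .get(), so it only succeeds against a live Orbit API instance.
api_base_url = "https://orbit.example.com/api"   # placeholder, not a real endpoint

try:
    print(fetch_orbit_spec_version_no(api_base_url))
except Exception as exc:
    # Network or JSON errors are expected when run against the placeholder host.
    print(f"version lookup failed: {exc}")
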
def named_contexts(self) -> Dict[str, LDContext]:
    """Reusable named contexts."""
    return {
        'github-generated': as_document(
            URL('file://', self.plugin_data_dir / 'gh-auto.yaml'),
        ),
        'github': as_document(
            URL('file://', self.plugin_data_dir / 'gh.yaml'),
        ),
    }

def test_join(self):
    url = URL('http://www.example.com/path/to/file.ext?query#fragment')
    self.assertEqual(str(url / 'https://secure.example.com/path'),
                     'https://secure.example.com/path')
    self.assertEqual(str(url / '/changed/path'),
                     'http://www.example.com/changed/path')
    self.assertEqual(str(url.with_name('other_file')),
                     'http://www.example.com/path/to/other_file')

def test_fragment(self):
    url = URL('http://www.example.com/path/to/file.ext?query#fragment')
    self.assertEqual(url.fragment, 'fragment')
    url = url.with_fragment('new fragment')
    self.assertEqual(
        str(url),
        'http://www.example.com/path/to/file.ext?query#new fragment')
    self.assertEqual(url.fragment, 'new fragment')

def get_spec_from_api(api_base_url):
    """Get the spec and version at the given source."""
    spec_url = URL(api_base_url) / "swagger/v2/swagger.json"
    spec_response = spec_url.get()
    spec = spec_response.json()
    version = fetch_orbit_spec_version_no(api_base_url)
    return spec, version

def test_idempotent(self):
    url = URL(
        'http://\u65e5\u672c\u8a9e\u306e.\u30c9\u30e1\u30a4\u30f3.jp/'
        'path/to/\u30d5\u30a1\u30a4\u30eb.ext?\u30af\u30a8\u30ea')
    self.assertEqual(url, URL(str(url)))
    self.assertEqual(
        url,
        URL('http://xn--u9ju32nb2abz6g.xn--eckwd4c7c.jp/'
            'path/to/\u30d5\u30a1\u30a4\u30eb.ext?\u30af\u30a8\u30ea'))

async def _put_file(self, lpath, rpath, **kwargs):
    self.webdav_url = self._get_webdav_url(rpath) or self.webdav_url
    path = self._strip_protocol(rpath)
    url = URL(self.webdav_url) / path
    url = url.as_uri()
    kw = self.kwargs.copy()
    kw.update(kwargs)
    with open(lpath, "rb") as fd:
        # pass the merged kwargs rather than the unmerged self.kwargs
        r = await self.session.put(url, data=fd, **kw)
        r.raise_for_status()

def get_signed_url_for_permstore_blob(permstore_url):
    blob_url = URL(permstore_url)
    # create sas signature
    blob_service = __get_perm_store_service()
    sas_signature = blob_service.generate_blob_shared_access_signature(
        os.getenv('DESTINATION_CONTAINER_NAME'),
        blob_url.name,
        BlobPermissions.READ,
        datetime.utcnow() + timedelta(hours=1))
    logging.debug("INFO: have sas signature {}".format(sas_signature))
    signed_url = blob_url.with_query(sas_signature)
    return signed_url.as_uri()

def test_trailing_sep(self):
    original = 'http://www.example.com/path/with/trailing/sep/'
    url = URL(original)
    self.assertEqual(str(url), original)
    self.assertEqual(url.name, 'sep')
    self.assertEqual(url.parts[-1], 'sep')
    self.assertEqual(URL('htp://example.com/').trailing_sep, '')
    self.assertEqual(URL('htp://example.com/with/sep/').trailing_sep, '/')
    self.assertEqual(URL('htp://example.com/without/sep').trailing_sep, '')
    self.assertEqual(
        URL('htp://example.com/with/double-sep//').trailing_sep, '//')

def __init__(
    self,
    symbol,
    *,
    log_level=logging.INFO,
    test=True,
    api_key=None,
    api_secret=None,
) -> None:
    super().__init__(log_level=log_level)
    u = URL("https://testnet.bitmex.com/api/v1")
    host = str(u) if test else str(u.with_netloc("www.bitmex.com"))
    self.__reset()
    self.connect(host, symbol, shouldAuth=True)

async def _rm_file(self, path, **kwargs):
    """
    Remove file or directory (must be empty)

    :param path: (str)
    """
    url = URL(self.api_url) / 'namespace' / _encode(path)
    url = url.as_uri()
    kw = self.kwargs.copy()
    kw.update(kwargs)
    async with self.session.delete(url, **kw) as r:
        if r.status == 404:
            raise FileNotFoundError(url)
        r.raise_for_status()

def _get_details(path, data):
    """
    Extract details from the metadata returned by the dCache API

    :param path: (str) file or directory path
    :param data: (dict) metadata as provided by the API
    :return: (dict) parsed metadata
    """
    path = URL(path)
    name = data.get('fileName')  # fileName might be missing
    name = path / name if name is not None else path
    name = name.path
    element_type = data.get('fileType')
    element_type = DCACHE_FILE_TYPES.get(element_type, 'other')
    created = data.get('creationTime')  # in ms
    created = datetime.fromtimestamp(created / 1000.)
    modified = data.get('mtime')  # in ms
    modified = datetime.fromtimestamp(modified / 1000.)
    return dict(
        name=name,
        size=data.get('size'),
        type=element_type,
        created=created,
        modified=modified,
    )

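# Illustrative call for _get_details, using a made-up metadata record shaped like
# a dCache API response (timestamps in milliseconds). The module-level
# DCACHE_FILE_TYPES mapping referenced above is assumed to translate values such
# as 'REGULAR' to 'file'; the path below is a placeholder.
sample = {
    "fileName": "data.h5",
    "fileType": "REGULAR",
    "size": 1024,
    "creationTime": 1631836800000,   # ms since epoch
    "mtime": 1631923200000,          # ms since epoch
}
details = _get_details("/pnfs/example/dir", sample)
# details["name"] is expected to be "/pnfs/example/dir/data.h5";
# details["type"] depends on the DCACHE_FILE_TYPES mapping.
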
def _record(self, op: _Ops) -> Tuple[bool, int]:
    mypage_url = URL('https://attendance.moneyforward.com/my_page')
    # 1. GET my_page and scrape authenticity_token
    cookies = {'_session_id': self._sess.session_id}
    mypage = req.request('GET', mypage_url, cookies=cookies)
    if not mypage.ok:
        return False, mypage.status_code
    form_input = bs(mypage.content.decode()).find('input', attrs={'value': op.value})
    if form_input is None:
        raise RuntimeError(f'"{_Ops.to_human_readable(op)}" is not available')
    token = form_input.parent.find('input', attrs={'name': 'authenticity_token'}).attrs['value']
    # 2. POST time record (recorder endpoint lives under my_page)
    web_time_recorder_url = mypage_url / 'web_time_recorder'
    cookies = {'_session_id': self._sess.session_id}
    d = datetime.utcnow()
    form = {
        'authenticity_token': token,
        'web_time_recorder_form[event]': op.value,
        'web_time_recorder_form[date]': f'{d.year}/{d.month}/{d.day}',
        'web_time_recorder_form[user_time]': d.strftime('%Y-%m-%dT%H:%M:%S.000Z'),
        'web_time_recorder_form[office_location_id]': self._sess.location_id,
    }
    recorder = req.request('POST', web_time_recorder_url, cookies=cookies, data=form)
    return recorder.ok, recorder.status_code

class ProdConfig(Config):
    '''
    Build configuration: freezing to static files and serving locally
    '''
    ENV = 'production'
    IMG_DOMAIN = URL("http://localhost:5003/")

    # Manually add fonts to list to incorporate into freezer
    FONTS = {
        # tuple: vf, latin, woff2
        'Roboto_Slab': 'RobotoSlab-VariableFont_wght-Latin.woff2',
        'Public_Sans': 'PublicSans-VariableFont_wght-Min.woff2',
    }

    # Frozen Flask config
    FREEZER_DESTINATION_IGNORE = ['404.html', 'netlify.toml']
    FREEZER_STATIC_IGNORE = [
        'fonts/', 'scss/', 'img/', 'css/', 'favicon/', 'js/', '.DS_Store',
    ]
    FREEZER_IGNORE_404_NOT_FOUND = True

    CSS_OUT_DIR = Path('website', 'build', 'static', 'css')

    # Flask-HTMLmin
    MINIFY_HTML = True

def named_contexts(self) -> Dict[str, LDContext]:
    """Reusable named contexts."""
    return {
        'prov': as_document(
            URL('file://', self.plugin_data_dir / 'named-context.yaml'),
        ),
    }

def create_and_upload_stac(cog_file: Path, s3_dst: str, year) -> Item:
    out_path = URL(f"{s3_dst}/{year}/")

    log.info("Item base creation")
    item = create_stac_item(
        str(cog_file),
        id=str(odc_uuid("gmw", "2.0", [cog_file.name.replace("tif", "")])),
        with_proj=True,
        input_datetime=datetime(int(year), 12, 31),
        properties={
            "odc:product": "gmw",
            "start_datetime": f"{year}-01-01T00:00:00Z",
            "end_datetime": f"{year}-12-31T23:59:59Z",
        },
    )

    log.info("links creation")
    item.set_self_href(str(out_path / f"gmw_{year}_stac-item.json"))
    item.add_links([
        pystac.Link(
            target=str(SOURCE_URL_PATH / FILE_NAME.format(year=year)),
            title="Source file",
            rel=pystac.RelType.DERIVED_FROM,
            media_type="application/zip",
        )
    ])

    out_data = out_path / cog_file.name
    # Remove asset created by create_stac_item and add our own
    del item.assets["asset"]
    item.assets["mangrove"] = pystac.Asset(
        href=str(out_data),
        title="gmw-v1.0",
        media_type=pystac.MediaType.COG,
        roles=["data"],
    )
    log.info(f"Item created {item.to_dict()}")
    log.info(f"Item validated {item.validate()}")

    log.info(f"Dump the data to S3 {str(cog_file)}")
    s3_dump(
        data=open(str(cog_file), "rb").read(),
        url=str(out_data),
        ACL="bucket-owner-full-control",
        ContentType="image/tiff",
    )
    log.info(f"File written to {out_data}")

    log.info("Write STAC to S3")
    s3_dump(
        data=json.dumps(item.to_dict(), indent=2),
        url=item.self_href,
        ACL="bucket-owner-full-control",
        ContentType="application/json",
    )
    log.info(f"STAC written to {item.self_href}")

    return item

def _make_auth(self):
    config_path = self.config_file_path.parent.expanduser()
    if not config_path.exists():
        self.log.info("Creating directory %s", config_path)
        config_path.mkdir(parents=True)
    config_file = self.config_file_path
    if not config_file.exists():
        self.log.info("Creating config file %s", config_file)
        url = input("Enter your website url: ")
        key = input("Enter your OAuth Client Key: ")
        secret = input("Enter OAuth Client Secret: ")
        user_info = {"url": url, "key": key, "secret": secret}
        config_file.write_text(yaml.dump(user_info))
    else:
        user_info = yaml.safe_load(config_file.read_text())
        for param in ["url", "key", "secret"]:
            if param not in user_info:
                raise AssertionError(
                    f"Not found '{param}' in {config_file}")
    base_url = URL(user_info["url"]).resolve()
    api_endpoint = str(base_url / "wp-json/wp/v2")
    request_token_url = str(base_url / "oauth1/request")
    authorization_base_url = str(base_url / "oauth1/authorize")
    access_token_url = str(base_url / "oauth1/access")
    if "owner_key" not in user_info:
        oauth = OAuth1Session(user_info["key"],
                              client_secret=user_info["secret"])
        fetch_response = oauth.fetch_request_token(request_token_url)
        resource_owner_key = fetch_response.get("oauth_token")
        resource_owner_secret = fetch_response.get("oauth_token_secret")
        authorization_url = oauth.authorization_url(authorization_base_url)
        print(authorization_url)
        verifier = input("Visit authorization_url. And input verifier.")
        # Using OAuth1Session
        oauth = OAuth1Session(
            user_info["key"],
            client_secret=user_info["secret"],
            resource_owner_key=resource_owner_key,
            resource_owner_secret=resource_owner_secret,
            verifier=verifier,
        )
        oauth_tokens = oauth.fetch_access_token(access_token_url)
        resource_owner_key = oauth_tokens.get("oauth_token")
        resource_owner_secret = oauth_tokens.get("oauth_token_secret")
        user_info["owner_key"] = resource_owner_key
        user_info["owner_secret"] = resource_owner_secret
        config_file.write_text(yaml.dump(user_info))
    else:
        resource_owner_key = user_info["owner_key"]
        resource_owner_secret = user_info["owner_secret"]
    self.api_endpoint = base_url / "wp-json/wp/v2"
    self.oauth = OAuth1Session(
        user_info["key"],
        client_secret=user_info["secret"],
        resource_owner_key=resource_owner_key,
        resource_owner_secret=resource_owner_secret,
    )

def __init__(self, config: dict):
    _conf = copy.deepcopy(config)
    # build url for this connection
    corename = _conf.pop("corename")
    _conf["url"] = str(URL(_conf["url"]) / corename)
    self.con = pysolr.Solr(**_conf)

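# Hypothetical configuration for the wrapper above: "corename" is popped and
# appended to "url", so pysolr ends up talking to http://localhost:8983/solr/documents;
# any remaining keys are passed straight to pysolr.Solr. The class name below is
# a placeholder for whichever class defines this __init__.
conf = {
    "url": "http://localhost:8983/solr",   # placeholder Solr root
    "corename": "documents",               # placeholder core
    "timeout": 10,
}
backend = SolrBackend(conf)   # hypothetical class name
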
def upload_image_link(wp, boost_note_root, image_link):
    actual_src = image_link["src"].replace(
        ":storage", str(boost_note_root / "attachments"))
    result = wp.upload_image(actual_src, name=image_link["text"])
    result = result.json()
    url = URL(result["guid"]["rendered"])
    return url.path

def contexts_by_url(self, url: URL) -> Iterable[LDContext]:
    """Yield all contexts by URL."""
    ancestor_directories = self.ancestors_by_url(url)
    for directory in ancestor_directories:
        for filename in self.context_filenames:
            if (context_file := directory / filename).exists():
                yield self.as_jsonld_document(
                    url=URL(f'file://{context_file}'),
                )

def test_query_field_order(self):
    url = URL('http://example.com/').with_query(
        field1='field1', field2='field2', field3='field3')
    self.assertEqual(
        str(url),
        'http://example.com/?field1=field1&field2=field2&field3=field3')

async def _get_file(self, rpath, lpath, chunk_size=5 * 2 ** 20, **kwargs):
    self.webdav_url = self._get_webdav_url(rpath) or self.webdav_url
    path = self._strip_protocol(rpath)
    url = URL(self.webdav_url) / path
    url = url.as_uri()
    kw = self.kwargs.copy()
    kw.update(kwargs)
    # use the merged kwargs rather than the unmerged self.kwargs
    async with self.session.get(url, **kw) as r:
        if r.status == 404:
            raise FileNotFoundError(rpath)
        r.raise_for_status()
        with open(lpath, "wb") as fd:
            chunk = True
            while chunk:
                chunk = await r.content.read(chunk_size)
                fd.write(chunk)

def readable_status(self) -> str:
    """Render status in a readable form."""
    url = URL(str(self.status))
    if url.scheme == 'local':
        return url.path
    return str(url)

def url_parts(self) -> Tuple[str, str]:
    """Parse the GitHub URL."""
    try:
        _hostname, owner_name, repo_name, *etc = URL(self.url).parts
    except ValueError as err:
        raise ValueError(
            f'{self.url} is not a valid GitHub repo URL.',
        ) from err
    return owner_name, repo_name

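# Sketch of the parts-unpacking used above, assuming the urlpath package (whose
# tests elsewhere in this collection show parts beginning with the scheme+host
# anchor followed by path segments). The repo URL is just an illustrative value.
from urlpath import URL

_host, owner, repo, *rest = URL('https://github.com/octocat/Hello-World').parts
print(owner, repo)   # expected: octocat Hello-World
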
def __call__(self, url: str, options: PyLDOptions) -> PyLDResponse:
    """Compile document for PyLD."""
    return {
        'document': self.as_jsonld_document(
            url=URL(url),
            iri=url,
        ),
        'contextUrl': None,
    }

def test_simple(self):
    original = 'http://www.example.com/path/to/file.ext?query#fragment'
    url = URL(original)
    self.assertEqual(str(url), original)
    self.assertEqual(url.as_uri(), original)
    self.assertEqual(url.as_posix(), original)
    self.assertEqual(url.drive, 'http://www.example.com')
    self.assertEqual(url.root, '/')
    self.assertEqual(url.anchor, 'http://www.example.com/')
    self.assertEqual(url.path, '/path/to/file.ext')
    self.assertEqual(url.name, 'file.ext')
    self.assertEqual(url.suffix, '.ext')
    self.assertListEqual(url.suffixes, ['.ext'])
    self.assertEqual(url.stem, 'file')
    self.assertTupleEqual(
        url.parts,
        ('http://www.example.com/', 'path', 'to', 'file.ext'))
    self.assertEqual(str(url.parent), 'http://www.example.com/path/to')
    self.assertEqual(url.scheme, 'http')
    self.assertEqual(url.netloc, 'www.example.com')
    self.assertEqual(url.query, 'query')
    self.assertEqual(url.fragment, 'fragment')

def test_query(self):
    query = 'field1=value1&field1=value2&field2=hello,%20world%26python'
    url = URL('http://www.example.com/form?' + query)
    self.assertEqual(url.query, query)
    self.assertSetEqual(set(url.form), {'field1', 'field2'})
    self.assertTupleEqual(url.form.get('field1'), ('value1', 'value2'))
    self.assertTupleEqual(url.form.get('field2'), ('hello, world&python', ))
    self.assertIn('field1', url.form)
    self.assertIn('field2', url.form)
    self.assertNotIn('field3', url.form)
    self.assertNotIn('field4', url.form)

    url = url.with_query({'field3': 'value3', 'field4': [1, 2, 3]})
    self.assertSetEqual(set(url.form), {'field3', 'field4'})
    self.assertNotIn('field1', url.form)
    self.assertNotIn('field2', url.form)
    self.assertIn('field3', url.form)
    self.assertIn('field4', url.form)
    self.assertTupleEqual(url.form.get('field3'), ('value3', ))
    self.assertTupleEqual(url.form.get('field4'), ('1', '2', '3'))

def test_with(self):
    url = URL('http://www.example.com/path/to/file.exe?query?fragment')
    self.assertEqual(str(url.with_scheme('https')),
                     'https://www.example.com/path/to/file.exe?query?fragment')
    self.assertEqual(str(url.with_netloc('localhost')),
                     'http://localhost/path/to/file.exe?query?fragment')
    self.assertEqual(str(url.with_userinfo('username', 'password')),
                     'http://username:password@www.example.com/path/to/file.exe?query?fragment')
    self.assertEqual(str(url.with_userinfo(None, None)),
                     'http://www.example.com/path/to/file.exe?query?fragment')
    self.assertEqual(str(url.with_hostinfo('localhost', 8080)),
                     'http://localhost:8080/path/to/file.exe?query?fragment')
    self.assertEqual(str(URL('http://example.com/base/') / 'path/to/file'),
                     'http://example.com/base/path/to/file')
    self.assertEqual(str(URL('http://example.com/path/?q')
                         / URL('http://localhost/app/?q')
                         / URL('to/content')),
                     'http://localhost/app/to/content')

def test_resolve(self):
    url = URL('http://www.example.com//./../path/./..//./file/')
    self.assertEqual(str(url.resolve()), 'http://www.example.com/file')