# Imports assumed by the fragments below; these helpers come from several
# Ansible modules, so the exact import set in each original file may differ.
import os
import posixpath

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
from ansible.module_utils.ec2 import boto3_conn, get_aws_connection_info
from ansible.module_utils.six.moves.urllib.parse import urlparse
from ansible.module_utils.urls import fetch_url

try:
    import boto3
    HAS_BOTO = True
except ImportError:
    HAS_BOTO = False


def check_url(module, url):
    parsed_url = urlparse(url)
    if len(parsed_url.path) > 0:
        sch = parsed_url.scheme
        if (sch == 'http' or sch == 'https' or len(parsed_url.scheme) == 0):
            return True
    module.fail_json(msg="Image Path URL. Wrong Format %s" % (url))
    return False
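
# Hedged usage sketch (not from the original module): check_url() accepts
# http, https, or scheme-less image paths and calls fail_json() for anything
# else. _FakeModule below is an illustrative stand-in for AnsibleModule,
# which exits the process on fail_json() in real runs.
def _example_check_url():
    class _FakeModule(object):
        def fail_json(self, msg):
            raise SystemExit(msg)

    assert check_url(_FakeModule(), 'https://example.com/images/disk.qcow2')
    assert check_url(_FakeModule(), '/var/lib/images/disk.qcow2')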
def get_s3_client(module, aws_connect_kwargs, location, ceph, s3_url):
    # is_fakes3() and is_walrus() are helpers defined alongside this
    # function in the s3 module.
    if s3_url and ceph:  # TODO - test this
        ceph = urlparse(s3_url)
        params = dict(module=module, conn_type='client', resource='s3',
                      use_ssl=ceph.scheme == 'https', region=location,
                      endpoint=s3_url, **aws_connect_kwargs)
    elif is_fakes3(s3_url):
        fakes3 = urlparse(s3_url)
        port = fakes3.port
        if fakes3.scheme == 'fakes3s':
            protocol = "https"
            if port is None:
                port = 443
        else:
            protocol = "http"
            if port is None:
                port = 80
        params = dict(module=module, conn_type='client', resource='s3',
                      region=location,
                      endpoint="%s://%s:%s" % (protocol, fakes3.hostname, to_text(port)),
                      use_ssl=fakes3.scheme == 'fakes3s',
                      **aws_connect_kwargs)
    elif is_walrus(s3_url):
        walrus = urlparse(s3_url).hostname
        params = dict(module=module, conn_type='client', resource='s3',
                      region=location, endpoint=walrus, **aws_connect_kwargs)
    else:
        params = dict(module=module, conn_type='client', resource='s3',
                      region=location, endpoint=s3_url, **aws_connect_kwargs)
    return boto3_conn(**params)
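
# Hedged usage sketch: obtaining a client for a Ceph RGW endpoint. The
# endpoint URL is an illustrative assumption; the region and connect kwargs
# come from get_aws_connection_info() in real modules.
def _example_get_s3_client(module):
    region, _, aws_connect_kwargs = get_aws_connection_info(module, boto3=True)
    return get_s3_client(module, aws_connect_kwargs, region,
                         ceph=True, s3_url='https://rgw.example.com')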
def _request(self, url, failmsg, f):
    url_to_use = url
    parsed_url = urlparse(url)
    if parsed_url.scheme == 's3':
        bucket_name = parsed_url.netloc
        key_name = parsed_url.path[1:]
        client = boto3.client(
            's3',
            aws_access_key_id=self.module.params.get('username', ''),
            aws_secret_access_key=self.module.params.get('password', ''))
        url_to_use = client.generate_presigned_url(
            'get_object',
            Params={'Bucket': bucket_name, 'Key': key_name},
            ExpiresIn=10)

    req_timeout = self.module.params.get('timeout')

    # Hack to add parameters in the way that fetch_url expects
    self.module.params['url_username'] = self.module.params.get('username', '')
    self.module.params['url_password'] = self.module.params.get('password', '')
    self.module.params['http_agent'] = self.module.params.get('user_agent', None)

    response, info = fetch_url(self.module, url_to_use, timeout=req_timeout)
    if info['status'] != 200:
        raise ValueError(failmsg + " because of " + info['msg'] +
                         " for URL " + url_to_use)
    return f(response)
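
# Hedged sketch of the s3:// convention _request() expects: the bucket is
# the netloc and the key is the path without its leading slash. The URL
# below is illustrative only.
def _example_s3_url_parts():
    parsed = urlparse('s3://my-bucket/releases/app-1.0.jar')
    assert parsed.netloc == 'my-bucket'               # bucket name
    assert parsed.path[1:] == 'releases/app-1.0.jar'  # key name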
def parse_gcp_url(url):
    """
    Parse a GCP resource URL into its parts.

    Example: /compute/v1/projects/supertom-graphite/global/backendServices/bes
    Shape:   /SERVICE/VERSION/'projects'/PROJECT_ID/LOCATION/RESOURCE/RESOURCE_NAME

    # TODO(supertom): possibly handle URLs with just the GCP path
    # LOCATION FORWARD
    """
    p = urlparse(url)
    if not p:
        return None
    else:
        # we add extra items such as
        # zone, region and resource_name
        url_parts = {}
        url_parts['scheme'] = p.scheme
        url_parts['host'] = p.netloc
        url_parts['path'] = p.path
        url_parts['params'] = p.params
        url_parts['fragment'] = p.fragment
        url_parts['query'] = p.query
        url_parts['project'] = None
        url_parts['service'] = None
        url_parts['api_version'] = None
        # the path begins with '/', so drop the leading empty element
        path_parts = p.path.split('/')[1:]
        url_parts['service'] = path_parts[0]
        url_parts['api_version'] = path_parts[1]
        if path_parts[2] == 'projects':
            url_parts['project'] = path_parts[3]
            location_type = path_parts[4]
            if location_type == 'regions':
                url_parts['region'] = path_parts[5]
            elif location_type == 'zones':
                url_parts['zone'] = path_parts[5]
            else:
                url_parts['global'] = True
        # TODO(supertom): should be more robust
        url_parts['resource_name'] = path_parts[-1]
        return url_parts
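
# Hedged sketch of parse_gcp_url()'s output for the docstring's example URL
# (an illustrative project and resource, not a real one).
def _example_parse_gcp_url():
    parts = parse_gcp_url('https://www.googleapis.com/compute/v1/projects'
                          '/supertom-graphite/global/backendServices/bes')
    assert parts['service'] == 'compute'
    assert parts['api_version'] == 'v1'
    assert parts['project'] == 'supertom-graphite'
    assert parts['global'] is True
    assert parts['resource_name'] == 'bes'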
def main():
    module = AnsibleModule(
        argument_spec=dict(
            group_id=dict(default=None),
            artifact_id=dict(default=None),
            version=dict(default="latest"),
            classifier=dict(default=None),
            extension=dict(default='jar'),
            repository_url=dict(default=None),
            username=dict(default=None, aliases=['aws_secret_key']),
            password=dict(default=None, no_log=True,
                          aliases=['aws_secret_access_key']),
            # TODO - Implement a "latest" state
            state=dict(default="present", choices=["present", "absent"]),
            timeout=dict(default=10, type='int'),
            dest=dict(type="path", default=None),
            validate_certs=dict(required=False, default=True, type='bool'),
        )
    )

    repository_url = module.params["repository_url"]
    if not repository_url:
        repository_url = "http://repo1.maven.org/maven2"

    try:
        parsed_url = urlparse(repository_url)
    except AttributeError as e:
        module.fail_json(msg='url parsing went wrong %s' % e)

    if parsed_url.scheme == 's3' and not HAS_BOTO:
        module.fail_json(
            msg='boto3 required for this module, when using s3:// repository URLs')

    group_id = module.params["group_id"]
    artifact_id = module.params["artifact_id"]
    version = module.params["version"]
    classifier = module.params["classifier"]
    extension = module.params["extension"]
    state = module.params["state"]
    dest = module.params["dest"]

    # downloader = MavenDownloader(module, repository_url, repository_username, repository_password)
    downloader = MavenDownloader(module, repository_url)

    try:
        artifact = Artifact(group_id, artifact_id, version, classifier, extension)
    except ValueError as e:
        module.fail_json(msg=e.args[0])

    prev_state = "absent"
    if os.path.isdir(dest):
        dest = posixpath.join(dest, artifact_id + "-" + version + "." + extension)
    if os.path.lexists(dest) and downloader.verify_md5(
            dest, downloader.find_uri_for_artifact(artifact) + '.md5'):
        prev_state = "present"
    else:
        path = os.path.dirname(dest)
        if not os.path.exists(path):
            os.makedirs(path)

    if prev_state == "present":
        module.exit_json(dest=dest, state=state, changed=False)

    try:
        if downloader.download(artifact, dest):
            module.exit_json(state=state, dest=dest, group_id=group_id,
                             artifact_id=artifact_id, version=version,
                             classifier=classifier, extension=extension,
                             repository_url=repository_url, changed=True)
        else:
            module.fail_json(msg="Unable to download the artifact")
    except ValueError as e:
        module.fail_json(msg=e.args[0])
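
# Hedged sketch of the dest handling in main(): when dest is an existing
# directory, the artifact filename is composed from its coordinates. The
# values below are illustrative.
def _example_dest_path():
    artifact_id, version, extension = 'my-app', '1.2.3', 'jar'
    dest = posixpath.join('/tmp/artifacts',
                          artifact_id + "-" + version + "." + extension)
    assert dest == '/tmp/artifacts/my-app-1.2.3.jar'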