def _poll_action(self):
    elapsed = datetime.now() - self.time_start
    if elapsed.total_seconds() > self.options["timeout"]:
        raise CumulusCIException(
            f'Sharing enablement not completed after {self.options["timeout"]} seconds'
        )

    for sobject in self.owds:
        # The Tooling API requires that we use fully-qualified sObject names in namespaced scratch orgs.
        # However, the Metadata API requires no namespaces in that context.
        # Dynamically inject the namespace if required.
        real_api_name = (
            f"{self.project_config.project__package__namespace}__{sobject}"
            if self.org_config.namespaced and sobject.count("__") == 1
            else sobject
        )
        result = self.sf.query(
            f"SELECT ExternalSharingModel, InternalSharingModel "
            f"FROM EntityDefinition "
            f"WHERE QualifiedApiName = '{real_api_name}'"
        )
        if result["totalSize"] == 1:
            record = result["records"][0]
            if (
                self.owds[sobject]["internal_sharing_model"]
                and record["InternalSharingModel"]
                != self.owds[sobject]["internal_sharing_model"]
            ) or (
                self.owds[sobject]["external_sharing_model"]
                and record["ExternalSharingModel"]
                != self.owds[sobject]["external_sharing_model"]
            ):
                return
        else:
            raise CumulusCIException("Unable to determine sharing model")

    self.poll_complete = True
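# A sketch of the self.owds structure the polling loop above assumes
# (illustrative sObject name and sharing-model values, not taken from this code):
# each key is an sObject API name mapped to the target sharing models, and
# either model may be empty/None to skip that comparison.
#
#     self.owds = {
#         "Account": {
#             "internal_sharing_model": "Private",
#             "external_sharing_model": "Private",
#         },
#     }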
def gist(runtime):
    if CCI_LOGFILE_PATH.is_file():
        log_content = CCI_LOGFILE_PATH.read_text()
    else:
        log_not_found_msg = """No logfile to open at path: {}
Please ensure you're running this command from the same directory you were experiencing an issue."""
        error_msg = log_not_found_msg.format(CCI_LOGFILE_PATH)
        click.echo(error_msg)
        raise CumulusCIException(error_msg)

    last_cmd_header = "\n\n\nLast Command Run\n================================\n"

    filename = f"cci_output_{datetime.utcnow()}.txt"
    files = {
        filename: {"content": f"{get_context_info()}{last_cmd_header}{log_content}"}
    }

    try:
        gh = RUNTIME.keychain.get_service("github")
        gist = create_gist(
            get_github_api(gh.config["username"], gh.config["password"]),
            "CumulusCI Error Output",
            files,
        )
    except github3.exceptions.NotFoundError:
        raise CumulusCIException(GIST_404_ERR_MSG)
    except Exception as e:
        raise CumulusCIException(
            f"An error occurred attempting to create your gist:\n{e}"
        )
    else:
        click.echo(f"Gist created: {gist.html_url}")
        webbrowser.open(gist.html_url)
def _get_user_id_by_query(self, where: str) -> str:
    # Query for the User, removing a "WHERE " prefix from where if it exists.
    query = "SELECT Id FROM User WHERE {} LIMIT 2".format(
        re.sub(r"^WHERE ", "", where, flags=re.I)
    )
    self.logger.info(f"Querying User: {query}")

    user_ids = []
    try:
        for record in self.sf.query_all(query)["records"]:
            user_ids.append(record["Id"])
    except SalesforceMalformedRequest as e:
        # Raise an easier to digest exception.
        self._raise_cumulusci_exception(e)

    # Validate that exactly one User was found.
    if len(user_ids) < 1:
        raise CumulusCIException("No Users found.")
    if 1 < len(user_ids):
        raise CumulusCIException(
            "More than one User found (at least 2): {}".format(", ".join(user_ids))
        )

    # Log and return the User ID.
    self.logger.info(f"Uploading profile photo for the User with ID {user_ids[0]}")
    return user_ids[0]
def _insert_content_document(self, photo_path) -> str:
    path = pathlib.Path(photo_path)

    if not path.exists():
        raise CumulusCIException(f"No photo found at {path}")

    self.logger.info(f"Setting user photo to {path}")
    result = self.sf.ContentVersion.create(
        {
            "PathOnClient": path.name,
            "Title": path.stem,
            "VersionData": base64.b64encode(path.read_bytes()).decode("utf-8"),
        }
    )
    if not result["success"]:
        raise CumulusCIException(
            "Failed to create photo ContentVersion: {}".format(result["errors"])
        )
    content_version_id = result["id"]

    # Query the ContentDocumentId for our created record.
    content_document_id = self.sf.query(
        f"SELECT Id, ContentDocumentId FROM ContentVersion WHERE Id = '{content_version_id}'"
    )["records"][0]["ContentDocumentId"]
    self.logger.info(f"Uploaded profile photo ContentDocument {content_document_id}")

    return content_document_id
def _poll_action(self):
    elapsed = datetime.now() - self.time_start
    if elapsed.total_seconds() > self.options["timeout"]:
        raise CumulusCIException(
            f'Sharing enablement not completed after {self.options["timeout"]} seconds'
        )

    for sobject in self.owds:
        result = self.sf.query(
            f"SELECT ExternalSharingModel, InternalSharingModel "
            f"FROM EntityDefinition "
            f"WHERE QualifiedApiName = '{sobject}'"
        )
        if result["totalSize"] == 1:
            record = result["records"][0]
            if (
                self.owds[sobject]["internal_sharing_model"]
                and record["InternalSharingModel"]
                != self.owds[sobject]["internal_sharing_model"]
            ) or (
                self.owds[sobject]["external_sharing_model"]
                and record["ExternalSharingModel"]
                != self.owds[sobject]["external_sharing_model"]
            ):
                return
        else:
            raise CumulusCIException("Unable to determine sharing model")

    self.poll_complete = True
def _run_task(self):
    # Determine existing assignments
    if "user_alias" not in self.options:
        query = (
            f"SELECT Id,(SELECT {self.assignment_lookup} FROM {self.assignment_child_relationship}) "
            "FROM User "
            f"WHERE Username = '******'"
        )
    else:
        query = (
            f"SELECT Id,(SELECT {self.assignment_lookup} FROM {self.assignment_child_relationship}) "
            "FROM User "
            f"""WHERE Alias = '{self.options["user_alias"]}'"""
        )

    result = self.sf.query(query)
    if result["totalSize"] != 1:
        raise CumulusCIException(
            "A single User was not found matching the specified alias."
        )
    user = result["records"][0]

    assigned_perms = {}
    # PermissionSetLicenseAssignments actually returns None if there are no
    # assignments instead of an empty list of records. Wow.
    if user[self.assignment_child_relationship]:
        assigned_perms = {
            r[self.assignment_lookup]
            for r in user[self.assignment_child_relationship]["records"]
        }

    # Find Ids for requested Perms
    api_names = "', '".join(self.options["api_names"])
    perms = self.sf.query(
        f"SELECT Id,{self.permission_name_field} FROM {self.permission_name} WHERE {self.permission_name_field} IN ('{api_names}')"
    )
    perms = {p[self.permission_name_field]: p["Id"] for p in perms["records"]}

    # Raise for missing perms
    for api_name in self.options["api_names"]:
        if api_name not in perms:
            raise CumulusCIException(
                f"{self.permission_label} {api_name} was not found."
            )

    # Assign all not already assigned
    for api_name in self.options["api_names"]:
        if perms[api_name] not in assigned_perms:
            self.logger.info(f'Assigning {self.permission_label} "{api_name}".')

            assignment = {
                "AssigneeId": user["Id"],
            }
            assignment[self.assignment_lookup] = perms[api_name]
            # Create the new assignment.
            getattr(self.sf, self.assignment_name).create(assignment)
        else:
            self.logger.warning(
                f'{self.permission_label} "{api_name}" is already assigned.'
            )
def _run_task(self):
    # Determine existing assignments
    if "user_alias" not in self.options:
        query = f"""SELECT Id, (SELECT PermissionSetId FROM PermissionSetAssignments) FROM User WHERE Username = '******'"""
    else:
        query = f"""SELECT Id, (SELECT PermissionSetId FROM PermissionSetAssignments) FROM User WHERE Alias = '{self.options["user_alias"]}'"""

    result = self.sf.query(query)
    if result["totalSize"] != 1:
        raise CumulusCIException(
            "A single User was not found matching the specified alias."
        )
    user = result["records"][0]

    assigned_permsets = {
        r["PermissionSetId"] for r in user["PermissionSetAssignments"]["records"]
    }

    # Find Ids for requested Perm Sets
    api_names = "', '".join(self.options["api_names"])
    permsets = self.sf.query(
        f"SELECT Id, Name FROM PermissionSet WHERE Name IN ('{api_names}')"
    )
    permsets = {p["Name"]: p["Id"] for p in permsets["records"]}

    # Raise for missing permsets
    for api_name in self.options["api_names"]:
        if api_name not in permsets:
            raise CumulusCIException(f"Permission Set {api_name} was not found.")

    # Assign all not already assigned
    for api_name in self.options["api_names"]:
        if permsets[api_name] not in assigned_permsets:
            self.logger.info(f"Assigning permission set {api_name}.")
            self.sf.PermissionSetAssignment.create(
                {
                    "AssigneeId": user["Id"],
                    "PermissionSetId": permsets[api_name],
                }
            )
        else:
            self.logger.info(f"Permission set {api_name} is already assigned.")
def _expand_profile_members(self, package_xml):
    profile_names = package_xml.find("types", name="Profile")
    if not profile_names:
        raise CumulusCIException(
            "The package.xml does not contain a Profiles member."
        )

    for profile in self.api_names:
        profile_names.append("members", text=profile)
def _init_options(self, kwargs):
    super(GenerateApexDocs, self)._init_options(kwargs)
    self.options["command"] = None
    if "source_directory" not in self.options:
        self.options["source_directory"] = os.path.join(
            self.project_config.repo_root, "src", "classes"
        )
    if "out_dir" not in self.options:
        self.options["out_dir"] = (
            self.project_config.project__apexdoc__dir
            if self.project_config.project__apexdoc__dir
            else self.project_config.repo_root
        )
    if "tag" not in self.options:
        self.options["tag"] = None
    if "home_page" not in self.options:
        self.options["home_page"] = (
            self.project_config.project__apexdoc__homepage
            if self.project_config.project__apexdoc__homepage
            else None
        )
    if "banner_page" not in self.options:
        self.options["banner_page"] = (
            self.project_config.project__apexdoc__banner
            if self.project_config.project__apexdoc__banner
            else None
        )
    if "scope" not in self.options:
        self.options["scope"] = (
            self.project_config.project__apexdoc__scope
            if self.project_config.project__apexdoc__scope
            else None
        )
    if "version" not in self.options:
        if not self.project_config.project__apexdoc__version:
            raise CumulusCIException("ApexDoc version required")
        self.options["version"] = self.project_config.project__apexdoc__version
def _init_task(self):
    super(GenerateApexDocs, self)._init_task()
    self.working_dir = tempfile.mkdtemp()
    self.jar_path = os.path.join(self.working_dir, self.jar_file)
    if self.options["tag"] and not self.project_config.project__git__repo_url:
        raise CumulusCIException("Repo URL not found in cumulusci.yml")
def _init_options(self, kwargs):
    super(GenerateApexDocs, self)._init_options(kwargs)
    self.options['command'] = None
    if 'source_directory' not in self.options:
        self.options['source_directory'] = os.path.join(
            self.project_config.repo_root, 'src', 'classes',
        )
    if 'out_dir' not in self.options:
        self.options['out_dir'] = (
            self.project_config.project__apexdoc__dir
            if self.project_config.project__apexdoc__dir
            else self.project_config.repo_root
        )
    if 'tag' not in self.options:
        self.options['tag'] = None
    if 'home_page' not in self.options:
        self.options['home_page'] = (
            self.project_config.project__apexdoc__homepage
            if self.project_config.project__apexdoc__homepage
            else None
        )
    if 'banner_page' not in self.options:
        self.options['banner_page'] = (
            self.project_config.project__apexdoc__banner
            if self.project_config.project__apexdoc__banner
            else None
        )
    if 'scope' not in self.options:
        self.options['scope'] = (
            self.project_config.project__apexdoc__scope
            if self.project_config.project__apexdoc__scope
            else None
        )
    if 'version' not in self.options:
        if not self.project_config.project__apexdoc__version:
            raise CumulusCIException('ApexDoc version required')
        self.options['version'] = self.project_config.project__apexdoc__version
def safe_json_from_response(response):
    "Check that the response is HTTP 200 and actually JSON."
    response.raise_for_status()

    try:
        return response.json()
    except JSONDecodeError:
        raise CumulusCIException(f"Cannot decode as JSON: {response.text}")
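# A minimal usage sketch for safe_json_from_response, assuming the `requests`
# library and a placeholder URL (not taken from this code). A non-2xx status
# raises via raise_for_status(); a non-JSON body raises CumulusCIException.
#
#     import requests
#     response = requests.get("https://example.com/services/data")  # placeholder URL
#     data = safe_json_from_response(response)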
def _init_task(self):
    super(GenerateApexDocs, self)._init_task()
    self.working_dir = tempfile.mkdtemp()
    self.jar_path = os.path.join(self.working_dir, self.jar_file)
    if not self.project_config.project__git__repo_url:
        raise CumulusCIException('Repo URL not found in cumulusci.yml')
    self.source_url = '{}/blob/{}/src/classes/'.format(
        self.project_config.project__git__repo_url,
        self.options['tag'],
    )
def _run_task(self):
    orgs = self._get_orgs()
    package = self._get_package(self.options.get('namespace'))
    version = self._get_version(package, self.options.get('version'))

    start_time = self.options.get('start_time')
    if start_time:
        if start_time.lower() == 'now':
            start_time = datetime.utcnow() + timedelta(seconds=5)
        else:
            start_time = datetime.strptime(start_time, '%Y-%m-%dT%H:%M')
        if start_time < datetime.utcnow():
            raise CumulusCIException('Start time cannot be in the past')
    else:
        # delay a bit to allow for review
        delay_minutes = 5
        self.logger.warn(
            'Scheduling push for %d minutes from now',
            delay_minutes,
        )
        start_time = datetime.utcnow() + timedelta(minutes=delay_minutes)

    self.request_id, num_scheduled_orgs = self.push.create_push_request(
        version,
        orgs,
        start_time,
    )
    self.return_values['request_id'] = self.request_id

    if num_scheduled_orgs > 1000:
        sleep_time_s = 30
        self.logger.info(
            'Delaying {} seconds to allow all jobs to initialize'.format(
                sleep_time_s))
        time.sleep(sleep_time_s)
    elif num_scheduled_orgs == 0:
        self.logger.warn('Canceling push request with 0 orgs')
        self.push.cancel_push_request(self.request_id)
        return

    self.logger.info('Setting status to Pending to queue execution.')
    self.logger.info(
        'The push upgrade will start at UTC {}'.format(start_time))

    # Run the job
    self.logger.info(self.push.run_push_request(self.request_id))
    self.logger.info('Push Request {} is queued for execution.'.format(
        self.request_id))

    # Report the status if start time is less than 1 minute from now
    if start_time - datetime.utcnow() < timedelta(minutes=1):
        self._report_push_status(self.request_id)
    else:
        self.logger.info('Exiting early since request is in the future')
def latest_api_version(self):
    if not self._latest_api_version:
        headers = {"Authorization": "Bearer " + self.access_token}
        response = requests.get(
            self.instance_url + "/services/data", headers=headers
        )
        try:
            version = safe_json_from_response(response)[-1]["version"]
        except (KeyError, IndexError, TypeError):
            raise CumulusCIException(
                f"Cannot decode API Version `{response.text[0:100]}`"
            )
        self._latest_api_version = str(version)
    return self._latest_api_version
def _run_task(self):
    global_describe = self.sf.describe()
    sobject_names = [x["name"] for x in global_describe["sobjects"]]

    if self.options["restore"]:
        with open(self.options["restore_file"], "r") as f:
            target_status = yaml.safe_load(f)

        self.options["handlers"] = list(target_status.keys())
    else:
        target_status = defaultdict(lambda: self.options["active"])

    namespace = self.options.get("namespace", "") + "__"
    if f"{namespace}Trigger_Handler__c" in sobject_names:
        pass
    elif "Trigger_Handler__c" in sobject_names:
        namespace = ""
    else:
        raise CumulusCIException(
            "Unable to locate the Trigger Handler sObject. "
            "Ensure the namespace option is set correctly."
        )

    proxy_obj = getattr(self.sf, f"{namespace}Trigger_Handler__c")

    trigger_handlers = self.sf.query(
        f"SELECT Id, {namespace}Class__c, {namespace}Object__c, {namespace}Active__c FROM {namespace}Trigger_Handler__c"
    )

    current_status = {}
    for handler in trigger_handlers.get("records", []):
        class_name = handler[f"{namespace}Class__c"]
        object_name = handler[f"{namespace}Object__c"]
        compound_name = f"{object_name}:{class_name}"
        if not self.options["handlers"] or any(
            [
                class_name in self.options["handlers"],
                object_name in self.options["handlers"],
                compound_name in self.options["handlers"],
            ]
        ):
            current_status[compound_name] = handler[f"{namespace}Active__c"]
            proxy_obj.update(
                handler["Id"],
                {f"{namespace}Active__c": target_status[compound_name]},
            )

    if "restore_file" in self.options:
        if not self.options["restore"]:
            with open(self.options["restore_file"], "w") as f:
                yaml.safe_dump(current_status, f)
        else:
            os.remove(self.options["restore_file"])
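# A sketch of the restore_file this task writes and later reads back
# (illustrative handler names, not taken from this code): a YAML mapping of
# "Object:Class" compound names to the prior value of the Active__c field.
#
#     Account:AccountTriggerHandler: true
#     Contact:ContactTriggerHandler: false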
def _get_test_methods_for_class(self, class_name):
    result = self.tooling.query(
        f"SELECT SymbolTable FROM ApexClass WHERE Name='{class_name}'"
    )
    test_methods = []
    try:
        methods = result["records"][0]["SymbolTable"]["methods"]
    except (TypeError, IndexError, KeyError):
        raise CumulusCIException(
            f"Unable to acquire symbol table for failed Apex class {class_name}"
        )

    for m in methods:
        for a in m.get("annotations", []):
            if a["name"].lower() in ["istest", "testmethod"]:
                test_methods.append(m["name"])
                break

    return test_methods
def _expand_package_xml(self, package_xml):
    # Query the target org for all namespaced objects
    # Add these entities to the package.xml
    results = self.tooling.query_all(
        "SELECT DeveloperName, NamespacePrefix FROM CustomObject WHERE ManageableState != 'unmanaged'"
    )

    custom_objects = package_xml.find("types", name="CustomObject")
    if not custom_objects:
        raise CumulusCIException(
            "Unable to add packaged objects to package.xml because it does not contain a <types> tag of type CustomObject."
        )

    for record in results.get("records", []):
        custom_objects.append(
            "members",
            text=f"{record['NamespacePrefix']}__{record['DeveloperName']}__c",
        )
def has_minimum_package_version(self, package_identifier, version_identifier):
    """Return True if the org has a version of the specified package that is equal
    to or newer than the supplied version identifier.

    The package identifier may be either a namespace or a 033 package Id.
    The version identifier should be in "1.2.3" or "1.2.3b4" format.

    A CumulusCIException will be thrown if you request to check a namespace
    and multiple second-generation packages sharing that namespace are installed.
    Use a package Id to handle this circumstance."""
    installed_version = self.installed_packages.get(package_identifier)

    if not installed_version:
        return False
    elif len(installed_version) > 1:
        raise CumulusCIException(
            f"Cannot check installed version of {package_identifier}, because multiple "
            f"packages are installed that match this identifier."
        )

    return installed_version[0] >= version_identifier
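# A hedged usage sketch for has_minimum_package_version, assuming it is called
# on an org config object that exposes the installed_packages cache used above.
# "npsp", "3.187", and the 033 Id are illustrative values only.
#
#     if org_config.has_minimum_package_version("npsp", "3.187"):
#         ...  # rely on behavior introduced in that package version
#     elif org_config.has_minimum_package_version("033000000000000AAA", "1.2"):
#         ...  # the same check keyed by a package Id instead of a namespace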
def _run_task(self):
    community = self._get_community()
    if community is not None:
        raise CumulusCIException(
            'A community named "{}" already exists.'.format(self.options["name"])
        )

    self.logger.info('Creating community "{}"'.format(self.options["name"]))
    tries = 0
    while True:
        tries += 1
        try:
            self._create_community()
        except Exception as e:
            if tries > self.options["retries"]:
                raise
            else:
                self.logger.warning(str(e))
                self.logger.info("Retrying community creation request")
                self.poll_interval_s = 1
        else:
            break  # pragma: no cover
def _transform(self):
    # call _transform_entity once per retrieved entity
    # if the entity is an XML file, provide a parsed version
    # and write the returned metadata into the deploy directory

    parser = PackageXmlGenerator(
        None, self.api_version
    )  # We'll use it for its metadata_map
    entity_configurations = [
        entry
        for entry in parser.metadata_map
        if any(
            [
                subentry["type"] == self.entity
                for subentry in parser.metadata_map[entry]
            ]
        )
    ]
    if not entity_configurations:
        raise CumulusCIException(
            f"Unable to locate configuration for entity {self.entity}"
        )

    configuration = parser.metadata_map[entity_configurations[0]][0]
    if configuration["class"] not in [
        "MetadataFilenameParser",
        "CustomObjectParser",
    ]:
        raise CumulusCIException(
            f"MetadataSingleEntityTransformTask only supports manipulating complete, file-based XML entities (not {self.entity})"
        )
    extension = configuration["extension"]
    directory = entity_configurations[0]

    source_metadata_dir = self.retrieve_dir / directory

    if "*" in self.api_names:
        # Walk the retrieved directory to get the actual suite
        # of API names retrieved and rebuild our api_names list.
        self.api_names.remove("*")
        self.api_names = self.api_names.union(
            metadata_file.stem
            for metadata_file in source_metadata_dir.iterdir()
            if metadata_file.suffix == f".{extension}"
        )

    removed_api_names = set()
    for api_name in self.api_names:
        # Page Layout names can contain spaces, but parentheses and other
        # characters like ' and < are quoted.
        # We quote user-specified API names so we can locate the corresponding
        # metadata files, but present them un-quoted in messages to the user.
        unquoted_api_name = unquote(api_name)
        path = source_metadata_dir / f"{api_name}.{extension}"
        if not path.exists():
            raise CumulusCIException(f"Cannot find metadata file {path}")

        try:
            tree = metadata_tree.parse(str(path))
        except SyntaxError as err:
            err.filename = path
            raise err
        transformed_xml = self._transform_entity(tree, unquoted_api_name)
        if transformed_xml:
            parent_dir = self.deploy_dir / directory
            if not parent_dir.exists():
                parent_dir.mkdir()
            destination_path = parent_dir / f"{api_name}.{extension}"
            with destination_path.open(mode="w", encoding="utf-8") as f:
                f.write(transformed_xml.tostring(xml_declaration=True))
        else:
            # Make sure to remove from our package.xml
            removed_api_names.add(api_name)

    self.api_names = self.api_names - removed_api_names
def _load_settings(self):
    # For each top-level heading in our YAML doc, create one or more
    # custom settings.
    for custom_setting, settings_data in self.settings.items():
        proxy_obj = getattr(self.sf, custom_setting)
        # If this level is a dict, we're working with a List Custom Setting.
        # If it's a list, we have a Hierarchy Custom Setting.
        if isinstance(settings_data, dict):
            for setting_instance, instance_data in settings_data.items():
                self.logger.info(
                    f"Loading List Custom Setting {custom_setting}.{setting_instance}"
                )
                proxy_obj.upsert("Name/{}".format(setting_instance), instance_data)
        elif isinstance(settings_data, list):
            for setting_instance in settings_data:
                query = None
                if "location" in setting_instance:
                    if "profile" in setting_instance["location"]:
                        # Query for a matching Profile to assign the Setup Owner Id.
                        profile_name = setting_instance["location"]["profile"]
                        query = f"SELECT Id FROM Profile WHERE Name = '{profile_name}'"
                    elif "user" in setting_instance["location"]:
                        if "name" in setting_instance["location"]["user"]:
                            # Query for a matching User to assign the Setup Owner Id.
                            user_name = setting_instance["location"]["user"]["name"]
                            query = f"SELECT Id FROM User WHERE Username = '{user_name}'"
                        elif "email" in setting_instance["location"]["user"]:
                            # Query for a matching User to assign the Setup Owner Id.
                            email_address = setting_instance["location"]["user"]["email"]
                            query = f"SELECT Id FROM User WHERE Email = '{email_address}'"
                    elif "org" == setting_instance["location"]:
                        # Assign the Setup Owner Id to the organization.
                        query = "SELECT Id FROM Organization"

                if query is None:
                    raise CumulusCIException(
                        f"No valid Setup Owner assignment found for Custom Setting {custom_setting}. Add a `location:` key."
                    )

                matches = self.sf.query(query)
                if matches["totalSize"] != 1:
                    raise CumulusCIException(
                        f"{matches['totalSize']} records matched the settings location query {query}. Exactly one result is required."
                    )
                setup_owner_id = matches["records"][0]["Id"]

                # We can't upsert on SetupOwnerId. Query for any existing records.
                existing_records = self.sf.query(
                    f"SELECT Id FROM {custom_setting} WHERE SetupOwnerId = '{setup_owner_id}'"
                )

                setting_instance["data"].update({"SetupOwnerId": setup_owner_id})
                if existing_records["totalSize"] == 0:
                    self.logger.info(
                        f"Loading Hierarchy Custom Setting {custom_setting} with owner id {setup_owner_id}"
                    )
                    proxy_obj.create(setting_instance["data"])
                else:
                    self.logger.info(
                        f"Updating Hierarchy Custom Setting {custom_setting} with owner id {setup_owner_id}"
                    )
                    proxy_obj.update(
                        existing_records["records"][0]["Id"],
                        setting_instance["data"],
                    )
        else:
            raise CumulusCIException(
                "Each Custom Settings entry must be a list or a map structure."
            )
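# A sketch of the YAML structure _load_settings expects, with illustrative
# setting and field names only: a mapping per List Custom Setting keyed by
# instance Name, and a list per Hierarchy Custom Setting where each entry
# carries a `location:` key and a `data:` key.
#
#     My_List_Setting__c:
#         Instance1:
#             My_Field__c: 10
#     My_Hierarchy_Setting__c:
#         - location: org
#           data:
#               My_Field__c: true
#         - location:
#               user:
#                   name: test@example.com
#           data:
#               My_Field__c: false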
def _raise_cumulusci_exception(self, e: SalesforceMalformedRequest) -> None:
    raise CumulusCIException(join_errors(e))