def execute(self):
    """Create trace flags for the running user or for all configured debug users.

    Reads debug users and debug settings from the project config. When the
    'running_user_only' param is truthy, a single trace flag is created for
    the authenticated user; otherwise one is created per configured debug
    user. Returns a JSON success/error response string.
    """
    debug_users = config.project.get_debug_users()
    debug_settings = config.project.get_debug_settings()

    def _create_trace_flag(user_id):
        # Build and run a NewTraceFlagCommand for a single user; return the
        # parsed JSON response (deduplicates the payload construction that
        # previously appeared in both branches).
        payload = {
            "debug_categories": debug_settings["levels"],
            "expiration": debug_settings["expiration"],
            "user_id": user_id,
            "type": "user",
        }
        return json.loads(NewTraceFlagCommand(params=payload).execute())

    if self.params.get('running_user_only', False):
        response = _create_trace_flag(config.sfdc_client.user_id)
        if "success" in response and response["success"] == False:
            return util.generate_error_response(response["errors"][0])
        return util.generate_success_response('Logging for runner user setup successfully')
    else:
        for u in debug_users:
            response = _create_trace_flag(u)
            # Abort on the first failed trace flag creation
            if "success" in response and response["success"] == False:
                return util.generate_error_response(response["errors"][0])
        return util.generate_success_response('{0} Log(s) created successfully'.format(str(len(debug_users))))
def execute(self):
    """Set up trace flags for either the running user or every debug user."""
    users = config.project.get_debug_users()
    settings = config.project.get_debug_settings()
    running_user_only = self.params.get('running_user_only', False)
    # Target either just the authenticated user or the configured debug users
    targets = [config.sfdc_client.user_id] if running_user_only else users
    for user_id in targets:
        request = {}
        request["debug_categories"] = settings["levels"]
        request["expiration"] = settings["expiration"]
        request["user_id"] = user_id
        request["type"] = "user"
        raw = NewTraceFlagCommand(params=request).execute()
        parsed = json.loads(raw)
        # Stop at the first failure and surface its error message
        if "success" in parsed and parsed["success"] == False:
            return util.generate_error_response(parsed["errors"][0])
    if running_user_only:
        return util.generate_success_response('Logging for runner user setup successfully')
    return util.generate_success_response('{0} Log(s) created successfully'.format(str(len(users))))
def execute(self):
    """Refresh the project from the server.

    Refreshing the whole "src" directory delegates to project.clean();
    otherwise the requested metadata is retrieved, extracted under
    "unpackaged", and moved file-by-file into "src".
    """
    project = config.project
    if 'directories' in self.params:
        dirs = self.params['directories']
        if len(dirs) == 1 and os.path.basename(dirs[0]) == "src":
            return project.clean(reset_metadata_container=False)
    retrieve_result = project.get_retrieve_result(self.params)
    # take this opportunity to freshen the cache
    project.conflict_manager.refresh_local_store(retrieve_result.fileProperties)
    util.extract_base64_encoded_zip(retrieve_result.zipFile, project.location)
    # TODO: handle exception that could render the project unusable because of lost files
    # Replace project metadata with the retrieved metadata
    unpackaged_root = os.path.join(project.location, "unpackaged")
    for dirname, dirnames, filenames in os.walk(unpackaged_root):
        for filename in filenames:
            source_path = os.path.join(dirname, filename)
            # Never move the retrieve manifest itself
            if '/unpackaged/package.xml' in source_path or '\\unpackaged\\package.xml' in source_path:
                continue
            if 'win32' in sys.platform:
                destination = source_path.replace('\\unpackaged\\', '\\src\\')
            else:
                destination = source_path.replace('/unpackaged/', '/src/')
            destination_directory = os.path.dirname(destination)
            if not os.path.exists(destination_directory):
                os.makedirs(destination_directory)
            shutil.move(source_path, destination)
    shutil.rmtree(unpackaged_root)
    zip_path = os.path.join(project.location, "metadata.zip")
    if os.path.exists(zip_path):
        os.remove(zip_path)
    return util.generate_success_response("Refresh Completed Successfully")
def execute(self):
    """Run Apex tests via an empty-package test deploy.

    Builds a temp directory holding an empty package.xml, deploys it with
    is_test=True, and extracts the runTestResult (plus the debug log when
    the response carries a DebuggingInfo header) from the deploy response.
    """
    sfdc_client = config.sfdc_client
    empty_package_xml = util.get_empty_package_xml_contents()
    tmp, tmp_unpackaged = util.put_tmp_directory_on_disk(True)
    try:
        util.put_empty_package_xml_in_directory(tmp_unpackaged, empty_package_xml)
        zip_file = util.zip_directory(tmp, tmp)
        deploy_params = {
            "zip_file": zip_file,
            "rollback_on_error": True,
            "ret_xml": True,
            "classes": self.params.get('classes', []),
            "debug_categories": self.params.get('debug_categories', [])
        }
        deploy_result = sfdc_client.deploy(deploy_params, is_test=True)
        d = xmltodict.parse(deploy_result, postprocessor=util.xmltodict_postprocessor)
        # API 29+ nests the test result one level deeper, under 'details'
        if int(float(util.SFDC_API_VERSION)) >= 29:
            result = d["soapenv:Envelope"]["soapenv:Body"]['checkDeployStatusResponse']['result']['details']['runTestResult']
        else:
            result = d["soapenv:Envelope"]["soapenv:Body"]['checkDeployStatusResponse']['result']['runTestResult']
        try:
            result['log'] = d["soapenv:Envelope"]["soapenv:Header"]["DebuggingInfo"]["debugLog"]
        except (KeyError, TypeError):
            # Response had no DebuggingInfo header (was a bare `except:`,
            # which also swallowed KeyboardInterrupt/SystemExit)
            result['log'] = 'Log not available.'
    finally:
        # Always remove the temp deploy directory (previously leaked on error)
        shutil.rmtree(tmp)
    if self.args.respond_with_html:
        html = util.generate_html_response(self.args.operation, result, self.params)
        return util.generate_success_response(html, "html")
    else:
        return result
def execute(self):
    """Run a project health check and return HTML or pretty-printed JSON."""
    report = config.project.run_health_check()
    if self.args.respond_with_html == True:
        markup = util.generate_html_response(self.args.operation, report)
        return util.generate_success_response(markup, "html")
    return json.dumps(report, indent=4)
def execute(self):
    """Return project health check results (HTML when requested, else JSON)."""
    wants_html = self.args.respond_with_html == True
    if not wants_html:
        return json.dumps(config.project.run_health_check(), indent=4)
    health_check_dict = config.project.run_health_check()
    markup = util.generate_html_response(self.args.operation, health_check_dict)
    return util.generate_success_response(markup, "html")
def execute(self):
    """Persist debug users, categories, and expiration to the project debug file."""
    config.project.put_debug_file(
        self.params.get('users', None),
        self.params.get('debug_categories', None),
        self.params.get('expiration', None))
    return util.generate_success_response("Debug settings updated successfully")
def execute(self):
    """Run Apex unit tests asynchronously and collect a coverage report.

    Falls back to the legacy synchronous test runner when the API version is
    28 or lower or when the mm_use_legacy_test_ui setting is enabled. When no
    classes are specified in params, every class whose file name contains
    "test" is treated as a test class and all project tests are run.
    """
    if int(float(util.SFDC_API_VERSION)) <= 28 or config.connection.get_plugin_client_setting("mm_use_legacy_test_ui", False):
        # raise MMException("This command requires mm_api_version to be set to 29.0 or higher.")
        return RunUnitTestsCommand(params=self.params, args=self.args).execute()
    project = config.project
    sfdc_client = config.sfdc_client
    generate_logs = self.params.get("generate_logs", False)
    if generate_logs:
        NewQuickTraceFlagCommand(params={"running_user_only": True}).execute()

    def _walk_file_names(subdir):
        # Collect file names under src/<subdir>, skipping -meta.xml companions.
        # (Deduplicates the os.walk loops that previously appeared four times.)
        names = []
        for dirname, dirnames, filenames in os.walk(os.path.join(project.location, "src", subdir)):
            for filename in filenames:
                if "-meta.xml" not in filename:
                    names.append(filename)
        return names

    test_classes = self.params.get("classes", None)
    debug('running tests for')
    debug(test_classes)
    classes = []
    triggers = []
    if test_classes == None or test_classes == []:
        # No classes specified: run every test class in the project
        test_classes = []
        for filename in _walk_file_names("classes"):
            if "test" in filename.lower():
                test_classes.append(util.get_file_name_no_extension(filename))
            else:
                classes.append(util.get_file_name_no_extension(filename))
    else:
        # Specific tests requested: non-test classes are still needed for coverage
        for filename in _walk_file_names("classes"):
            if "test" not in filename.lower():
                classes.append(util.get_file_name_no_extension(filename))
    for filename in _walk_file_names("triggers"):
        triggers.append(util.get_file_name_no_extension(filename))

    params = {
        "files": test_classes
    }
    test_results = sfdc_client.run_async_apex_tests(params, False)
    params = {
        "classes": classes,
        "triggers": triggers,
        "test_classes": test_classes
    }
    coverage_report = sfdc_client.get_apex_test_coverage(params, transform_ids=True)
    debug(">>>>>>>>>>")
    debug(coverage_report)
    result = {
        "test_results": test_results,
        "coverage": coverage_report
    }
    if self.args.respond_with_html:
        html = util.generate_html_response(self.args.operation, result, self.params)
        return util.generate_success_response(html, "html")
    else:
        return result
def execute(self):
    """Index Apex Execution Overlays (checkpoints) to config/.overlays.

    Resolves each overlay's ExecutableEntityId to a class or trigger name
    and writes the augmented records to the project's overlays file.
    """
    result = config.sfdc_client.get_apex_checkpoints()
    if 'records' not in result or len(result['records']) == 0:
        config.project.put_overlays_file('[]')
        return util.generate_success_response('Could Not Find Any Apex Execution Overlays')
    id_to_name_map = {}
    class_filters = []
    trigger_filters = []
    for record in result['records']:
        entity_id = record["ExecutableEntityId"]
        # '01q' is the ApexTrigger key prefix, '01p' is ApexClass
        if entity_id.startswith('01q'):
            trigger_filters.append("Id = '" + entity_id + "'")
        elif entity_id.startswith('01p'):
            class_filters.append("Id = '" + entity_id + "'")
    if len(class_filters) > 0:
        soql = 'Select Id, Name From ApexClass WHERE ' + ' or '.join(class_filters)
        class_result = config.sfdc_client.execute_query(soql)
        if 'records' in class_result:
            for row in class_result['records']:
                id_to_name_map[row['Id']] = row['Name']
    if len(trigger_filters) > 0:
        soql = 'Select Id, Name From ApexTrigger WHERE ' + ' or '.join(trigger_filters)
        trigger_result = config.sfdc_client.execute_query(soql)
        if 'records' in trigger_result:
            for row in trigger_result['records']:
                id_to_name_map[row['Id']] = row['Name']
    # Attach the resolved API name to every overlay record
    for record in result['records']:
        record['API_Name'] = id_to_name_map[record['ExecutableEntityId']]
    config.project.put_overlays_file(json.dumps(result['records']))
    return util.generate_success_response('Apex Execution Overlays Successfully Indexed to config/.overlays')
def execute(self):
    """Index Apex Execution Overlays (checkpoints) to config/.overlays.

    Resolves each overlay's ExecutableEntityId to a class or trigger name
    and writes the augmented records to the project's overlays file.
    """
    result = config.sfdc_client.get_apex_checkpoints()
    if 'records' not in result or len(result['records']) == 0:
        # BUG FIX: was `project.put_overlays_file('[]')` — `project` is
        # undefined in this scope (NameError); the sibling implementations
        # use config.project.
        config.project.put_overlays_file('[]')
        return util.generate_success_response('Could Not Find Any Apex Execution Overlays')
    else:
        id_to_name_map = {}
        class_ids = []
        trigger_ids = []
        for r in result['records']:
            entity_id = r["ExecutableEntityId"]
            # '01q' is the ApexTrigger key prefix, '01p' is ApexClass
            if entity_id.startswith('01q'):
                trigger_ids.append("Id = '"+entity_id+"'")
            elif entity_id.startswith('01p'):
                class_ids.append("Id = '"+entity_id+"'")
        class_filter = ' or '.join(class_ids)
        trigger_filter = ' or '.join(trigger_ids)
        if len(class_ids) > 0:
            soql = 'Select Id, Name From ApexClass WHERE '+class_filter
            class_result = config.sfdc_client.execute_query(soql)
            if 'records' in class_result:
                for r in class_result['records']:
                    id_to_name_map[r['Id']] = r['Name']
        if len(trigger_ids) > 0:
            soql = 'Select Id, Name From ApexTrigger WHERE '+trigger_filter
            trigger_result = config.sfdc_client.execute_query(soql)
            if 'records' in trigger_result:
                for r in trigger_result['records']:
                    id_to_name_map[r['Id']] = r['Name']
        # Attach the resolved API name to every overlay record
        for r in result['records']:
            r['API_Name'] = id_to_name_map[r['ExecutableEntityId']]
        overlays = json.dumps(result['records'])
        config.project.put_overlays_file(overlays)
        return util.generate_success_response('Apex Execution Overlays Successfully Indexed to config/.overlays')
def execute(self):
    """Index Apex Execution Overlays (checkpoints) to config/.overlays.

    Resolves each overlay's ExecutableEntityId to a class or trigger name
    and writes the augmented records to the project's overlays file.
    """
    result = config.sfdc_client.get_apex_checkpoints()
    if "records" not in result or len(result["records"]) == 0:
        # BUG FIX: was `project.put_overlays_file("[]")` — `project` is
        # undefined here (NameError); siblings use config.project.
        config.project.put_overlays_file("[]")
        return util.generate_success_response("Could Not Find Any Apex Execution Overlays")
    else:
        id_to_name_map = {}
        class_ids = []
        trigger_ids = []
        for r in result["records"]:
            entity_id = r["ExecutableEntityId"]
            # "01q" is the ApexTrigger key prefix, "01p" is ApexClass
            if entity_id.startswith("01q"):
                trigger_ids.append("Id = '" + entity_id + "'")
            elif entity_id.startswith("01p"):
                class_ids.append("Id = '" + entity_id + "'")
        class_filter = " or ".join(class_ids)
        trigger_filter = " or ".join(trigger_ids)
        if len(class_ids) > 0:
            soql = "Select Id, Name From ApexClass WHERE " + class_filter
            class_result = config.sfdc_client.execute_query(soql)
            if "records" in class_result:
                for r in class_result["records"]:
                    id_to_name_map[r["Id"]] = r["Name"]
        if len(trigger_ids) > 0:
            soql = "Select Id, Name From ApexTrigger WHERE " + trigger_filter
            trigger_result = config.sfdc_client.execute_query(soql)
            if "records" in trigger_result:
                for r in trigger_result["records"]:
                    id_to_name_map[r["Id"]] = r["Name"]
        # Attach the resolved API name to every overlay record
        for r in result["records"]:
            r["API_Name"] = id_to_name_map[r["ExecutableEntityId"]]
        overlays = json.dumps(result["records"])
        config.project.put_overlays_file(overlays)
        return util.generate_success_response("Apex Execution Overlays Successfully Indexed to config/.overlays")
def execute(self): file_name = self.params["file_name"] extension = util.get_file_extension_no_period(file_name) mtype = util.get_meta_type_by_suffix(extension) full_file_path = os.path.join(config.project.location, "src", mtype["directoryName"], file_name) params = { "project_name" : config.project.project_name, "file_name" : full_file_path, "line_number" : self.params.get("line_number", 0) } config.connection.run_subl_command("open_file_in_project", json.dumps(params)) return util.generate_success_response("ok")
def execute(self):
    """Download Apex checkpoint (heap dump) results into debug/checkpoints.

    Results are written as debug/checkpoints/<ClassName>/<Line>/<stamp>-<user>.json.
    Any existing checkpoints directory is wiped first.
    """
    checkpoint_count = 0
    # user_id = self.params.get('user_id', config.sfdc_client.user_id)
    limit = self.params.get("limit", 20)
    results = config.sfdc_client.get_apex_checkpoint_results(config.sfdc_client.user_id, limit)
    if "records" not in results:
        config.logger.debug("No checkpoints to download")
        return util.generate_success_response(str(checkpoint_count) + " Checkpoints successfully downloaded")
    records = results["records"]
    checkpoint_count = len(records)
    checkpoints_dir = os.path.join(config.project.location, "debug", "checkpoints")
    # Start from a clean checkpoints directory
    if os.path.isdir(checkpoints_dir):
        shutil.rmtree(checkpoints_dir)
    os.makedirs(checkpoints_dir)
    # Group checkpoint lines by the Apex entity they belong to
    lines_by_entity = {}
    for record in records:
        if "HeapDump" in record and "className" in record["HeapDump"]:
            lines_by_entity.setdefault(record["HeapDump"]["className"], []).append(record["Line"])
    for entity_name, lines in lines_by_entity.items():
        for line in lines:
            line_dir = os.path.join(checkpoints_dir, entity_name, str(line))
            if not os.path.isdir(line_dir):
                os.makedirs(line_dir)
    # Write each heap dump into its <ClassName>/<Line> directory
    for record in records:
        if "HeapDump" in record and "className" in record["HeapDump"]:
            modstamp = record["HeapDump"]["heapDumpDate"]
            if config.is_windows:
                # ':' is not a legal file name character on Windows
                modstamp = modstamp.replace(":", " ")
            file_name = modstamp + "-" + record["UserId"] + ".json"
            file_path = os.path.join(checkpoints_dir, record["HeapDump"]["className"], str(record["Line"]), file_name)
            dump_file = open(file_path, "w")
            dump_file.write(json.dumps(record, sort_keys=True, indent=4))
            dump_file.close()
    return util.generate_success_response(str(checkpoint_count) + " Checkpoints successfully downloaded")
def execute(self):
    """Delete a stored org connection by id and remove its saved password."""
    org_connections = GetOrgConnectionsCommand(params=self.params).execute()
    config.logger.debug('=======')
    config.logger.debug(org_connections)
    # Keep every connection except the one being deleted
    updated_org_connections = []
    for connection in org_connections:
        if connection['id'] != self.params['id']:
            updated_org_connections.append(connection)
    json_data = json.dumps(updated_org_connections, sort_keys=False, indent=4)
    # 'with' guarantees the handle is closed even if the write fails
    # (previously the handle leaked on a write error)
    with open(os.path.join(config.project.location, "config", ".org_connections"), 'wb') as src:
        src.write(json_data)
    util.delete_password_by_key(self.params['id'])
    return util.generate_success_response('Org Connection Successfully Deleted')
def execute(self):
    """Deploy the project's package to one or more destination orgs.

    Optionally archives the retrieved metadata under
    deploy/<username>/<timestamp>, runs each destination's deployment in its
    own thread, and returns the collected results (as HTML plus an overall
    deploy_success flag when requested, otherwise as JSON).
    """
    archive_deployments = config.connection.get_plugin_client_setting(
        "mm_archive_deployments", True)
    deploy_metadata = config.sfdc_client.retrieve(
        package=self.params['package'])
    threads = []
    for destination in self.params['destinations']:
        if archive_deployments:
            deploy_path = os.path.join(config.project.location, "deploy",
                                       destination['username'])
            if not os.path.exists(deploy_path):
                os.makedirs(deploy_path)
            ts = time.time()
            # Windows file names cannot contain ':'
            if not config.is_windows:
                timestamp = datetime.datetime.fromtimestamp(ts).strftime(
                    '%Y-%m-%d %H:%M:%S')
            else:
                timestamp = datetime.datetime.fromtimestamp(ts).strftime(
                    '%Y-%m-%d %H %M %S')
            os.makedirs(
                os.path.join(config.project.location, "deploy",
                             destination['username'], timestamp))
            util.extract_base64_encoded_zip(
                deploy_metadata.zipFile,
                os.path.join(config.project.location, "deploy",
                             destination['username'], timestamp))
        thread = DeploymentHandler(config.project, destination, self.params,
                                   deploy_metadata)
        threads.append(thread)
        thread.start()
    deploy_results = []
    for thread in threads:
        thread.join()
        deploy_results.append(thread.result)
    if self.args.respond_with_html == True:
        html = util.generate_html_response(self.args.operation,
                                           deploy_results, self.params)
        response = json.loads(util.generate_success_response(html, "html"))
        response['deploy_success'] = True
        # if deployment to one org fails, the entire deploy was not successful
        for result in deploy_results:
            if result['success'] == False:
                response['deploy_success'] = False
                break
        return json.dumps(response)
    else:
        # BUG FIX: json.dumps takes 'indent', not 'index' (was a TypeError)
        return json.dumps(deploy_results, indent=4)
def execute(self):
    """Apply an edited package definition by re-cleaning the project."""
    try:
        package = self.params['package']
    except KeyError:
        raise MMException('"package" definition required in JSON body')
    # intercept and overwrite customobject retrieve to include standard objects
    if 'CustomObject' in package:
        for member in package['CustomObject']:
            if member == "*":
                pass  # TODO
    raw_result = config.project.clean(package=package, overwrite_package_xml=True)
    clean_result = json.loads(raw_result)
    if clean_result['success'] == True:
        return util.generate_success_response('Project Edited Successfully')
    return util.generate_error_response(clean_result['body'])
def execute(self):
    """Apply an edited package definition by re-cleaning the project."""
    if 'package' not in self.params:
        raise MMException('"package" definition required in JSON body')
    package = self.params['package']
    # intercept and overwrite customobject retrieve to include standard objects
    if 'CustomObject' in package:
        for member in package['CustomObject']:
            if member == "*":
                pass  # TODO
    # BUG FIX: project.clean() returns a JSON string (the sibling
    # implementations wrap it in json.loads); subscripting the raw string
    # with 'success' would fail.
    clean_result = json.loads(config.project.clean(package=package, overwrite_package_xml=True))
    if clean_result['success'] == True:
        return util.generate_success_response('Project Edited Successfully')
    else:
        return util.generate_error_response(clean_result['body'])
def execute(self): if "package" not in self.params: raise MMException('"package" definition required in JSON body') package = self.params["package"] # intercept and overwrite customobject retrieve to include standard objects if "CustomObject" in package: for member in package["CustomObject"]: if member == "*": pass # TODO clean_result = json.loads(config.project.clean(package=package, overwrite_package_xml=True)) if clean_result["success"] == True: return util.generate_success_response("Project Edited Successfully") else: return util.generate_error_response(clean_result["body"])
def execute(self):
    """Create and persist a new org connection.

    Instantiates a MavensMateClient with the supplied credentials, stores
    the password under a fresh connection id, and appends the connection to
    config/.org_connections.
    """
    # NOTE(review): the client instance is unused afterwards — presumably
    # construction authenticates and raises on bad credentials; confirm.
    c = MavensMateClient(credentials={
        "username": self.params['username'],
        "password": self.params['password'],
        "org_type": self.params['org_type']
    })
    org_connection_id = util.new_mavensmate_id()
    util.put_password_by_key(org_connection_id, self.params['password'])
    org_connections = GetOrgConnectionsCommand(params=self.params).execute()
    org_connections.append({
        'id': org_connection_id,
        'username': self.params['username'],
        'environment': self.params['org_type']
    })
    json_data = json.dumps(org_connections, sort_keys=False, indent=4)
    # 'with' guarantees the handle is closed even if the write fails
    # (previously the handle leaked on a write error)
    with open(os.path.join(config.project.location, "config", ".org_connections"), 'wb') as src:
        src.write(json_data)
    return util.generate_success_response('Org Connection Successfully Created')
def execute(self):
    """Remove the org connection matching params['id'] from disk and keychain."""
    org_connections = GetOrgConnectionsCommand(params=self.params).execute()
    config.logger.debug('=======')
    config.logger.debug(org_connections)
    target_id = self.params['id']
    # Rewrite the file without the deleted connection
    remaining = [connection for connection in org_connections if connection['id'] != target_id]
    connections_path = os.path.join(config.project.location, "config", ".org_connections")
    src = open(connections_path, 'wb')
    src.write(json.dumps(remaining, sort_keys=False, indent=4))
    src.close()
    util.delete_password_by_key(target_id)
    return util.generate_success_response('Org Connection Successfully Deleted')
def execute(self): archive_deployments = config.connection.get_plugin_client_setting("mm_archive_deployments", True) deploy_metadata = config.sfdc_client.retrieve(package=self.params['package']) threads = [] for destination in self.params['destinations']: if archive_deployments: deploy_path = os.path.join(config.project.location,"deploy",destination['username']) if not os.path.exists(deploy_path): os.makedirs(deploy_path) ts = time.time() if not config.is_windows: timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S') else: timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H %M %S') os.makedirs(os.path.join(config.project.location,"deploy",destination['username'],timestamp)) util.extract_base64_encoded_zip(deploy_metadata.zipFile, os.path.join(config.project.location,"deploy",destination['username'],timestamp)) thread = DeploymentHandler(config.project, destination, self.params, deploy_metadata) threads.append(thread) thread.start() deploy_results = [] for thread in threads: thread.join() deploy_results.append(thread.result) if self.args.respond_with_html == True: html = util.generate_html_response(self.args.operation, deploy_results, self.params) response = json.loads(util.generate_success_response(html, "html")) response['deploy_success'] = True # if deployment to one org fails, the entire deploy was not successful for result in deploy_results: if result['success'] == False: response['deploy_success'] = False break return json.dumps(response) else: return json.dumps(deploy_results,index=4)
def execute(self):
    """Validate credentials, then append a new entry to .org_connections."""
    username = self.params['username']
    password = self.params['password']
    org_type = self.params['org_type']
    c = MavensMateClient(credentials={
        "username": username,
        "password": password,
        "org_type": org_type
    })
    org_connection_id = util.new_mavensmate_id()
    util.put_password_by_key(org_connection_id, password)
    org_connections = GetOrgConnectionsCommand(params=self.params).execute()
    org_connections.append({
        'id': org_connection_id,
        'username': username,
        'environment': org_type
    })
    src = open(os.path.join(config.project.location, "config", ".org_connections"), 'wb')
    src.write(json.dumps(org_connections, sort_keys=False, indent=4))
    src.close()
    return util.generate_success_response('Org Connection Successfully Created')
def execute(self):
    """Update the project's stored Salesforce credentials."""
    project = config.project
    project.update_credentials(self.params)
    return util.generate_success_response("Your credentials were updated successfully")
def execute(self):
    """Re-run the metadata index for the project."""
    indexer = IndexMetadataCommand(params=self.params)
    indexer.execute()
    return util.generate_success_response("Metadata refreshed successfully.")
def execute(self):
    """Open the given project files in the org's web UI via frontdoor.jsp.

    params:
        files: list of absolute file paths to open (required).
        type: "edit" (default) or "wsdl" — "wsdl" opens the class WSDL
              and only applies to global webservice Apex classes.

    Returns a success response listing the opened files, or an error
    response when no file could be resolved/opened. Raises MMException
    when 'files' is missing.
    """
    project = config.project
    sfdc_client = config.sfdc_client
    if "files" in self.params:
        if "type" in self.params:
            open_type = self.params.get("type", None)
        else:
            open_type = "edit"
        files = self.params.get("files", None)
        if len(files) > 0:
            # Cached server file properties (id/type per file name)
            apex_file_properties = util.parse_json_from_file(os.path.join(project.location,"config",".local_store"))
            opened = []
            for fileabs in files:
                basename = os.path.basename(fileabs)
                if basename not in apex_file_properties:
                    # make sure we have meta data and then get the object type
                    if os.path.isfile(fileabs+"-meta.xml"):
                        xmldoc = minidom.parse(fileabs+"-meta.xml")
                        root = xmldoc.firstChild
                        object_type = root.nodeName
                    else:
                        continue
                    # Resolve the server id by name; skip files unknown to the org
                    object_id = sfdc_client.get_apex_entity_id_by_name(object_type=object_type, name=basename)
                    if not object_id:
                        continue
                else:
                    props = apex_file_properties[basename]
                    object_type = props['type']
                    object_id = props['id']
                # only ApexClasses that are global and have webservice scope have WSDL files
                if open_type == "wsdl":
                    if object_type != "ApexClass":
                        continue
                    with open(fileabs, 'r') as content_file:
                        content = content_file.read()
                    p = re.compile("global\s+(abstract\s+)?class\s", re.I + re.M)
                    if not p.search(content):
                        continue
                    p = re.compile("\swebservice\s", re.I + re.M)
                    if not p.search(content):
                        continue
                # get the server instance url and set the redirect url
                frontdoor = "https://" + sfdc_client.server_url.split('/')[2] + "/secur/frontdoor.jsp?sid=" + sfdc_client.sid + "&retURL="
                if open_type == "wsdl":
                    f, e = os.path.splitext(basename)
                    ret_url = "/services/wsdl/class/" + f
                else:
                    f, ext = os.path.splitext(basename)
                    if object_type == "CustomObject" and not f.endswith('__c'):
                        # standard object?
                        ret_url = "/p/setup/layout/LayoutFieldList?type=" + f + "%23CustomFieldRelatedList_target"
                    else:
                        ret_url = "/" + object_id
                # open the browser window for this file and track it
                webbrowser.open(frontdoor+ret_url, new=2)
                opened.append(basename)
            if len(opened) == 0:
                return util.generate_error_response("There were no valid files to open.")
            return util.generate_success_response("Opened "+(", ".join(opened))+" on server.")
        return util.generate_error_response("Unable to open file on server.")
    else:
        raise MMException("To open on Salesforce, you must provide an array of 'files'")
def execute(self):
    """Delete the given metadata files from the org, then remove them locally.

    params:
        files: list of file names to delete (meta.xml companions are added
               automatically, and vice versa).

    Performs a destructive deploy (delete) of the listed metadata; on
    success removes the local files and their local-store entries and
    returns a success response. Returns None (implicitly) when the server
    delete fails.

    NOTE: Python 2 syntax (`except Exception, e`, `print`).
    """
    project = config.project
    sfdc_client = config.sfdc_client
    files = self.params.get('files', None)
    # Ensure each -meta.xml file's companion source file is included
    for f in files:
        if '-meta.xml' in f:
            corresponding_file = f.split('-meta.xml')[0]
            if corresponding_file not in files:
                files.append(corresponding_file)
    # Ensure each source file's -meta.xml companion is included
    for f in files:
        if '-meta.xml' in f:
            continue
        file_ext = f.split('.')[-1]
        metadata_type = util.get_meta_type_by_suffix(file_ext)
        if metadata_type['metaFile'] == True:
            corresponding_file = f + '-meta.xml'
            if corresponding_file not in files:
                files.append(corresponding_file)
    # Build a deploy package: destructiveChanges + an empty package.xml
    metadata_package_dict = util.get_metadata_hash(files)
    tmp, tmp_unpackaged = util.put_tmp_directory_on_disk(True)
    package_xml = util.get_package_xml_contents(metadata_package_dict)
    util.put_package_xml_in_directory(tmp_unpackaged, package_xml, True)
    empty_package_xml = util.get_empty_package_xml_contents()
    util.put_empty_package_xml_in_directory(tmp_unpackaged, empty_package_xml)
    zip_file = util.zip_directory(tmp, tmp)
    purge_on_delete_setting = config.connection.get_plugin_client_setting("mm_purge_on_delete", False);
    if purge_on_delete_setting:
        # Orgs that require tests on deploy cannot purge on delete
        describe_result = config.sfdc_client.describeMetadata(retXml=False)
        if describe_result.testRequired == True:
            purge_on_delete_setting = False
    deploy_params = {
        "zip_file" : zip_file,
        "rollback_on_error" : True,
        "ret_xml" : True,
        "purge_on_delete" : purge_on_delete_setting
    }
    delete_result = sfdc_client.delete(deploy_params)
    d = xmltodict.parse(delete_result,postprocessor=util.xmltodict_postprocessor)
    shutil.rmtree(tmp)
    result = d["soapenv:Envelope"]["soapenv:Body"]['checkDeployStatusResponse']['result']
    if result['success'] == True:
        removed = []
        for f in files:
            try:
                file_ext = f.split('.')[-1]
                metadata_type = util.get_meta_type_by_suffix(file_ext)
                if metadata_type == None or not 'directoryName' in metadata_type:
                    continue;
                directory = metadata_type['directoryName']
                filepath = os.path.join(project.location, "src", directory, f)
                metapath = os.path.join(project.location, "src", directory, f + '-meta.xml')
                os.remove(filepath)
                os.remove(metapath)
                # remove the entry in file properties
                project.conflict_manager.remove_from_local_store(f)
                removed.append(f)
            except Exception, e:
                # best-effort local cleanup; a missing local file is not fatal
                print e.message
        return util.generate_success_response("Removed metadata files: " + (",".join(removed)))
def execute(self):
    """Download Apex checkpoint (heap dump) results into debug/checkpoints.

    params:
        limit: maximum number of checkpoint results to fetch (default 20).

    Wipes any existing debug/checkpoints directory and writes each heap dump
    as debug/checkpoints/<ClassName>/<Line>/<stamp>-<UserId>.json. Returns a
    success response with the number of checkpoints downloaded.
    """
    number_of_checkpoints = 0
    #user_id = self.params.get('user_id', config.sfdc_client.user_id)
    limit = self.params.get('limit', 20)
    checkpoint_results = config.sfdc_client.get_apex_checkpoint_results(config.sfdc_client.user_id, limit)
    if 'records' in checkpoint_results:
        number_of_checkpoints = len(checkpoint_results['records'])
        # Start from a clean checkpoints directory
        if os.path.isdir(os.path.join(config.project.location, "debug", "checkpoints")):
            shutil.rmtree(os.path.join(config.project.location, "debug", "checkpoints"))
        os.makedirs(os.path.join(config.project.location, "debug", "checkpoints"))
        # Group checkpoint lines by the Apex entity they belong to
        apex_entity_to_lines = {}
        for r in checkpoint_results['records']:
            if 'HeapDump' in r and 'className' in r['HeapDump']:
                if r['HeapDump']['className'] not in apex_entity_to_lines:
                    apex_entity_to_lines[r['HeapDump']['className']] = [r['Line']]
                else:
                    apex_entity_to_lines[r['HeapDump']['className']].append(r['Line'])
        # Create a <ClassName>/<Line> directory for every checkpoint location
        for apex_entity_name, lines in apex_entity_to_lines.items():
            if not os.path.isdir(os.path.join(config.project.location, "debug", "checkpoints", apex_entity_name)):
                os.makedirs(os.path.join(config.project.location, "debug", "checkpoints", apex_entity_name))
            for l in lines:
                if not os.path.isdir(os.path.join(config.project.location, "debug", "checkpoints", apex_entity_name, str(l))):
                    os.makedirs(os.path.join(config.project.location, "debug", "checkpoints", apex_entity_name, str(l)))
        # Write each heap dump into its directory
        for r in checkpoint_results['records']:
            if 'HeapDump' in r and 'className' in r['HeapDump']:
                modstamp = r["HeapDump"]["heapDumpDate"]
                if config.is_windows:
                    # ':' is not a legal file name character on Windows
                    modstamp = modstamp.replace(':', ' ')
                file_name = modstamp + "-" + r["UserId"] + ".json"
                file_path = os.path.join(config.project.location, "debug", "checkpoints", r['HeapDump']['className'], str(r['Line']), file_name)
                src = open(file_path, "w")
                src.write(json.dumps(r, sort_keys=True, indent=4))
                src.close()
    else:
        config.logger.debug("No checkpoints to download")
    return util.generate_success_response(str(number_of_checkpoints) + ' Checkpoints successfully downloaded')
def execute(self):
    """Persist updated org credentials for this project."""
    config.project.update_credentials(self.params)
    message = 'Your credentials were updated successfully'
    return util.generate_success_response(message)
def execute(self):
    """Deploy the requested package to one or more destination orgs.

    Reads from self.params:
      - 'destinations'       : list of dicts, each with a 'username' key
      - 'package'            : package definition to retrieve and deploy
      - 'finish' (optional)  : True when this call should perform the deploy
      - 'new_deployment_name': optional name for an archived deployment

    When the "mm_compare_before_deployment" plugin setting is on and
    'finish' is falsy, returns a source-vs-destination comparison (per
    destination, per file: insert / update / update_conflict) instead of
    deploying.  Otherwise optionally archives the retrieved package under
    deploy/<username>/ and deploys to every destination in parallel.

    Returns a JSON string (wrapped in HTML when respond_with_html is set).
    """
    archive_deployments = config.connection.get_plugin_client_setting(
        "mm_archive_deployments", True)
    finish_deploy = self.params.get('finish', False)
    compare = config.connection.get_plugin_client_setting(
        "mm_compare_before_deployment", True)
    destinations = self.params['destinations']
    deploy_metadata = config.sfdc_client.retrieve(
        package=self.params['package'])
    deploy_name = self.params.get('new_deployment_name', None)
    threads = []

    if not finish_deploy and compare:
        # --- comparison pass: warn about overwrites before deploying ---
        source_retrieve_result = config.sfdc_client.retrieve(
            package=self.params['package'])
        debug('source_retrieve_result')
        debug(source_retrieve_result)

        source_dict = {}
        for fp in source_retrieve_result.fileProperties:
            source_dict[fp.fileName] = fp
        debug('source_dict')
        debug(source_dict)

        # Retrieve the same package from every destination in parallel.
        for destination in destinations:
            thread = CompareHandler(config.project, destination, self.params,
                                    self.params['package'])
            threads.append(thread)
            thread.start()
        compare_results = []
        for thread in threads:
            thread.join()
            compare_results.append(thread.result)
        debug('compare_results')
        debug(compare_results)

        # Index each destination's files by file name, keyed by username.
        destination_dict = {}
        for cr in compare_results:
            cr_dict = {}
            for fpfp in cr.fileProperties:
                cr_dict[fpfp.fileName] = fpfp
            destination_dict[cr.username] = cr_dict
        debug('destination_dict')
        debug(destination_dict)

        final_compare_result = {}
        for d in destinations:
            final_compare_result[d['username']] = {}
        for file_name, file_details in source_dict.iteritems():
            # package.xml is a manifest, not deployable content.
            if 'package.xml' in file_name:
                continue
            for username, username_value in destination_dict.iteritems():
                destination_retrieve_details = destination_dict[username]
                short_file_name = file_name.split('/')[-1]
                mtype = util.get_meta_type_by_suffix(
                    short_file_name.split('.')[-1])
                if file_name not in destination_retrieve_details:
                    final_compare_result[username][file_name] = {
                        'name': short_file_name,
                        'type': mtype['xmlName'],
                        'action': 'insert',
                        'message': 'Create'
                    }
                else:
                    destination_file_detail = \
                        destination_retrieve_details[file_name]
                    source_file_detail = source_dict[file_name]
                    # Newer (or equal) source wins; otherwise flag conflict.
                    if source_file_detail.lastModifiedDate >= \
                            destination_file_detail.lastModifiedDate:
                        final_compare_result[username][file_name] = {
                            'name': short_file_name,
                            'type': mtype['xmlName'],
                            'action': 'update',
                            'message': 'You will overwrite this file'
                        }
                    else:
                        final_compare_result[username][file_name] = {
                            'name': short_file_name,
                            'type': mtype['xmlName'],
                            'action': 'update_conflict',
                            'message':
                            'Destination file is newer than source file'
                        }
        debug('final_compare_result')
        debug(final_compare_result)

        if self.args.respond_with_html == True:
            html = util.generate_html_response('deploy_compare',
                                               final_compare_result,
                                               self.params)
            response = json.loads(util.generate_success_response(html,
                                                                 "html"))
            response['compare_success'] = True
            return json.dumps(response)
        else:
            return json.dumps(final_compare_result, indent=4)

    # --- deployment pass ---
    for destination in destinations:
        if archive_deployments:
            # Archive the retrieved package under deploy/<username>/ and
            # record the deployment in deploy/.config.
            deploy_path = os.path.join(config.project.location, "deploy",
                                       destination['username'])
            if not os.path.exists(deploy_path):
                os.makedirs(deploy_path)
            config_path = os.path.join(config.project.location, "deploy",
                                       '.config')
            if not os.path.isfile(config_path):
                with open(config_path, 'wb') as config_file:
                    config_file.write(json.dumps(
                        {'deployments': {'named': [], 'timestamped': []}}))
            ts = time.time()
            if not config.is_windows:
                timestamp = datetime.datetime.fromtimestamp(ts).strftime(
                    '%Y-%m-%d %H:%M:%S')
            else:
                # ':' is not a legal path character on Windows.
                timestamp = datetime.datetime.fromtimestamp(ts).strftime(
                    '%Y-%m-%d %H %M %S')
            if deploy_name:
                # Named deployments are replaced wholesale on reuse.
                target_dir = os.path.join(deploy_path, deploy_name)
                if os.path.isdir(target_dir):
                    shutil.rmtree(target_dir)
                os.makedirs(target_dir)
                util.extract_base64_encoded_zip(deploy_metadata.zipFile,
                                                target_dir)
                config_file_json = util.parse_json_from_file(config_path)
                named_deployment = {
                    'destination': destination['username'],
                    'name': deploy_name,
                    'timestamp': timestamp,
                    'id': util.get_random_string(30),
                    'package': os.path.join(target_dir, 'unpackaged',
                                            'package.xml')
                }
                config_file_json['deployments']['named'].append(
                    named_deployment)
                with open(config_path, 'wb') as config_file:
                    config_file.write(json.dumps(config_file_json))
            else:
                target_dir = os.path.join(deploy_path, timestamp)
                os.makedirs(target_dir)
                util.extract_base64_encoded_zip(deploy_metadata.zipFile,
                                                target_dir)
                config_file_json = util.parse_json_from_file(config_path)
                timestamped_deployment = {
                    'destination': destination['username'],
                    'timestamp': timestamp,
                    'id': util.get_random_string(30),
                    'package': os.path.join(target_dir, 'unpackaged',
                                            'package.xml')
                }
                config_file_json['deployments']['timestamped'].append(
                    timestamped_deployment)
                with open(config_path, 'wb') as config_file:
                    config_file.write(json.dumps(config_file_json))
        thread = DeploymentHandler(config.project, destination, self.params,
                                   deploy_metadata)
        threads.append(thread)
        thread.start()

    deploy_results = []
    for thread in threads:
        thread.join()
        deploy_results.append(thread.result)

    if self.args.respond_with_html == True:
        html = util.generate_html_response(self.args.operation,
                                           deploy_results, self.params)
        response = json.loads(util.generate_success_response(html, "html"))
        response['deploy_success'] = True
        # If deployment to one org fails, the entire deploy failed.
        for result in deploy_results:
            if result['success'] == False:
                response['deploy_success'] = False
                break
        return json.dumps(response)
    else:
        # Fix: original passed index=4, which is not a json.dumps keyword
        # and raised TypeError on this path; 'indent' was intended.
        return json.dumps(deploy_results, indent=4)
def execute(self):
    """Deploy the requested package to one or more destination orgs.

    Reads from self.params:
      - 'destinations'       : list of dicts, each with a 'username' key
      - 'package'            : package definition to retrieve and deploy
      - 'finish' (optional)  : True when this call should perform the deploy
      - 'new_deployment_name': optional name for an archived deployment

    When the "mm_compare_before_deployment" plugin setting is on and
    'finish' is falsy, this returns a per-destination, per-file comparison
    (insert / update / update_conflict) instead of deploying.  Otherwise it
    optionally archives the retrieved package under deploy/<username>/ and
    deploys to every destination in parallel.

    Returns a JSON string (wrapped in HTML when respond_with_html is set).
    """
    archive_deployments = config.connection.get_plugin_client_setting(
        "mm_archive_deployments", True)
    finish_deploy = self.params.get('finish', False)
    compare = config.connection.get_plugin_client_setting(
        "mm_compare_before_deployment", True)
    destinations = self.params['destinations']
    deploy_metadata = config.sfdc_client.retrieve(
        package=self.params['package'])
    deploy_name = self.params.get('new_deployment_name', None)
    threads = []

    if not finish_deploy and compare:
        # Comparison pass: retrieve the package from the source org and
        # from every destination, then compare modification timestamps.
        source_retrieve_result = config.sfdc_client.retrieve(
            package=self.params['package'])
        debug('source_retrieve_result')
        debug(source_retrieve_result)

        source_dict = {}
        for fp in source_retrieve_result.fileProperties:
            source_dict[fp.fileName] = fp
        debug('source_dict')
        debug(source_dict)

        for destination in destinations:
            thread = CompareHandler(config.project, destination, self.params,
                                    self.params['package'])
            threads.append(thread)
            thread.start()
        compare_results = []
        for thread in threads:
            thread.join()
            compare_results.append(thread.result)
        debug('compare_results')
        debug(compare_results)

        destination_dict = {}
        for cr in compare_results:
            cr_dict = {}
            for fpfp in cr.fileProperties:
                cr_dict[fpfp.fileName] = fpfp
            destination_dict[cr.username] = cr_dict
        debug('destination_dict')
        debug(destination_dict)

        final_compare_result = {}
        for d in destinations:
            final_compare_result[d['username']] = {}
        for file_name, file_details in source_dict.iteritems():
            # package.xml is the manifest, not deployable content.
            if 'package.xml' in file_name:
                continue
            for username, username_value in destination_dict.iteritems():
                destination_retrieve_details = destination_dict[username]
                short_file_name = file_name.split('/')[-1]
                mtype = util.get_meta_type_by_suffix(
                    short_file_name.split('.')[-1])
                if file_name not in destination_retrieve_details:
                    final_compare_result[username][file_name] = {
                        'name': short_file_name,
                        'type': mtype['xmlName'],
                        'action': 'insert',
                        'message': 'Create'
                    }
                else:
                    destination_file_detail = destination_retrieve_details[
                        file_name]
                    source_file_detail = source_dict[file_name]
                    if source_file_detail.lastModifiedDate >= \
                            destination_file_detail.lastModifiedDate:
                        final_compare_result[username][file_name] = {
                            'name': short_file_name,
                            'type': mtype['xmlName'],
                            'action': 'update',
                            'message': 'You will overwrite this file'
                        }
                    else:
                        final_compare_result[username][file_name] = {
                            'name': short_file_name,
                            'type': mtype['xmlName'],
                            'action': 'update_conflict',
                            'message':
                            'Destination file is newer than source file'
                        }
        debug('final_compare_result')
        debug(final_compare_result)

        if self.args.respond_with_html == True:
            html = util.generate_html_response('deploy_compare',
                                               final_compare_result,
                                               self.params)
            response = json.loads(
                util.generate_success_response(html, "html"))
            response['compare_success'] = True
            return json.dumps(response)
        else:
            return json.dumps(final_compare_result, indent=4)

    # Deployment pass.
    for destination in destinations:
        if archive_deployments:
            # Keep an on-disk archive of the deployed package and record
            # the deployment in deploy/.config.
            deploy_path = os.path.join(config.project.location, "deploy",
                                       destination['username'])
            if not os.path.exists(deploy_path):
                os.makedirs(deploy_path)
            config_path = os.path.join(config.project.location, "deploy",
                                       '.config')
            if not os.path.isfile(config_path):
                with open(config_path, 'wb') as config_file:
                    config_file.write(json.dumps(
                        {'deployments': {'named': [], 'timestamped': []}}))
            ts = time.time()
            if not config.is_windows:
                timestamp = datetime.datetime.fromtimestamp(ts).strftime(
                    '%Y-%m-%d %H:%M:%S')
            else:
                # Windows forbids ':' in paths.
                timestamp = datetime.datetime.fromtimestamp(ts).strftime(
                    '%Y-%m-%d %H %M %S')
            if deploy_name:
                archive_dir = os.path.join(deploy_path, deploy_name)
                # A re-used deployment name replaces the old archive.
                if os.path.isdir(archive_dir):
                    shutil.rmtree(archive_dir)
                os.makedirs(archive_dir)
                util.extract_base64_encoded_zip(deploy_metadata.zipFile,
                                                archive_dir)
                config_file_json = util.parse_json_from_file(config_path)
                named_deployment = {
                    'destination': destination['username'],
                    'name': deploy_name,
                    'timestamp': timestamp,
                    'id': util.get_random_string(30),
                    'package': os.path.join(archive_dir, 'unpackaged',
                                            'package.xml')
                }
                config_file_json['deployments']['named'].append(
                    named_deployment)
                with open(config_path, 'wb') as config_file:
                    config_file.write(json.dumps(config_file_json))
            else:
                archive_dir = os.path.join(deploy_path, timestamp)
                os.makedirs(archive_dir)
                util.extract_base64_encoded_zip(deploy_metadata.zipFile,
                                                archive_dir)
                config_file_json = util.parse_json_from_file(config_path)
                timestamped_deployment = {
                    'destination': destination['username'],
                    'timestamp': timestamp,
                    'id': util.get_random_string(30),
                    'package': os.path.join(archive_dir, 'unpackaged',
                                            'package.xml')
                }
                config_file_json['deployments']['timestamped'].append(
                    timestamped_deployment)
                with open(config_path, 'wb') as config_file:
                    config_file.write(json.dumps(config_file_json))
        thread = DeploymentHandler(config.project, destination, self.params,
                                   deploy_metadata)
        threads.append(thread)
        thread.start()

    deploy_results = []
    for thread in threads:
        thread.join()
        deploy_results.append(thread.result)

    if self.args.respond_with_html == True:
        html = util.generate_html_response(self.args.operation,
                                           deploy_results, self.params)
        response = json.loads(util.generate_success_response(html, "html"))
        response['deploy_success'] = True
        # If deployment to one org fails, the entire deploy failed.
        for result in deploy_results:
            if result['success'] == False:
                response['deploy_success'] = False
                break
        return json.dumps(response)
    else:
        # Fix: original passed index=4 — not a json.dumps keyword, raised
        # TypeError on this path; 'indent' was the intent.
        return json.dumps(deploy_results, indent=4)
os.unlink(file_path) except Exception, e: print e number_of_logs = len(logs) for log in logs: modstamp = log["modstamp"] if config.is_windows: modstamp = modstamp.replace(':', ' ') file_name = modstamp+"-"+log["userid"]+".log" src = open(os.path.join(config.connection.workspace,config.project.project_name,"debug","logs",file_name), "w") src.write(log["log"]) src.close() else: config.logger.debug("No logs to download") return util.generate_success_response(str(number_of_logs)+' Logs successfully downloaded') class NewTraceFlagCommand(Command): aliases=["new_log"] def execute(self): """ params = { "ApexCode" : "None", "ApexProfiling" : "01pd0000001yXtYAAU", "Callout" : True, "Database" : 1, "ExpirationDate" : 3, "ScopeId" : "", "System" : "", "TracedEntityId" : "", "Validation" : "",
number_of_logs = len(logs) for log in logs: modstamp = log["modstamp"] if config.is_windows: modstamp = modstamp.replace(':', ' ') file_name = modstamp + "-" + log["userid"] + ".log" src = open( os.path.join(config.connection.workspace, config.project.project_name, "debug", "logs", file_name), "w") src.write(log["log"]) src.close() else: config.logger.debug("No logs to download") return util.generate_success_response( str(number_of_logs) + ' Logs successfully downloaded') class NewTraceFlagCommand(Command): aliases = ["new_log"] def execute(self): """ params = { "ApexCode" : "None", "ApexProfiling" : "01pd0000001yXtYAAU", "Callout" : True, "Database" : 1, "ExpirationDate" : 3, "ScopeId" : "", "System" : "",
def execute(self): users = self.params.get('users', None) levels = self.params.get('debug_categories', None) expiration = self.params.get('expiration', None) config.project.put_debug_file(users, levels, expiration) return util.generate_success_response("Debug settings updated successfully")