def execute(self):
    """Run the project health check; return HTML for UI clients, JSON otherwise.

    Returns:
        str: an HTML success response when ``respond_with_html`` is set,
        otherwise the health-check dict serialized as pretty-printed JSON.
    """
    # Run once and reuse in either branch (original duplicated the call site).
    health_check_dict = config.project.run_health_check()
    if self.args.respond_with_html:
        html = util.generate_html_response(self.args.operation, health_check_dict)
        return util.generate_success_response(html, "html")
    else:
        return json.dumps(health_check_dict, indent=4)
def execute(self):
    """Run Apex unit tests via a check-only deploy of an empty package.

    Deploys an empty package with ``is_test=True`` so the org executes the
    requested test classes, then parses the SOAP deploy result for the
    ``runTestResult`` payload and, when present, the debug log.

    Returns:
        dict or str: the test-result dict, or an HTML success response when
        ``respond_with_html`` is set.
    """
    sfdc_client = config.sfdc_client
    empty_package_xml = util.get_empty_package_xml_contents()
    tmp, tmp_unpackaged = util.put_tmp_directory_on_disk(True)
    try:
        util.put_empty_package_xml_in_directory(tmp_unpackaged, empty_package_xml)
        zip_file = util.zip_directory(tmp, tmp)
        deploy_params = {
            "zip_file": zip_file,
            "rollback_on_error": True,
            "ret_xml": True,
            "classes": self.params.get('classes', []),
            "debug_categories": self.params.get('debug_categories', [])
        }
        deploy_result = sfdc_client.deploy(deploy_params, is_test=True)
        d = xmltodict.parse(deploy_result, postprocessor=util.xmltodict_postprocessor)
        # API 29+ nests the test result one level deeper, under 'details'.
        if int(float(util.SFDC_API_VERSION)) >= 29:
            result = d["soapenv:Envelope"]["soapenv:Body"]['checkDeployStatusResponse']['result']['details']['runTestResult']
        else:
            result = d["soapenv:Envelope"]["soapenv:Body"]['checkDeployStatusResponse']['result']['runTestResult']
        try:
            result['log'] = d["soapenv:Envelope"]["soapenv:Header"]["DebuggingInfo"]["debugLog"]
        except (KeyError, TypeError):
            # The SOAP header omits DebuggingInfo when no log was generated;
            # was a bare except, narrowed to the lookup failures that occur here.
            result['log'] = 'Log not available.'
    finally:
        # Always clean up the temp deploy directory, even if parsing raised
        # (original leaked `tmp` on any exception above).
        shutil.rmtree(tmp)
    if self.args.respond_with_html:
        html = util.generate_html_response(self.args.operation, result, self.params)
        return util.generate_success_response(html, "html")
    else:
        return result
def execute(self):
    """Run the project health check; return HTML for UI clients, JSON otherwise.

    Returns:
        str: an HTML success response when ``respond_with_html`` is set,
        otherwise the health-check dict serialized as pretty-printed JSON.
    """
    # Run once and reuse in either branch (original duplicated the call site).
    health_check_dict = config.project.run_health_check()
    if self.args.respond_with_html:
        html = util.generate_html_response(self.args.operation, health_check_dict)
        return util.generate_success_response(html, "html")
    else:
        return json.dumps(health_check_dict, indent=4)
def execute(self):
    """Run Apex tests asynchronously and assemble a coverage report.

    Falls back to the legacy synchronous test command when the API version
    is <= 28 or the ``mm_use_legacy_test_ui`` plugin setting is enabled.
    When no test classes are specified, every class in the project whose
    file name contains "test" is treated as a test class.

    Returns:
        dict or str: ``{"test_results": ..., "coverage": ...}``, or an HTML
        success response when ``respond_with_html`` is set.
    """
    if int(float(util.SFDC_API_VERSION)) <= 28 or config.connection.get_plugin_client_setting("mm_use_legacy_test_ui", False):
        return RunUnitTestsCommand(params=self.params, args=self.args).execute()

    project = config.project
    sfdc_client = config.sfdc_client

    if self.params.get("generate_logs", False):
        # Ensure a trace flag exists so debug logs are produced for this run.
        NewQuickTraceFlagCommand(params={"running_user_only": True}).execute()

    test_classes = self.params.get("classes", None)
    debug('running tests for')
    debug(test_classes)

    classes = []
    triggers = []
    if not test_classes:
        # No classes specified: run every test class in the project; anything
        # without "test" in its file name counts toward coverage instead.
        test_classes = []
        for dirname, dirnames, filenames in os.walk(os.path.join(project.location, "src", "classes")):
            for filename in filenames:
                if "test" in filename.lower() and "-meta.xml" not in filename:
                    test_classes.append(util.get_file_name_no_extension(filename))
                elif "-meta.xml" not in filename:
                    classes.append(util.get_file_name_no_extension(filename))
    else:
        # User specified the tests to run; collect non-test classes for the
        # coverage report only.
        for dirname, dirnames, filenames in os.walk(os.path.join(project.location, "src", "classes")):
            for filename in filenames:
                if "test" not in filename.lower() and "-meta.xml" not in filename:
                    classes.append(util.get_file_name_no_extension(filename))
    # Trigger collection is identical in both branches, so it is hoisted here
    # (original duplicated this walk verbatim in each branch).
    for dirname, dirnames, filenames in os.walk(os.path.join(project.location, "src", "triggers")):
        for filename in filenames:
            if "-meta.xml" not in filename:
                triggers.append(util.get_file_name_no_extension(filename))

    test_results = sfdc_client.run_async_apex_tests({"files": test_classes}, False)
    coverage_params = {
        "classes": classes,
        "triggers": triggers,
        "test_classes": test_classes
    }
    coverage_report = sfdc_client.get_apex_test_coverage(coverage_params, transform_ids=True)
    debug(">>>>>>>>>>")
    debug(coverage_report)

    result = {
        "test_results": test_results,
        "coverage": coverage_report
    }
    if self.args.respond_with_html:
        html = util.generate_html_response(self.args.operation, result, self.params)
        return util.generate_success_response(html, "html")
    else:
        return result
def execute(self):
    """Retrieve a package and deploy it to each destination org in parallel.

    When ``mm_archive_deployments`` is enabled, the retrieved metadata is
    extracted to ``deploy/<username>/<timestamp>`` before deploying.

    Returns:
        str: JSON — either an HTML success response augmented with
        ``deploy_success``, or the raw deploy results pretty-printed.
    """
    archive_deployments = config.connection.get_plugin_client_setting("mm_archive_deployments", True)
    deploy_metadata = config.sfdc_client.retrieve(package=self.params['package'])
    threads = []
    for destination in self.params['destinations']:
        if archive_deployments:
            deploy_path = os.path.join(config.project.location, "deploy", destination['username'])
            if not os.path.exists(deploy_path):
                os.makedirs(deploy_path)
            ts = time.time()
            # Windows forbids ':' in directory names, so use spaces there.
            if not config.is_windows:
                timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
            else:
                timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H %M %S')
            archive_path = os.path.join(deploy_path, timestamp)
            os.makedirs(archive_path)
            util.extract_base64_encoded_zip(deploy_metadata.zipFile, archive_path)
        thread = DeploymentHandler(config.project, destination, self.params, deploy_metadata)
        threads.append(thread)
        thread.start()
    deploy_results = []
    for thread in threads:
        thread.join()
        deploy_results.append(thread.result)
    if self.args.respond_with_html:
        html = util.generate_html_response(self.args.operation, deploy_results, self.params)
        response = json.loads(util.generate_success_response(html, "html"))
        response['deploy_success'] = True
        # If deployment to one org fails, the entire deploy was not successful.
        for result in deploy_results:
            if result['success'] == False:
                response['deploy_success'] = False
                break
        return json.dumps(response)
    else:
        # Fixed: was `index=4`, an invalid json.dumps keyword (TypeError at runtime).
        return json.dumps(deploy_results, indent=4)
def execute(self):
    """Retrieve a package and deploy it to each destination org in parallel.

    When ``mm_archive_deployments`` is enabled, the retrieved metadata is
    extracted to ``deploy/<username>/<timestamp>`` before deploying.

    Returns:
        str: JSON — either an HTML success response augmented with
        ``deploy_success``, or the raw deploy results pretty-printed.
    """
    archive_deployments = config.connection.get_plugin_client_setting("mm_archive_deployments", True)
    deploy_metadata = config.sfdc_client.retrieve(package=self.params['package'])
    threads = []
    for destination in self.params['destinations']:
        if archive_deployments:
            deploy_path = os.path.join(config.project.location, "deploy", destination['username'])
            if not os.path.exists(deploy_path):
                os.makedirs(deploy_path)
            ts = time.time()
            # Windows forbids ':' in directory names, so use spaces there.
            if not config.is_windows:
                timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
            else:
                timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H %M %S')
            archive_path = os.path.join(deploy_path, timestamp)
            os.makedirs(archive_path)
            util.extract_base64_encoded_zip(deploy_metadata.zipFile, archive_path)
        thread = DeploymentHandler(config.project, destination, self.params, deploy_metadata)
        threads.append(thread)
        thread.start()
    deploy_results = []
    for thread in threads:
        thread.join()
        deploy_results.append(thread.result)
    if self.args.respond_with_html:
        html = util.generate_html_response(self.args.operation, deploy_results, self.params)
        response = json.loads(util.generate_success_response(html, "html"))
        response['deploy_success'] = True
        # If deployment to one org fails, the entire deploy was not successful.
        for result in deploy_results:
            if result['success'] == False:
                response['deploy_success'] = False
                break
        return json.dumps(response)
    else:
        # Fixed: was `index=4`, an invalid json.dumps keyword (TypeError at runtime).
        return json.dumps(deploy_results, indent=4)
def execute(self):
    """Compare-then-deploy a package to multiple destination orgs.

    When ``finish`` is not set and ``mm_compare_before_deployment`` is
    enabled, this retrieves the package from the source org and from each
    destination, and returns a per-org comparison (``insert`` /
    ``update`` / ``update_conflict``) instead of deploying. Otherwise the
    retrieved metadata is archived (under a named or timestamped folder,
    tracked in ``deploy/.config``) and deployed to every destination in
    parallel.

    Returns:
        str: JSON — a compare result, or deploy results (optionally wrapped
        in an HTML success response with an overall success flag).
    """
    archive_deployments = config.connection.get_plugin_client_setting("mm_archive_deployments", True)
    finish_deploy = self.params.get('finish', False)
    compare = config.connection.get_plugin_client_setting("mm_compare_before_deployment", True)
    destinations = self.params['destinations']
    deploy_metadata = config.sfdc_client.retrieve(package=self.params['package'])
    deploy_name = self.params.get('new_deployment_name', None)
    threads = []

    if not finish_deploy and compare:
        source_retrieve_result = config.sfdc_client.retrieve(package=self.params['package'])
        debug('source_retrieve_result')
        debug(source_retrieve_result)

        # Index the source org's file properties by file name.
        source_dict = {}
        for fp in source_retrieve_result.fileProperties:
            source_dict[fp.fileName] = fp
        debug('source_dict')
        debug(source_dict)

        # Retrieve the same package from each destination org in parallel.
        for destination in destinations:
            thread = CompareHandler(config.project, destination, self.params, self.params['package'])
            threads.append(thread)
            thread.start()
        compare_results = []
        for thread in threads:
            thread.join()
            compare_results.append(thread.result)
        debug('compare_results')
        debug(compare_results)

        destination_dict = {}
        for cr in compare_results:
            cr_dict = {}
            for fpfp in cr.fileProperties:
                cr_dict[fpfp.fileName] = fpfp
            destination_dict[cr.username] = cr_dict
        debug('destination_dict')
        debug(destination_dict)

        final_compare_result = {}
        for d in destinations:
            final_compare_result[d['username']] = {}
        for file_name, file_details in source_dict.iteritems():
            # package.xml itself is never part of the comparison.
            if 'package.xml' in file_name:
                continue
            short_file_name = file_name.split('/')[-1]
            mtype = util.get_meta_type_by_suffix(short_file_name.split('.')[-1])
            for username, destination_retrieve_details in destination_dict.iteritems():
                if file_name not in destination_retrieve_details:
                    final_compare_result[username][file_name] = {
                        'name': short_file_name,
                        'type': mtype['xmlName'],
                        'action': 'insert',
                        'message': 'Create'
                    }
                else:
                    destination_file_detail = destination_retrieve_details[file_name]
                    source_file_detail = source_dict[file_name]
                    # Newer-or-equal source wins; a newer destination file is
                    # flagged as a conflict for the user to resolve.
                    if source_file_detail.lastModifiedDate >= destination_file_detail.lastModifiedDate:
                        final_compare_result[username][file_name] = {
                            'name': short_file_name,
                            'type': mtype['xmlName'],
                            'action': 'update',
                            'message': 'You will overwrite this file'
                        }
                    else:
                        final_compare_result[username][file_name] = {
                            'name': short_file_name,
                            'type': mtype['xmlName'],
                            'action': 'update_conflict',
                            'message': 'Destination file is newer than source file'
                        }
        debug('final_compare_result')
        debug(final_compare_result)
        if self.args.respond_with_html:
            html = util.generate_html_response('deploy_compare', final_compare_result, self.params)
            response = json.loads(util.generate_success_response(html, "html"))
            response['compare_success'] = True
            return json.dumps(response)
        else:
            return json.dumps(final_compare_result, indent=4)

    for destination in destinations:
        if archive_deployments:
            deploy_path = os.path.join(config.project.location, "deploy", destination['username'])
            if not os.path.exists(deploy_path):
                os.makedirs(deploy_path)
            config_path = os.path.join(config.project.location, "deploy", '.config')
            if not os.path.isfile(config_path):
                # Seed the deployment-archive config on first use.
                config_file = open(config_path, 'wb')
                config_file.write(json.dumps({'deployments': {'named': [], 'timestamped': []}}))
                config_file.close()
            ts = time.time()
            # Windows forbids ':' in directory names, so use spaces there.
            if not config.is_windows:
                timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
            else:
                timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H %M %S')
            if deploy_name:
                named_path = os.path.join(deploy_path, deploy_name)
                # A named deployment replaces any previous archive of the same name.
                if os.path.isdir(named_path):
                    shutil.rmtree(named_path)
                os.makedirs(named_path)
                util.extract_base64_encoded_zip(deploy_metadata.zipFile, named_path)
                config_file_json = util.parse_json_from_file(config_path)
                config_file_json['deployments']['named'].append({
                    'destination': destination['username'],
                    'name': deploy_name,
                    'timestamp': timestamp,
                    'id': util.get_random_string(30),
                    'package': os.path.join(named_path, 'unpackaged', 'package.xml')
                })
                config_file = open(config_path, 'wb')
                config_file.write(json.dumps(config_file_json))
                config_file.close()
            else:
                stamped_path = os.path.join(deploy_path, timestamp)
                os.makedirs(stamped_path)
                util.extract_base64_encoded_zip(deploy_metadata.zipFile, stamped_path)
                config_file_json = util.parse_json_from_file(config_path)
                config_file_json['deployments']['timestamped'].append({
                    'destination': destination['username'],
                    'timestamp': timestamp,
                    'id': util.get_random_string(30),
                    'package': os.path.join(stamped_path, 'unpackaged', 'package.xml')
                })
                config_file = open(config_path, 'wb')
                config_file.write(json.dumps(config_file_json))
                config_file.close()
        thread = DeploymentHandler(config.project, destination, self.params, deploy_metadata)
        threads.append(thread)
        thread.start()

    deploy_results = []
    for thread in threads:
        thread.join()
        deploy_results.append(thread.result)

    if self.args.respond_with_html:
        html = util.generate_html_response(self.args.operation, deploy_results, self.params)
        response = json.loads(util.generate_success_response(html, "html"))
        response['deploy_success'] = True
        # If deployment to one org fails, the entire deploy was not successful.
        for result in deploy_results:
            if result['success'] == False:
                response['deploy_success'] = False
                break
        return json.dumps(response)
    else:
        # Fixed: was `index=4`, an invalid json.dumps keyword (TypeError at runtime).
        return json.dumps(deploy_results, indent=4)
def execute(self):
    """Compare-then-deploy a package to multiple destination orgs.

    When ``finish`` is not set and ``mm_compare_before_deployment`` is
    enabled, this retrieves the package from the source org and from each
    destination, and returns a per-org comparison (``insert`` /
    ``update`` / ``update_conflict``) instead of deploying. Otherwise the
    retrieved metadata is archived (under a named or timestamped folder,
    tracked in ``deploy/.config``) and deployed to every destination in
    parallel.

    Returns:
        str: JSON — a compare result, or deploy results (optionally wrapped
        in an HTML success response with an overall success flag).
    """
    archive_deployments = config.connection.get_plugin_client_setting("mm_archive_deployments", True)
    finish_deploy = self.params.get('finish', False)
    compare = config.connection.get_plugin_client_setting("mm_compare_before_deployment", True)
    destinations = self.params['destinations']
    deploy_metadata = config.sfdc_client.retrieve(package=self.params['package'])
    deploy_name = self.params.get('new_deployment_name', None)
    threads = []

    if not finish_deploy and compare:
        source_retrieve_result = config.sfdc_client.retrieve(package=self.params['package'])
        debug('source_retrieve_result')
        debug(source_retrieve_result)

        # Index the source org's file properties by file name.
        source_dict = {}
        for fp in source_retrieve_result.fileProperties:
            source_dict[fp.fileName] = fp
        debug('source_dict')
        debug(source_dict)

        # Retrieve the same package from each destination org in parallel.
        for destination in destinations:
            thread = CompareHandler(config.project, destination, self.params, self.params['package'])
            threads.append(thread)
            thread.start()
        compare_results = []
        for thread in threads:
            thread.join()
            compare_results.append(thread.result)
        debug('compare_results')
        debug(compare_results)

        destination_dict = {}
        for cr in compare_results:
            cr_dict = {}
            for fpfp in cr.fileProperties:
                cr_dict[fpfp.fileName] = fpfp
            destination_dict[cr.username] = cr_dict
        debug('destination_dict')
        debug(destination_dict)

        final_compare_result = {}
        for d in destinations:
            final_compare_result[d['username']] = {}
        for file_name, file_details in source_dict.iteritems():
            # package.xml itself is never part of the comparison.
            if 'package.xml' in file_name:
                continue
            short_file_name = file_name.split('/')[-1]
            mtype = util.get_meta_type_by_suffix(short_file_name.split('.')[-1])
            for username, destination_retrieve_details in destination_dict.iteritems():
                if file_name not in destination_retrieve_details:
                    final_compare_result[username][file_name] = {
                        'name': short_file_name,
                        'type': mtype['xmlName'],
                        'action': 'insert',
                        'message': 'Create'
                    }
                else:
                    destination_file_detail = destination_retrieve_details[file_name]
                    source_file_detail = source_dict[file_name]
                    # Newer-or-equal source wins; a newer destination file is
                    # flagged as a conflict for the user to resolve.
                    if source_file_detail.lastModifiedDate >= destination_file_detail.lastModifiedDate:
                        final_compare_result[username][file_name] = {
                            'name': short_file_name,
                            'type': mtype['xmlName'],
                            'action': 'update',
                            'message': 'You will overwrite this file'
                        }
                    else:
                        final_compare_result[username][file_name] = {
                            'name': short_file_name,
                            'type': mtype['xmlName'],
                            'action': 'update_conflict',
                            'message': 'Destination file is newer than source file'
                        }
        debug('final_compare_result')
        debug(final_compare_result)
        if self.args.respond_with_html:
            html = util.generate_html_response('deploy_compare', final_compare_result, self.params)
            response = json.loads(util.generate_success_response(html, "html"))
            response['compare_success'] = True
            return json.dumps(response)
        else:
            return json.dumps(final_compare_result, indent=4)

    for destination in destinations:
        if archive_deployments:
            deploy_path = os.path.join(config.project.location, "deploy", destination['username'])
            if not os.path.exists(deploy_path):
                os.makedirs(deploy_path)
            config_path = os.path.join(config.project.location, "deploy", '.config')
            if not os.path.isfile(config_path):
                # Seed the deployment-archive config on first use.
                config_file = open(config_path, 'wb')
                config_file.write(json.dumps({'deployments': {'named': [], 'timestamped': []}}))
                config_file.close()
            ts = time.time()
            # Windows forbids ':' in directory names, so use spaces there.
            if not config.is_windows:
                timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
            else:
                timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H %M %S')
            if deploy_name:
                named_path = os.path.join(deploy_path, deploy_name)
                # A named deployment replaces any previous archive of the same name.
                if os.path.isdir(named_path):
                    shutil.rmtree(named_path)
                os.makedirs(named_path)
                util.extract_base64_encoded_zip(deploy_metadata.zipFile, named_path)
                config_file_json = util.parse_json_from_file(config_path)
                config_file_json['deployments']['named'].append({
                    'destination': destination['username'],
                    'name': deploy_name,
                    'timestamp': timestamp,
                    'id': util.get_random_string(30),
                    'package': os.path.join(named_path, 'unpackaged', 'package.xml')
                })
                config_file = open(config_path, 'wb')
                config_file.write(json.dumps(config_file_json))
                config_file.close()
            else:
                stamped_path = os.path.join(deploy_path, timestamp)
                os.makedirs(stamped_path)
                util.extract_base64_encoded_zip(deploy_metadata.zipFile, stamped_path)
                config_file_json = util.parse_json_from_file(config_path)
                config_file_json['deployments']['timestamped'].append({
                    'destination': destination['username'],
                    'timestamp': timestamp,
                    'id': util.get_random_string(30),
                    'package': os.path.join(stamped_path, 'unpackaged', 'package.xml')
                })
                config_file = open(config_path, 'wb')
                config_file.write(json.dumps(config_file_json))
                config_file.close()
        thread = DeploymentHandler(config.project, destination, self.params, deploy_metadata)
        threads.append(thread)
        thread.start()

    deploy_results = []
    for thread in threads:
        thread.join()
        deploy_results.append(thread.result)

    if self.args.respond_with_html:
        html = util.generate_html_response(self.args.operation, deploy_results, self.params)
        response = json.loads(util.generate_success_response(html, "html"))
        response['deploy_success'] = True
        # If deployment to one org fails, the entire deploy was not successful.
        for result in deploy_results:
            if result['success'] == False:
                response['deploy_success'] = False
                break
        return json.dumps(response)
    else:
        # Fixed: was `index=4`, an invalid json.dumps keyword (TypeError at runtime).
        return json.dumps(deploy_results, indent=4)