def execute(self):
    """Ask the connected plugin client (e.g. the editor) to open a project file.

    Reads ``file_name`` (required) and ``line_number`` (optional, default 0)
    from self.params, resolves the file's absolute path inside the project's
    src/<metadata directory>/ folder, and forwards an "open_file_in_project"
    command to the client.
    """
    requested = self.params["file_name"]
    suffix = util.get_file_extension_no_period(requested)
    meta_type = util.get_meta_type_by_suffix(suffix)
    absolute_path = os.path.join(
        config.project.location, "src", meta_type["directoryName"], requested)
    payload = {
        "project_name": config.project.project_name,
        "file_name": absolute_path,
        "line_number": self.params.get("line_number", 0)
    }
    config.connection.run_subl_command("open_file_in_project", json.dumps(payload))
    return util.generate_success_response("ok")
def execute(self):
    """Delete the requested metadata from the Salesforce org, then remove the
    corresponding local project files.

    self.params['files'] lists the file names to delete. Steps:
      1. Expand the list so every file travels with its companion -meta.xml
         (and every -meta.xml with its source file).
      2. Build a destructive changeset (destructiveChanges package.xml plus an
         empty package.xml) in a tmp directory, zip it, and submit it through
         the metadata API delete call.
      3. On success, remove each local file (and its -meta.xml, if present)
         and clear its entry from the conflict manager's local store.

    Returns a success response naming the files removed locally.
    """
    project = config.project
    sfdc_client = config.sfdc_client
    files = self.params.get('files', None)

    # Every -meta.xml must be deleted together with its source file...
    # (iterate over a snapshot; appended entries need no re-visit)
    for f in list(files):
        if '-meta.xml' in f:
            corresponding_file = f.split('-meta.xml')[0]
            if corresponding_file not in files:
                files.append(corresponding_file)
    # ...and every source file whose type carries a meta file must be deleted
    # together with its -meta.xml.
    for f in list(files):
        if '-meta.xml' in f:
            continue
        file_ext = f.split('.')[-1]
        metadata_type = util.get_meta_type_by_suffix(file_ext)
        if metadata_type['metaFile'] == True:
            corresponding_file = f + '-meta.xml'
            if corresponding_file not in files:
                files.append(corresponding_file)

    # Stage the destructive changeset on disk and zip it for the API call.
    metadata_package_dict = util.get_metadata_hash(files)
    tmp, tmp_unpackaged = util.put_tmp_directory_on_disk(True)
    package_xml = util.get_package_xml_contents(metadata_package_dict)
    util.put_package_xml_in_directory(tmp_unpackaged, package_xml, True)
    empty_package_xml = util.get_empty_package_xml_contents()
    util.put_empty_package_xml_in_directory(tmp_unpackaged, empty_package_xml)
    zip_file = util.zip_directory(tmp, tmp)

    # purgeOnDelete hard-deletes instead of using the recycle bin; it is
    # disabled when the org requires test runs for deployments.
    purge_on_delete_setting = config.connection.get_plugin_client_setting("mm_purge_on_delete", False)
    if purge_on_delete_setting:
        describe_result = config.sfdc_client.describeMetadata(retXml=False)
        if describe_result.testRequired == True:
            purge_on_delete_setting = False

    deploy_params = {
        "zip_file": zip_file,
        "rollback_on_error": True,
        "ret_xml": True,
        "purge_on_delete": purge_on_delete_setting
    }
    delete_result = sfdc_client.delete(deploy_params)
    d = xmltodict.parse(delete_result, postprocessor=util.xmltodict_postprocessor)
    shutil.rmtree(tmp)
    result = d["soapenv:Envelope"]["soapenv:Body"]['checkDeployStatusResponse']['result']

    # Fix: 'removed' must exist even when the server-side delete failed;
    # previously the join below raised a NameError in that case.
    removed = []
    if result['success'] == True:
        for f in files:
            try:
                file_ext = f.split('.')[-1]
                metadata_type = util.get_meta_type_by_suffix(file_ext)
                if metadata_type == None or not 'directoryName' in metadata_type:
                    continue
                directory = metadata_type['directoryName']
                filepath = os.path.join(project.location, "src", directory, f)
                metapath = os.path.join(project.location, "src", directory, f + '-meta.xml')
                os.remove(filepath)
                # Fix: only remove the meta file when it exists; the previous
                # unconditional os.remove raised for types without one, which
                # skipped the bookkeeping below even though the source file
                # had already been deleted.
                if os.path.exists(metapath):
                    os.remove(metapath)
                # remove the entry in file properties
                project.conflict_manager.remove_from_local_store(f)
                removed.append(f)
            except Exception as e:
                # local removal is best-effort; log and move on
                print(str(e))
    return util.generate_success_response("Removed metadata files: " + (",".join(removed)))
def execute(self):
    """Compile (deploy) the selected files to the Salesforce org.

    self.params:
        files  -- list of absolute file paths to compile
        action -- when set to 'overwrite', the conflict check is skipped

    Apex-only selections can be compiled through the tooling API (when the
    ``mm_compile_with_tooling_api`` setting is on and the API version allows
    it); anything else goes through a metadata API deploy of a tmp package.
    Returns a response payload describing the compile result.
    """
    project = config.project
    files = self.params.get('files', None)
    use_tooling_api = config.connection.get_plugin_client_setting('mm_compile_with_tooling_api', False)
    check_for_conflicts = config.connection.get_plugin_client_setting('mm_compile_check_conflicts', False)

    compiling_apex_metadata = True
    for f in files:
        if f.split('.')[-1] not in util.TOOLING_API_EXTENSIONS:
            # cannot use tooling api
            compiling_apex_metadata = False
            break

    # when compiling apex metadata, check to see if it is newer on the server
    if check_for_conflicts and compiling_apex_metadata:
        if 'action' not in self.params or self.params['action'] != 'overwrite':
            has_conflict, msg = config.project.conflict_manager.check_for_conflicts(files)
            if has_conflict:
                return msg

    # use tooling api here, if possible
    if use_tooling_api == True and compiling_apex_metadata and int(float(util.SFDC_API_VERSION)) >= 27:
        if 'metadata_container' not in project.settings or project.settings['metadata_container'] == None:
            container_id = project.sfdc_client.get_metadata_container_id()
            new_settings = project.settings
            new_settings['metadata_container'] = container_id
            project.put_settings_file(new_settings)
        else:
            container_id = project.settings['metadata_container']
        try:
            result = project.sfdc_client.compile_with_tooling_api(files, container_id)
        except MetadataContainerException:
            # The cached container is stale: create a fresh one and retry
            # exactly once. (Fix: the previous re-entrant call to
            # CompileSelectedMetadataCommand(...).execute() could recurse
            # indefinitely if the container kept failing.)
            project.sfdc_client.delete_mavensmate_metadatacontainers_for_this_user()
            response = project.sfdc_client.new_metadatacontainer_for_this_user()
            project.update_setting("metadata_container", response["id"])
            result = project.sfdc_client.compile_with_tooling_api(files, response["id"])
        if 'Id' in result and 'State' in result:
            if result['State'] == 'Completed':
                project.conflict_manager.refresh_local_store(files=files)
            return util.generate_response(result)
    # the user has either chosen not to use the tooling api, or it's non apex metadata
    else:
        try:
            # Pair every file with its -meta.xml companion (both directions).
            for f in files:
                if '-meta.xml' in f:
                    corresponding_file = f.split('-meta.xml')[0]
                    if corresponding_file not in files:
                        files.append(corresponding_file)
            for f in files:
                if '-meta.xml' in f:
                    continue
                file_ext = f.split('.')[-1]
                metadata_type = util.get_meta_type_by_suffix(file_ext)
                if metadata_type == None:
                    # Document bodies can have arbitrary suffixes; detect them
                    # by their parent "documents" directory instead.
                    if sys.platform == "win32":
                        dir_parts = f.split("\\")
                    else:
                        dir_parts = f.split("/")
                    if 'documents' in dir_parts:
                        metadata_type = util.get_meta_type_by_name("Document")
                if metadata_type != None and 'metaFile' in metadata_type and metadata_type['metaFile'] == True:
                    corresponding_file = f + '-meta.xml'
                    if corresponding_file not in files:
                        files.append(corresponding_file)

            metadata_package_dict = util.get_metadata_hash(files)
            tmp = util.put_tmp_directory_on_disk()
            os.makedirs(os.path.join(tmp, "unpackaged"))
            # copy files from project directory to tmp
            for full_file_path in files:
                if 'package.xml' in full_file_path:
                    continue
                if config.is_windows:
                    destination = os.path.join(tmp, 'unpackaged', full_file_path.split('\\src\\')[1])
                else:
                    destination = os.path.join(tmp, 'unpackaged', full_file_path.split('/src/')[1])
                destination_directory = os.path.dirname(destination)
                if not os.path.exists(destination_directory):
                    os.makedirs(destination_directory)
                shutil.copy2(full_file_path, destination_directory)

            package_xml = util.get_package_xml_contents(metadata_package_dict)
            util.put_package_xml_in_directory(os.path.join(tmp, "unpackaged"), package_xml)
            zip_file = util.zip_directory(tmp, tmp)
            deploy_params = {
                "zip_file": zip_file,
                "rollback_on_error": True,
                "ret_xml": True
            }
            deploy_result = project.sfdc_client.deploy(deploy_params)
            d = xmltodict.parse(deploy_result, postprocessor=util.xmltodict_postprocessor)
            result = d["soapenv:Envelope"]["soapenv:Body"]['checkDeployStatusResponse']['result']
            shutil.rmtree(tmp)
            # Get new properties for the files we just compiled
            if result['success'] == True:
                project.conflict_manager.refresh_local_store(files=files)
            return json.dumps(result)
        except Exception as e:
            # best-effort cleanup; tmp may not exist if the failure was early
            try:
                shutil.rmtree(tmp)
            except:
                pass
            return util.generate_error_response(e.message)
def execute(self):
    """Create a new metadata component in the org from skeleton/template files.

    self.params:
        metadata_type                -- e.g. 'ApexClass', 'ApexTrigger'
        api_name                     -- name of the new component (required)
        apex_class_type              -- class template flavor; defaults to
                                        'default' for ApexClass
        apex_trigger_object_api_name -- sObject an ApexTrigger is created on
        github_template              -- optional template descriptor

    Raises MMException when the API name already exists in the org (fetching
    the server copy into the project first if it is missing locally). On
    success, deploys the skeleton, copies the generated files into the
    project's src/ tree, registers the component in package.xml, and returns
    the deploy result as JSON.
    """
    project = config.project
    sfdc_client = config.sfdc_client
    metadata_type = self.params.get('metadata_type', None)
    api_name = self.params.get('api_name', None)
    apex_class_type = self.params.get('apex_class_type', None)
    # Fix: this assignment was duplicated in the original.
    apex_trigger_object_api_name = self.params.get('apex_trigger_object_api_name', None)
    github_template = self.params.get('github_template', None)

    if metadata_type == 'ApexClass' and apex_class_type == None:
        apex_class_type = 'default'
    if api_name == None:
        return util.generate_error_response("You must provide a name for the new metadata.")

    if sfdc_client.does_metadata_exist(object_type=metadata_type, name=api_name) == True:
        mt = util.get_meta_type_by_name(metadata_type)
        filepath = os.path.join(project.location, 'src', mt['directoryName'], api_name + '.' + mt['suffix'])
        fetched = ""
        if not os.path.exists(filepath):
            # pull the existing server copy into the project so the user can see it
            self.params['files'] = [filepath]
            refresh_selected_metadata(self.params)
            fetched = ", fetched metadata file from server"
        raise MMException("This API name is already in use in your org" + fetched + ".")

    # Stage the skeleton files plus a package.xml and deploy them to the org.
    tmp, tmp_unpackaged = util.put_tmp_directory_on_disk(True)
    util.put_skeleton_files_on_disk(metadata_type, api_name, tmp_unpackaged, apex_class_type, apex_trigger_object_api_name, github_template)
    package_xml_body = util.get_package_xml_contents({metadata_type: [api_name]})
    util.put_package_xml_in_directory(tmp_unpackaged, package_xml_body)
    zip_file = util.zip_directory(tmp, tmp)
    deploy_params = {
        "zip_file": zip_file,
        "rollback_on_error": True,
        "ret_xml": True
    }
    deploy_result = sfdc_client.deploy(deploy_params)
    d = xmltodict.parse(deploy_result, postprocessor=util.xmltodict_postprocessor)

    # Copy the generated source (and -meta.xml) files into the project tree.
    meta_dir = ""
    files = []
    path = None
    for dirname, dirnames, filenames in os.walk(tmp_unpackaged):
        for filename in filenames:
            if 'package.xml' in filename:
                continue
            full_file_path = os.path.join(dirname, filename)
            if '-meta.xml' in filename:
                extension = filename.replace('-meta.xml', '').split(".")[-1]
            else:
                extension = filename.split(".")[-1]
            mt = util.get_meta_type_by_suffix(extension)
            if mt != None:
                meta_dir = mt['directoryName']
                path = os.path.join(project.location, 'src', meta_dir)
                if not os.path.exists(path):
                    os.makedirs(path)
                files.append(os.path.join(path, filename))
            elif extension != "xml":
                continue
            # only apex files and meta.xml files should make it to here
            shutil.copy(full_file_path, path)
    shutil.rmtree(tmp)
    project.update_package_xml_with_metadata(metadata_type, api_name)
    project.conflict_manager.refresh_local_store(files=files)
    return json.dumps(d["soapenv:Envelope"]["soapenv:Body"]['checkDeployStatusResponse']['result'])
def execute(self):
    """Deploy the requested package to one or more destination orgs.

    self.params:
        destinations        -- list of dicts, each with at least 'username'
        package             -- package definition used for retrieve/deploy
        finish              -- True skips the pre-deploy comparison phase
        new_deployment_name -- optional name under which to archive the deploy

    When comparison is enabled (``mm_compare_before_deployment``) and this is
    not the "finish" phase, the source package is diffed against every
    destination org (insert / update / update_conflict per file) and the
    comparison result is returned instead of deploying.

    Otherwise the retrieved payload is (optionally) archived under
    <project>/deploy/<username>/... and deployed to every destination on a
    worker thread; the joined results are returned as JSON (or HTML when the
    caller asked for it).
    """
    archive_deployments = config.connection.get_plugin_client_setting(
        "mm_archive_deployments", True)
    finish_deploy = self.params.get('finish', False)
    compare = config.connection.get_plugin_client_setting(
        "mm_compare_before_deployment", True)
    destinations = self.params['destinations']
    deploy_metadata = config.sfdc_client.retrieve(
        package=self.params['package'])
    deploy_name = self.params.get('new_deployment_name', None)
    threads = []

    if not finish_deploy and compare:
        # Index the source org's file properties by file name.
        source_retrieve_result = config.sfdc_client.retrieve(
            package=self.params['package'])
        debug('source_retrieve_result')
        debug(source_retrieve_result)
        source_dict = {}
        for fp in source_retrieve_result.fileProperties:
            source_dict[fp.fileName] = fp
        debug('source_dict')
        debug(source_dict)

        # Retrieve the same package from every destination org in parallel.
        for destination in destinations:
            thread = CompareHandler(config.project, destination,
                                    self.params, self.params['package'])
            threads.append(thread)
            thread.start()
        compare_results = []
        for thread in threads:
            thread.join()
            compare_results.append(thread.result)
        debug('compare_results')
        debug(compare_results)

        # Index each destination's file properties by username and file name.
        destination_dict = {}
        for cr in compare_results:
            cr_dict = {}
            for fpfp in cr.fileProperties:
                cr_dict[fpfp.fileName] = fpfp
            destination_dict[cr.username] = cr_dict
        debug('destination_dict')
        debug(destination_dict)

        # For every source file, classify the pending change per destination.
        # (A redundant second package.xml check inside the inner loop and a
        # large commented-out duplicate of this logic were removed.)
        final_compare_result = {}
        for d in destinations:
            final_compare_result[d['username']] = {}
        for file_name, file_details in source_dict.iteritems():
            if 'package.xml' in file_name:
                continue
            for username, username_value in destination_dict.iteritems():
                destination_retrieve_details = destination_dict[username]
                short_file_name = file_name.split('/')[-1]
                mtype = util.get_meta_type_by_suffix(
                    short_file_name.split('.')[-1])
                if file_name not in destination_retrieve_details:
                    final_compare_result[username][file_name] = {
                        'name': short_file_name,
                        'type': mtype['xmlName'],
                        'action': 'insert',
                        'message': 'Create'
                    }
                else:
                    destination_file_detail = destination_retrieve_details[
                        file_name]
                    source_file_detail = source_dict[file_name]
                    if source_file_detail.lastModifiedDate >= destination_file_detail.lastModifiedDate:
                        final_compare_result[username][file_name] = {
                            'name': short_file_name,
                            'type': mtype['xmlName'],
                            'action': 'update',
                            'message': 'You will overwrite this file'
                        }
                    else:
                        final_compare_result[username][file_name] = {
                            'name': short_file_name,
                            'type': mtype['xmlName'],
                            'action': 'update_conflict',
                            'message': 'Destination file is newer than source file'
                        }
        debug('final_compare_result')
        debug(final_compare_result)
        if self.args.respond_with_html == True:
            html = util.generate_html_response('deploy_compare',
                                               final_compare_result,
                                               self.params)
            response = json.loads(
                util.generate_success_response(html, "html"))
            response['compare_success'] = True
            return json.dumps(response)
        else:
            return json.dumps(final_compare_result, indent=4)

    # Deployment phase: archive (optionally) and deploy to each destination.
    config_path = os.path.join(config.project.location, "deploy", '.config')
    for destination in destinations:
        if archive_deployments:
            deploy_path = os.path.join(config.project.location, "deploy",
                                       destination['username'])
            if not os.path.exists(deploy_path):
                os.makedirs(deploy_path)
            if not os.path.isfile(config_path):
                config_file = open(config_path, 'wb')
                config_file_contents = {
                    'deployments': {
                        'named': [],
                        'timestamped': []
                    }
                }
                config_file.write(json.dumps(config_file_contents))
                config_file.close()
            ts = time.time()
            # Windows forbids ':' in directory names, hence the space variant.
            if not config.is_windows:
                timestamp = datetime.datetime.fromtimestamp(ts).strftime(
                    '%Y-%m-%d %H:%M:%S')
            else:
                timestamp = datetime.datetime.fromtimestamp(ts).strftime(
                    '%Y-%m-%d %H %M %S')
            if deploy_name:
                archive_dir = os.path.join(deploy_path, deploy_name)
                # A named archive replaces any previous archive with that name.
                if os.path.isdir(archive_dir):
                    shutil.rmtree(archive_dir)
                os.makedirs(archive_dir)
                util.extract_base64_encoded_zip(deploy_metadata.zipFile,
                                                archive_dir)
                config_file_json = util.parse_json_from_file(config_path)
                named_deployment = {
                    'destination': destination['username'],
                    'name': deploy_name,
                    'timestamp': timestamp,
                    'id': util.get_random_string(30),
                    'package': os.path.join(archive_dir, 'unpackaged',
                                            'package.xml')
                }
                config_file_json['deployments']['named'].append(
                    named_deployment)
                config_file = open(config_path, 'wb')
                config_file.write(json.dumps(config_file_json))
                config_file.close()
            else:
                archive_dir = os.path.join(deploy_path, timestamp)
                os.makedirs(archive_dir)
                util.extract_base64_encoded_zip(deploy_metadata.zipFile,
                                                archive_dir)
                config_file_json = util.parse_json_from_file(config_path)
                timestamped_deployment = {
                    'destination': destination['username'],
                    'timestamp': timestamp,
                    'id': util.get_random_string(30),
                    'package': os.path.join(archive_dir, 'unpackaged',
                                            'package.xml')
                }
                config_file_json['deployments']['timestamped'].append(
                    timestamped_deployment)
                config_file = open(config_path, 'wb')
                config_file.write(json.dumps(config_file_json))
                config_file.close()
        thread = DeploymentHandler(config.project, destination, self.params,
                                   deploy_metadata)
        threads.append(thread)
        thread.start()

    deploy_results = []
    for thread in threads:
        thread.join()
        deploy_results.append(thread.result)

    if self.args.respond_with_html == True:
        html = util.generate_html_response(self.args.operation,
                                           deploy_results, self.params)
        response = json.loads(util.generate_success_response(html, "html"))
        response['deploy_success'] = True
        # if deployment to one org fails, the entire deploy was not successful
        for result in deploy_results:
            if result['success'] == False:
                response['deploy_success'] = False
                break
        return json.dumps(response)
    else:
        # Fix: json.dumps has no 'index' keyword; the original raised
        # TypeError here. The intended argument is indent=4.
        return json.dumps(deploy_results, indent=4)
def execute(self):
    """Deploy the requested package to one or more destination orgs.

    self.params:
        destinations        -- list of dicts, each with at least 'username'
        package             -- package definition used for retrieve/deploy
        finish              -- True skips the pre-deploy comparison phase
        new_deployment_name -- optional name under which to archive the deploy

    When ``mm_compare_before_deployment`` is enabled and this is not the
    "finish" phase, the source package is diffed against every destination
    org (insert / update / update_conflict per file) and the comparison
    result is returned instead of deploying. Otherwise the retrieved payload
    is (optionally) archived under <project>/deploy/<username>/... and
    deployed to every destination on a worker thread; the joined results are
    returned as JSON (or HTML when requested).
    """
    archive_deployments = config.connection.get_plugin_client_setting("mm_archive_deployments", True)
    finish_deploy = self.params.get('finish', False)
    compare = config.connection.get_plugin_client_setting("mm_compare_before_deployment", True)
    destinations = self.params['destinations']
    deploy_metadata = config.sfdc_client.retrieve(package=self.params['package'])
    deploy_name = self.params.get('new_deployment_name', None)
    threads = []

    if not finish_deploy and compare:
        # Index the source org's file properties by file name.
        source_retrieve_result = config.sfdc_client.retrieve(package=self.params['package'])
        debug('source_retrieve_result')
        debug(source_retrieve_result)
        source_dict = {}
        for fp in source_retrieve_result.fileProperties:
            source_dict[fp.fileName] = fp
        debug('source_dict')
        debug(source_dict)

        # Retrieve the same package from every destination org in parallel.
        for destination in destinations:
            thread = CompareHandler(config.project, destination, self.params, self.params['package'])
            threads.append(thread)
            thread.start()
        compare_results = []
        for thread in threads:
            thread.join()
            compare_results.append(thread.result)
        debug('compare_results')
        debug(compare_results)

        # Index each destination's file properties by username and file name.
        destination_dict = {}
        for cr in compare_results:
            cr_dict = {}
            for fpfp in cr.fileProperties:
                cr_dict[fpfp.fileName] = fpfp
            destination_dict[cr.username] = cr_dict
        debug('destination_dict')
        debug(destination_dict)

        # For every source file, classify the pending change per destination.
        # (A redundant second package.xml check inside the inner loop and a
        # large commented-out duplicate of this logic were removed.)
        final_compare_result = {}
        for d in destinations:
            final_compare_result[d['username']] = {}
        for file_name, file_details in source_dict.iteritems():
            if 'package.xml' in file_name:
                continue
            for username, username_value in destination_dict.iteritems():
                destination_retrieve_details = destination_dict[username]
                short_file_name = file_name.split('/')[-1]
                mtype = util.get_meta_type_by_suffix(short_file_name.split('.')[-1])
                if file_name not in destination_retrieve_details:
                    final_compare_result[username][file_name] = {
                        'name': short_file_name,
                        'type': mtype['xmlName'],
                        'action': 'insert',
                        'message': 'Create'
                    }
                else:
                    destination_file_detail = destination_retrieve_details[file_name]
                    source_file_detail = source_dict[file_name]
                    if source_file_detail.lastModifiedDate >= destination_file_detail.lastModifiedDate:
                        final_compare_result[username][file_name] = {
                            'name': short_file_name,
                            'type': mtype['xmlName'],
                            'action': 'update',
                            'message': 'You will overwrite this file'
                        }
                    else:
                        final_compare_result[username][file_name] = {
                            'name': short_file_name,
                            'type': mtype['xmlName'],
                            'action': 'update_conflict',
                            'message': 'Destination file is newer than source file'
                        }
        debug('final_compare_result')
        debug(final_compare_result)
        if self.args.respond_with_html == True:
            html = util.generate_html_response('deploy_compare', final_compare_result, self.params)
            response = json.loads(util.generate_success_response(html, "html"))
            response['compare_success'] = True
            return json.dumps(response)
        else:
            return json.dumps(final_compare_result, indent=4)

    # Deployment phase: archive (optionally) and deploy to each destination.
    for destination in destinations:
        if archive_deployments:
            deploy_path = os.path.join(config.project.location, "deploy", destination['username'])
            if not os.path.exists(deploy_path):
                os.makedirs(deploy_path)
            if not os.path.isfile(os.path.join(config.project.location, "deploy", '.config')):
                config_file = open(os.path.join(config.project.location, "deploy", '.config'), 'wb')
                config_file_contents = {
                    'deployments': {
                        'named': [],
                        'timestamped': []
                    }
                }
                config_file.write(json.dumps(config_file_contents))
                config_file.close()
            ts = time.time()
            # Windows forbids ':' in directory names, hence the space variant.
            if not config.is_windows:
                timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
            else:
                timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H %M %S')
            if deploy_name:
                # A named archive replaces any previous archive with that name.
                if os.path.isdir(os.path.join(deploy_path, deploy_name)):
                    shutil.rmtree(os.path.join(deploy_path, deploy_name))
                os.makedirs(os.path.join(deploy_path, deploy_name))
                util.extract_base64_encoded_zip(deploy_metadata.zipFile, os.path.join(deploy_path, deploy_name))
                config_file_json = util.parse_json_from_file(os.path.join(config.project.location, "deploy", '.config'))
                named_deployment = {
                    'destination': destination['username'],
                    'name': deploy_name,
                    'timestamp': timestamp,
                    'id': util.get_random_string(30),
                    'package': os.path.join(deploy_path, deploy_name, 'unpackaged', 'package.xml')
                }
                config_file_json['deployments']['named'].append(named_deployment)
                config_file = open(os.path.join(config.project.location, "deploy", '.config'), 'wb')
                config_file.write(json.dumps(config_file_json))
                config_file.close()
            else:
                os.makedirs(os.path.join(deploy_path, timestamp))
                util.extract_base64_encoded_zip(deploy_metadata.zipFile, os.path.join(deploy_path, timestamp))
                config_file_json = util.parse_json_from_file(os.path.join(config.project.location, "deploy", '.config'))
                timestamped_deployment = {
                    'destination': destination['username'],
                    'timestamp': timestamp,
                    'id': util.get_random_string(30),
                    'package': os.path.join(deploy_path, timestamp, 'unpackaged', 'package.xml')
                }
                config_file_json['deployments']['timestamped'].append(timestamped_deployment)
                config_file = open(os.path.join(config.project.location, "deploy", '.config'), 'wb')
                config_file.write(json.dumps(config_file_json))
                config_file.close()
        thread = DeploymentHandler(config.project, destination, self.params, deploy_metadata)
        threads.append(thread)
        thread.start()

    deploy_results = []
    for thread in threads:
        thread.join()
        deploy_results.append(thread.result)

    if self.args.respond_with_html == True:
        html = util.generate_html_response(self.args.operation, deploy_results, self.params)
        response = json.loads(util.generate_success_response(html, "html"))
        response['deploy_success'] = True
        # if deployment to one org fails, the entire deploy was not successful
        for result in deploy_results:
            if result['success'] == False:
                response['deploy_success'] = False
                break
        return json.dumps(response)
    else:
        # Fix: json.dumps has no 'index' keyword; the original raised
        # TypeError here. The intended argument is indent=4.
        return json.dumps(deploy_results, indent=4)
def execute(self):
    """Compile (deploy) the selected files to the Salesforce org.

    self.params:
        files  -- list of absolute file paths to compile
        action -- when set to 'overwrite', the conflict check is skipped

    Apex-only selections can be compiled through the tooling API (when the
    ``mm_compile_with_tooling_api`` setting is on and the API version allows
    it); anything else goes through a metadata API deploy of a tmp package.
    Returns a response payload describing the compile result.

    Cleanups: removed the unused ``file_ext`` local and the dead commented-out
    re-entrant retry; modernized ``except`` syntax to the py2.6+/py3 form.
    """
    project = config.project
    files = self.params.get('files', None)
    use_tooling_api = config.connection.get_plugin_client_setting('mm_compile_with_tooling_api', False)
    check_for_conflicts = config.connection.get_plugin_client_setting('mm_compile_check_conflicts', False)

    compiling_apex_metadata = True
    for f in files:
        if f.split('.')[-1] not in util.TOOLING_API_EXTENSIONS:
            # cannot use tooling api
            compiling_apex_metadata = False
            break

    # when compiling apex metadata, check to see if it is newer on the server
    if check_for_conflicts and compiling_apex_metadata:
        if 'action' not in self.params or self.params['action'] != 'overwrite':
            has_conflict, msg = config.project.conflict_manager.check_for_conflicts(files)
            if has_conflict:
                return msg

    # use tooling api here, if possible
    if use_tooling_api == True and compiling_apex_metadata and int(float(util.SFDC_API_VERSION)) >= 27:
        if 'metadata_container' not in project.settings or project.settings['metadata_container'] == None:
            container_id = project.sfdc_client.get_metadata_container_id()
            new_settings = project.settings
            new_settings['metadata_container'] = container_id
            project.put_settings_file(new_settings)
        else:
            container_id = project.settings['metadata_container']
        try:
            result = project.sfdc_client.compile_with_tooling_api(files, container_id)
        except MetadataContainerException:
            # the cached container is stale; recreate it and ensure only a
            # single retry (never re-enter the command recursively)
            project.sfdc_client.delete_mavensmate_metadatacontainers_for_this_user()
            response = project.sfdc_client.new_metadatacontainer_for_this_user()
            project.update_setting("metadata_container", response["id"])
            result = project.sfdc_client.compile_with_tooling_api(files, response["id"])
        if 'Id' in result and 'State' in result:
            if result['State'] == 'Completed':
                project.conflict_manager.refresh_local_store(files=files)
            return util.generate_response(result)
    # the user has either chosen not to use the tooling api, or it's non apex metadata
    else:
        try:
            # Pair every file with its -meta.xml companion (both directions).
            for f in files:
                if '-meta.xml' in f:
                    corresponding_file = f.split('-meta.xml')[0]
                    if corresponding_file not in files:
                        files.append(corresponding_file)
            for f in files:
                if '-meta.xml' in f:
                    continue
                file_ext = f.split('.')[-1]
                metadata_type = util.get_meta_type_by_suffix(file_ext)
                if metadata_type == None:
                    # Document bodies can have arbitrary suffixes; detect them
                    # by their parent "documents" directory instead.
                    if sys.platform == "win32":
                        dir_parts = f.split("\\")
                    else:
                        dir_parts = f.split("/")
                    if 'documents' in dir_parts:
                        metadata_type = util.get_meta_type_by_name("Document")
                if metadata_type != None and 'metaFile' in metadata_type and metadata_type['metaFile'] == True:
                    corresponding_file = f + '-meta.xml'
                    if corresponding_file not in files:
                        files.append(corresponding_file)

            metadata_package_dict = util.get_metadata_hash(files)
            tmp = util.put_tmp_directory_on_disk()
            os.makedirs(os.path.join(tmp, "unpackaged"))
            # copy files from project directory to tmp
            for full_file_path in files:
                if 'package.xml' in full_file_path:
                    continue
                if config.is_windows:
                    destination = os.path.join(tmp, 'unpackaged', full_file_path.split('\\src\\')[1])
                else:
                    destination = os.path.join(tmp, 'unpackaged', full_file_path.split('/src/')[1])
                destination_directory = os.path.dirname(destination)
                if not os.path.exists(destination_directory):
                    os.makedirs(destination_directory)
                shutil.copy2(full_file_path, destination_directory)

            package_xml = util.get_package_xml_contents(metadata_package_dict)
            util.put_package_xml_in_directory(os.path.join(tmp, "unpackaged"), package_xml)
            zip_file = util.zip_directory(tmp, tmp)
            deploy_params = {
                "zip_file": zip_file,
                "rollback_on_error": True,
                "ret_xml": True
            }
            deploy_result = project.sfdc_client.deploy(deploy_params)
            d = xmltodict.parse(deploy_result, postprocessor=util.xmltodict_postprocessor)
            result = d["soapenv:Envelope"]["soapenv:Body"]['checkDeployStatusResponse']['result']
            shutil.rmtree(tmp)
            # Get new properties for the files we just compiled
            if result['success'] == True:
                project.conflict_manager.refresh_local_store(files=files)
            return json.dumps(result)
        except Exception as e:
            # best-effort cleanup; tmp may not exist if the failure was early
            try:
                shutil.rmtree(tmp)
            except:
                pass
            return util.generate_error_response(e.message)
def execute(self):
    """Create a new metadata component in the org from template files.

    self.params must carry ``metadata_type``, an optional ``github_template``,
    and a ``params`` dict (the new-style payload) that includes ``api_name``.
    If the name already exists server-side, the remote copy is fetched into
    the project (when missing locally) and an MMException is raised. On
    success the generated files are deployed, copied into the project's src/
    tree, registered in package.xml, and the deploy result returned as JSON.
    """
    project = config.project
    sfdc_client = config.sfdc_client
    metadata_type = self.params.get('metadata_type', None)
    github_template = self.params.get('github_template', None)
    params = self.params.get('params', None)

    if params == None:
        raise MMException('The payload to create metadata has recently changed. If you are using Sublime Text, you likely need to update your MavensMate plugin to 3.4.8+')
    if "api_name" not in params or params["api_name"] == None:
        return util.generate_error_response("You must provide a name for the new metadata.")
    api_name = params.get('api_name')

    if sfdc_client.does_metadata_exist(object_type=metadata_type, name=api_name) == True:
        described = util.get_meta_type_by_name(metadata_type)
        local_copy = os.path.join(project.location, 'src', described['directoryName'], api_name + '.' + described['suffix'])
        fetched = ""
        if not os.path.exists(local_copy):
            # pull the server copy down so the user can inspect it locally
            self.params['files'] = [local_copy]
            RefreshSelectedMetadataCommand(params=self.params, args=self.args).execute()
            fetched = ", fetched metadata file from server"
        raise MMException("This API name is already in use in your org" + fetched + ".")

    # stage skeleton files plus a package.xml, then push them to the org
    tmp, tmp_unpackaged = util.put_tmp_directory_on_disk(True)
    util.put_skeleton_files_on_disk(metadata_type, tmp_unpackaged, github_template, params)
    package_xml_body = util.get_package_xml_contents({metadata_type: [api_name]})
    util.put_package_xml_in_directory(tmp_unpackaged, package_xml_body)
    zip_file = util.zip_directory(tmp, tmp)
    deploy_result = sfdc_client.deploy({
        "zip_file": zip_file,
        "rollback_on_error": True,
        "ret_xml": True
    })
    parsed = xmltodict.parse(deploy_result, postprocessor=util.xmltodict_postprocessor)

    # mirror the generated source (and -meta.xml) files into the project tree
    meta_dir = ""
    files = []
    target_dir = None
    for walked_dir, walked_subdirs, leaf_names in os.walk(tmp_unpackaged):
        for leaf in leaf_names:
            if 'package.xml' in leaf:
                continue
            staged_path = os.path.join(walked_dir, leaf)
            if '-meta.xml' in leaf:
                ext = leaf.replace('-meta.xml', '').split(".")[-1]
            else:
                ext = leaf.split(".")[-1]
            suffix_type = util.get_meta_type_by_suffix(ext)
            if suffix_type != None:
                meta_dir = suffix_type['directoryName']
                target_dir = os.path.join(project.location, 'src', meta_dir)
                if not os.path.exists(target_dir):
                    os.makedirs(target_dir)
                files.append(os.path.join(target_dir, leaf))
            elif ext != "xml":
                continue
            # only apex files and meta.xml files should make it to here
            shutil.copy(staged_path, target_dir)
    shutil.rmtree(tmp)
    project.update_package_xml_with_metadata(metadata_type, api_name)
    project.conflict_manager.refresh_local_store(files=files)
    return json.dumps(parsed["soapenv:Envelope"]["soapenv:Body"]['checkDeployStatusResponse']['result'])