def get_org_metadata(raw=False, selectBasedOnPackageXml=False, selectedIds=None, keyword=None, **kwargs):
    """Return the org metadata index for the current project.

    raw                     -- return the index as a JSON string instead of parsed objects
    selectBasedOnPackageXml -- mark nodes selected per the project's package.xml
    selectedIds             -- ids to mark checked in the returned tree
    keyword                 -- visibility filter applied via crawlJson
    kwargs['package_location'] -- optional alternate package.xml path (raw mode only)
    """
    # FIX: selectedIds previously defaulted to a mutable [] shared across calls.
    if selectedIds is None:
        selectedIds = []
    project = config.project
    if project.get_is_metadata_indexed():
        if raw:
            org_metadata_raw = util.get_file_as_string(os.path.join(project.location, "config", ".org_metadata"))
            org_index = json.loads(org_metadata_raw)
            if kwargs.get('package_location', None) != None:
                project.select_metadata_based_on_package_xml(org_index, kwargs.get('package_location'))
            elif selectBasedOnPackageXml:
                project.select_metadata_based_on_package_xml(org_index)
            elif len(selectedIds) > 0 or keyword != None:
                if keyword != None:
                    crawlJson.setVisibility(org_index, keyword)
                if len(selectedIds) > 0:
                    crawlJson.setChecked(org_index, selectedIds)
            return json.dumps(org_index)
        else:
            org_index = util.parse_json_from_file(os.path.join(project.location, "config", ".org_metadata"))
            if selectBasedOnPackageXml:
                project.select_metadata_based_on_package_xml(org_index)
            elif len(selectedIds) > 0 or keyword != None:
                if keyword != None:
                    crawlJson.setVisibility(org_index, keyword)
                if len(selectedIds) > 0:
                    crawlJson.setChecked(org_index, selectedIds)
            return org_index
    else:
        # NOTE(review): `self` is not defined in this module-level function, so
        # this branch raises NameError when metadata has not been indexed yet.
        # Confirm the intended source of params for IndexMetadataCommand.
        IndexMetadataCommand(params=self.params).execute()
        org_index = util.parse_json_from_file(os.path.join(project.location, "config", ".org_metadata"))
        project.select_metadata_based_on_package_xml(org_index)
        return org_index
def get_apex_completions(search_name, search_name_extra=None):
    """Look up completions for an Apex class, preferring the per-class symbol
    cache under config/.symbols and falling back to the symbol tables stored
    in config/.apex_file_properties. When search_name_extra names an inner
    class, completions come from that inner class instead."""
    debug('Attempting to get completions')
    debug('search_name: ',search_name)
    debug('search_name_extra: ',search_name_extra)
    config_dir = os.path.join(config.project.location, 'config')
    symbols_dir = os.path.join(config_dir, '.symbols')
    if os.path.exists(symbols_dir):
        cache_file = os.path.join(symbols_dir, search_name+".json")
        if os.path.exists(cache_file):
            symbol_table = util.parse_json_from_file(cache_file)
            if search_name_extra == None or search_name_extra == '':
                return get_symbol_table_completions(symbol_table)
            elif 'innerClasses' in symbol_table and len(symbol_table['innerClasses']) > 0:
                for inner in symbol_table['innerClasses']:
                    if inner["name"] == search_name_extra:
                        return get_completions_for_inner_class(inner)
    props_path = os.path.join(config_dir, '.apex_file_properties')
    if not os.path.exists(props_path):
        return []
    apex_props = util.parse_json_from_file(props_path)
    wanted_key = search_name+".cls"
    for prop_name in apex_props.keys():
        if prop_name != wanted_key:
            continue
        entry = apex_props[prop_name]
        if 'symbolTable' not in entry or entry["symbolTable"] == None:
            continue
        symbol_table = entry['symbolTable']
        if search_name_extra == None or search_name_extra == '':
            return get_symbol_table_completions(symbol_table)
        elif 'innerClasses' in symbol_table and len(symbol_table['innerClasses']) > 0:
            for inner in symbol_table['innerClasses']:
                if inner["name"] == search_name_extra:
                    return get_completions_for_inner_class(inner)
    debug('no symbol table found for '+search_name)
def get_plugin_client_settings():
    """Load the default and user plugin-client settings files that live
    alongside this module and return them keyed by origin."""
    here = os.path.dirname(__file__)
    return {
        'default': util.parse_json_from_file(os.path.join(here, "default_client_settings.json")),
        'user': util.parse_json_from_file(os.path.join(here, "user_client_settings.json")),
    }
def get_field_completions(object_name):
    """Return (display, insert) completion tuples for the fields of
    object_name.

    Prefers the local src/objects/<name>.object file (richer field metadata);
    falls back to the cached org metadata index. Returns an empty list when
    neither source is available.
    """
    completions = []
    object_file = os.path.join(config.project.location, "src", "objects", object_name+".object")
    if os.path.isfile(object_file):
        # => object fields from src directory (more info on field metadata, so is primary)
        object_dom = parse(object_file)
        for node in object_dom.getElementsByTagName('fields'):
            field_name = ''
            field_type = ''
            for child in node.childNodes:
                # only fullName/type elements are of interest
                if child.nodeName != 'fullName' and child.nodeName != 'type':
                    continue
                if child.nodeName == 'fullName':
                    field_name = child.firstChild.nodeValue
                elif child.nodeName == 'type':
                    field_type = child.firstChild.nodeValue
            completions.append((field_name+" \t"+field_type, field_name))
        return sorted(completions)
    elif os.path.isfile(os.path.join(config.project.location, "config", ".org_metadata")):
        # => parse org metadata, looking for object fields
        jsonData = util.parse_json_from_file(os.path.join(config.project.location, "config", ".org_metadata"))
        for metadata_type in jsonData:
            if 'xmlName' in metadata_type and metadata_type['xmlName'] == 'CustomObject':
                for object_type in metadata_type['children']:
                    if 'text' in object_type and object_type['text'].lower() == object_name.lower():
                        for attr in object_type['children']:
                            if 'text' in attr and attr['text'] == 'fields':
                                for field in attr['children']:
                                    completions.append((field['text'], field['text']))
        # kept unsorted to match historical behavior of this branch
        return completions
    # FIX: previously fell off the end and returned None implicitly; an empty
    # list is falsy like None but safe for len()/iteration in callers.
    return []
def get_plugin_client_settings(self):
    # Assemble the effective plugin settings, layered by origin:
    #   'default' -> shipped defaults, 'user' -> user overrides,
    #   'project' -> per-project overrides (Sublime only).
    # Later loads below deliberately overwrite earlier ones.
    settings = {}
    user_path = self.get_plugin_settings_path("User")
    def_path = self.get_plugin_settings_path("MavensMate")
    '''
        if the default path for settings is none, we're either dealing with a bad client setup or a new client like Atom.io.
        Let's load the settings from the default cache and optionally allow them to pipe settings in via STDIN
    '''
    if def_path == None:
        # NOTE(review): if plugin_client matches none of these three editors,
        # file_name is never bound and the parse below raises NameError —
        # confirm callers guarantee ATOM/SUBLIME_TEXT/BRACKETS here.
        if 'ATOM' in self.plugin_client:
            file_name = 'atom'
        elif 'SUBLIME_TEXT' in self.plugin_client:
            file_name = 'st3'
        elif 'BRACKETS' in self.plugin_client:
            file_name = 'brackets'
        settings['default'] = util.parse_json_from_file(
            config.base_path + "/" + config.support_dir + "/config/" + file_name + ".json")
        # settings piped in by the client take the role of user settings
        if config.plugin_client_settings != None:
            settings['user'] = config.plugin_client_settings
    else:
        # normal client: optionally pick up per-project sublime settings
        workspace = self.params.get('workspace', None)
        if self.project_name != None and workspace != None:
            try:
                settings['project'] = util.parse_json_from_file(
                    os.path.join(workspace, self.project_name, self.project_name + '.sublime-settings'))
            except:
                debug('Project settings could not be loaded')
    # user settings are best-effort; default settings are mandatory
    if not user_path == None:
        try:
            settings['user'] = util.parse_json_from_file(user_path)
        except:
            debug('User settings could not be loaded')
    if not def_path == None:
        try:
            settings['default'] = util.parse_json_from_file(def_path)
        except:
            raise MMException(
                'Could not load default MavensMate settings.')
    if settings == {}:
        raise MMException(
            'Could not load MavensMate settings. Please ensure they contain valid JSON'
        )
    return settings
def get_symbol_table(class_name):
    """Return the cached Apex symbol table for class_name, or None.

    Checks the per-class cache in config/.symbols first, then falls back to
    the symbolTable embedded in config/.apex_file_properties. Any error
    (missing project, bad JSON, ...) yields None — lookup is best-effort.
    """
    try:
        if os.path.exists(os.path.join(config.project.location, 'config', '.symbols')):
            # FIX: was .replace(".cls","json"), which turned "Foo.cls" into
            # "Foojson" and then looked up "Foojson.json". Strip the extension
            # so the cache file "<Name>.json" is found.
            class_name_json = os.path.basename(class_name).replace(".cls", "")
            if os.path.exists(os.path.join(config.project.location, 'config', '.symbols', class_name_json+".json")):
                return util.parse_json_from_file(os.path.join(config.project.location, "config", ".symbols", class_name_json+".json"))
        if not os.path.exists(os.path.join(config.project.location, 'config', '.apex_file_properties')):
            return None
        apex_props = util.parse_json_from_file(os.path.join(config.project.location, "config", ".apex_file_properties"))
        # NOTE(review): this comparison appends ".cls", so it only matches when
        # class_name was passed without its extension — confirm caller contract.
        for p in apex_props.keys():
            if p == class_name+".cls" and 'symbolTable' in apex_props[p]:
                return apex_props[p]['symbolTable']
        return None
    except:
        # deliberate best-effort: any failure means "no symbol table"
        return None
def get_plugin_client_settings(self):
    # Build the plugin settings dict keyed by origin ('default', 'user',
    # 'project'); later loads intentionally override earlier ones.
    settings = {}
    user_path = self.get_plugin_settings_path("User")
    def_path = self.get_plugin_settings_path("MavensMate")
    """ if the default path for settings is none, we're either dealing with a bad client setup or a new client like Atom.io. Let's load the settings from the default cache and optionally allow them to pipe settings in via STDIN """
    if def_path == None:
        # NOTE(review): an unrecognized plugin_client leaves file_name unbound
        # and the parse below raises NameError — confirm the client set.
        if "ATOM" in self.plugin_client:
            file_name = "atom"
        elif "SUBLIME_TEXT" in self.plugin_client:
            file_name = "st3"
        elif "BRACKETS" in self.plugin_client:
            file_name = "brackets"
        settings["default"] = util.parse_json_from_file(
            config.base_path + "/" + config.support_dir + "/config/" + file_name + ".json"
        )
        # settings piped in by the client act as the user layer
        if config.plugin_client_settings != None:
            settings["user"] = config.plugin_client_settings
    else:
        # normal client: optionally load per-project sublime settings
        workspace = self.params.get("workspace", None)
        if self.project_name != None and workspace != None:
            try:
                settings["project"] = util.parse_json_from_file(
                    os.path.join(workspace, self.project_name, self.project_name + ".sublime-settings")
                )
            except:
                debug("Project settings could not be loaded")
    # user settings are best-effort; defaults are required
    if not user_path == None:
        try:
            settings["user"] = util.parse_json_from_file(user_path)
        except:
            debug("User settings could not be loaded")
    if not def_path == None:
        try:
            settings["default"] = util.parse_json_from_file(def_path)
        except:
            raise MMException("Could not load default MavensMate settings.")
    if settings == {}:
        raise MMException("Could not load MavensMate settings. Please ensure they contain valid JSON")
    return settings
def get_local_store(self):
    """Return the parsed local-store JSON for this instance, or an empty
    dict when the file is missing or unreadable (never raises)."""
    try:
        store = util.parse_json_from_file(self.local_store_path)
    except:
        store = None
    return store if store != None else {}
def execute(self):
    # Run anonymous Apex (optionally a saved apex-script) through the
    # Salesforce client and return a response describing compile/run outcome.
    if 'script_name' in self.params:
        #running an apex script
        # load the body from the project's apex-scripts directory
        self.params["body"] = util.get_file_as_string(
            os.path.join(config.project.location, "apex-scripts",
                         self.params["script_name"]))
        if 'debug_categories' not in self.params and not os.path.isfile(
                os.path.join(config.project.location, "config", ".apex_script")):
            # no explicit categories and no saved log settings: default level
            self.params["debug_categories"] = [{
                "category": "Apex_code",
                "level": "DEBUG"
            }]
        elif os.path.isfile(
                os.path.join(config.project.location, "config", ".apex_script")):
            # saved log settings win; expand the levels map into the
            # category list the API expects
            log_settings = util.parse_json_from_file(
                os.path.join(config.project.location, "config", ".apex_script"))
            categories = []
            levels = log_settings["levels"]
            for category in levels.keys():
                categories.append({
                    "category": category,
                    "level": levels[category]
                })
            self.params["debug_categories"] = categories
    elif 'debug_categories' not in self.params:
        # ad-hoc body with no categories supplied: same default
        self.params["debug_categories"] = [{
            "category": "Apex_code",
            "level": "DEBUG"
        }]
    return_log = self.params.get("return_log", True)
    execute_result = config.sfdc_client.execute_apex(self.params)
    # project only the documented result fields into the response
    result = {
        'column': execute_result['column'],
        'compileProblem': execute_result['compileProblem'],
        'compiled': execute_result['compiled'],
        'exceptionMessage': execute_result['exceptionMessage'],
        'exceptionStackTrace': execute_result['exceptionStackTrace'],
        'line': execute_result['line'],
        'success': execute_result['success'],
    }
    if 'log' in execute_result and return_log:
        result['log'] = execute_result['log']
    if result['success']:
        # optionally archive the executed Apex + log in the project
        log_apex = config.connection.get_plugin_client_setting(
            'mm_log_anonymous_apex', False)
        if log_apex:
            location = config.project.log_anonymous_apex(
                self.params['body'], execute_result['log'],
                self.params.get("script_name", None))
            result["log_location"] = location
    return util.generate_response(result)
def execute(self):
    """Assemble the three name lists used for Apex completions: org custom
    objects (optional), locally-cached custom classes, and the standard
    System classes bundled with the plugin."""
    objects = []
    if config.project != None and config.project.location != None:
        if config.connection.get_plugin_client_setting('mm_use_org_metadata_for_completions', False):
            org_index_path = os.path.join(config.project.location, "config", ".org_metadata")
            if os.path.isfile(org_index_path):
                # walk the cached org index looking for CustomObject names
                for metadata_type in util.parse_json_from_file(org_index_path):
                    if 'xmlName' in metadata_type and metadata_type['xmlName'] == 'CustomObject':
                        for object_type in metadata_type['children']:
                            objects.append({'type': 'CustomObject', 'name': object_type['text']})
    custom_apex_classes = []
    if config.project != None and config.project.location != None:
        symbols_dir = os.path.join(config.project.location, "config", ".symbols")
        if os.path.isdir(symbols_dir):
            # each cached symbol file corresponds to one local Apex class
            for dirpath, dirnames, filenames in os.walk(symbols_dir):
                for file_name in filenames:
                    if '-meta.xml' in file_name:
                        continue
                    custom_apex_classes.append({
                        'type': 'Custom Apex Class',
                        'name': file_name.replace(".json", ""),
                    })
    apex_completions = util.parse_json_from_file(
        os.path.join(config.base_path, config.support_dir, "sforce", "metadata", "apex.json"))
    standard_apex_classes = [
        {'type': 'Standard Apex Class', 'name': top_level_class_name}
        for top_level_class_name in apex_completions["publicDeclarations"]["System"].keys()
    ]
    return util.generate_success_response({
        'standard': standard_apex_classes,
        'custom': custom_apex_classes,
        'objects': objects,
    }, "string")
def get_plugin_client_settings(self):
    """Collect plugin settings layered by origin ('default', 'user',
    'project'); sources loaded later deliberately override earlier ones."""
    settings = {}
    user_path = self.get_plugin_settings_path("User")
    def_path = self.get_plugin_settings_path("MavensMate")
    if def_path == None:
        # No default settings path: bad client setup or a new client like
        # Atom.io. Fall back to the bundled defaults and accept settings
        # piped in by the client as the user layer.
        if 'ATOM' in self.plugin_client:
            bundled = 'atom'
        elif 'SUBLIME_TEXT' in self.plugin_client:
            bundled = 'st3'
        elif 'BRACKETS' in self.plugin_client:
            bundled = 'brackets'
        settings['default'] = util.parse_json_from_file(
            config.base_path + "/" + config.support_dir + "/config/" + bundled + ".json")
        if config.plugin_client_settings != None:
            settings['user'] = config.plugin_client_settings
    else:
        # Normal client: optionally load per-project sublime settings.
        workspace = self.params.get('workspace', None)
        if self.project_name != None and workspace != None:
            project_settings_path = os.path.join(
                workspace, self.project_name, self.project_name + '.sublime-settings')
            try:
                settings['project'] = util.parse_json_from_file(project_settings_path)
            except:
                debug('Project settings could not be loaded')
    if user_path != None:
        # user settings are best-effort
        try:
            settings['user'] = util.parse_json_from_file(user_path)
        except:
            debug('User settings could not be loaded')
    if def_path != None:
        # default settings are mandatory when a path exists
        try:
            settings['default'] = util.parse_json_from_file(def_path)
        except:
            raise MMException('Could not load default MavensMate settings.')
    if settings == {}:
        raise MMException('Could not load MavensMate settings. Please ensure they contain valid JSON')
    return settings
def execute(self):
    """Run anonymous Apex (optionally a saved apex-script) and return a
    response summarizing the compile/run outcome."""
    params = self.params
    if 'script_name' in params:
        # running an apex script: load its body from the project
        params["body"] = util.get_file_as_string(
            os.path.join(config.project.location, "apex-scripts", params["script_name"]))
        log_config_path = os.path.join(config.project.location, "config", ".apex_script")
        has_log_config = os.path.isfile(log_config_path)
        if 'debug_categories' not in params and not has_log_config:
            params["debug_categories"] = [{"category" : "Apex_code", "level" : "DEBUG"}]
        elif has_log_config:
            # saved log settings win; expand levels map to category list
            log_settings = util.parse_json_from_file(log_config_path)
            levels = log_settings["levels"]
            params["debug_categories"] = [
                {"category" : name, "level" : levels[name]} for name in levels.keys()
            ]
    elif 'debug_categories' not in params:
        params["debug_categories"] = [{"category" : "Apex_code", "level" : "DEBUG"}]
    return_log = params.get("return_log", True)
    execute_result = config.sfdc_client.execute_apex(params)
    # project only the documented fields into the response
    result = {}
    for key in ('column', 'compileProblem', 'compiled', 'exceptionMessage',
                'exceptionStackTrace', 'line', 'success'):
        result[key] = execute_result[key]
    if 'log' in execute_result and return_log:
        result['log'] = execute_result['log']
    if result['success']:
        # optionally archive the executed Apex + its log in the project
        if config.connection.get_plugin_client_setting('mm_log_anonymous_apex', False):
            result["log_location"] = config.project.log_anonymous_apex(
                params['body'], execute_result['log'], params.get("script_name", None))
    return util.generate_response(result)
def execute(self):
    # Apex autocomplete: given a buffer (`data`), the word under the cursor
    # (`word`) and the originating file, resolve the type being dotted into
    # and return matching completions.
    data = self.params.get("data", None)
    word = self.params.get("word", None)
    file_name = self.params.get("file_name", None)
    # example of data:
    #
    # public with sharing class AUTOTEST {
    #     String someString;
    #     public String myPublicString { get; set; }
    #     public AUTOTEST(String foo , Boolean bar) {
    #         ApexPages.StandardController c;
    #         c.cancel();
    #         String s = 'foo';
    #         s.
    if data == None:
        raise MMException('Please provide data')
    if file_name == None:
        raise MMException('Please provide file_name')
    # NOTE(review): `completions` is appended to below but never initialized
    # in this block — presumably `completions = []` was lost; confirm.
    apex_completions = util.parse_json_from_file(os.path.join(config.base_path, config.support_dir, "sforce", "metadata", "apex.json"))
    typedef = parsehelp.get_type_definition(data)
    debug('autocomplete type definition: ')
    debug(typedef)
    if '<' not in typedef[2] and '[' not in typedef[2]:
        if '.' in typedef[2] and '<' not in typedef[2]:
            # dotted type, e.g. ApexPages.StandardController
            type_parts = typedef[2].split('.')
            typedef_class = type_parts[0] #e.g. ApexPages
            typedef_class_lower = typedef_class.lower()
            typedef_class_extra = type_parts[1] #e.g. StandardController
            typedef_class_extra_lower = typedef_class_extra.lower()
        else:
            typedef_class = typedef[2] #e.g. ApexPages
            typedef_class_lower = typedef_class.lower()
            typedef_class_extra = typedef[4].replace('.','') #e.g. StandardController
            typedef_class_extra_lower = typedef_class_extra.lower()
        # strip generic markers like List<...> from the resolved names
        if '<' in typedef_class:
            typedef_class_lower = re.sub('\<.*?\>', '', typedef_class_lower)
            typedef_class_lower = re.sub('\<', '', typedef_class_lower)
            typedef_class_lower = re.sub('\>', '', typedef_class_lower)
            typedef_class = re.sub('\<.*?\>', '', typedef_class)
            typedef_class = re.sub('\<', '', typedef_class)
            typedef_class = re.sub('\>', '', typedef_class)
        if '[' in typedef_class:
            typedef_class_lower = re.sub('\[.*?\]', '', typedef_class_lower)
            typedef_class = re.sub('\[.*?\]', '', typedef_class)
    else:
        # collection type: keep only the container name
        if '<' in typedef[2]:
            typedef_class = typedef[2].split('<')[0]
        elif '[' in typedef[2]:
            typedef_class = typedef[2].split('[')[0]
        typedef_class_lower = typedef_class.lower()
        typedef_class_extra = ''
        typedef_class_extra_lower = ''
    debug('autocomplete type: ')
    debug(typedef_class) #String
    debug('autocomplete type extra: ')
    debug(typedef_class_extra) #String
    # Page.<name> completions come from the project's Visualforce pages
    if word != None and word == 'Page' and os.path.isdir(os.path.join(config.project.location,"src","pages")):
        for (dirpath, dirnames, filenames) in os.walk(os.path.join(config.project.location,"src","pages")):
            for f in filenames:
                if '-meta.xml' in f:
                    continue
                base_page_name = f.replace(".page", "")
                completions.append({ 'type' : "Visualforce Page", 'name' : base_page_name })
        return util.generate_success_response(completions, 'array')
    if len(typedef[4]) > 1 and '.' in typedef[4]:
        #deeply nested, need to look for properties
        #TODO
        return util.generate_success_response([], 'array')
    #
    # Is typedef_class a STANDARD Apex class?
    #
    apex_class_key = typedef_class
    if apex_class_key == 'DateTime':
        # completions index spells it 'Datetime'
        apex_class_key = 'Datetime'
    if apex_class_key in apex_completions["publicDeclarations"] and typedef_class_extra_lower == '':
        apex_class_key = word
        if apex_class_key == 'DateTime':
            apex_class_key = 'Datetime'
        comp_def = apex_completions["publicDeclarations"].get(apex_class_key)
        for i in comp_def:
            completions.append(i)
        return util.generate_success_response(sorted(completions), 'array')
    elif apex_completions["publicDeclarations"].get(apex_class_key) != None:
        # namespace hit (e.g. ApexPages); drill into the member class
        top_level = apex_completions["publicDeclarations"].get(typedef_class)
        sub_def = top_level.get(word)
        if sub_def == None:
            sub_def = top_level.get(typedef_class_extra)
        completions = get_symbol_table_completions(sub_def)
        return util.generate_success_response(sorted(completions), 'array')
    elif apex_class_key in apex_completions["publicDeclarations"]["System"]:
        if typedef_class == 'DateTime':
            typedef_class = 'Datetime'
        if word == typedef_class: #static
            comp_def = apex_completions["publicDeclarations"]["System"].get(apex_class_key)
        else: #instance
            comp_def = apex_completions["publicDeclarations"]["System"].get(typedef_class)
        completions = get_symbol_table_completions(comp_def)
        return util.generate_success_response(sorted(completions), 'array')
    #
    # Is typedef_class a CUSTOM Apex class?
    #
    # HANDLE CUSTOM APEX CLASS STATIC METHODS
    # e.g. ===> MyCustomClass.doSomethingCool
    elif word != None and os.path.isfile(os.path.join(config.project.location,"src","classes",word+".cls")):
        try:
            completions = get_apex_completions(word)
            return util.generate_success_response(sorted(completions), 'array')
        except:
            return util.generate_success_response([], 'array')
    if typedef_class_lower == None:
        return util.generate_success_response([], 'array')
    # HANDLE CUSTOM APEX INSTANCE METHOD ##
    # MyClass foo = new MyClass()
    # e.g. ===> foo.??
    # TODO: do we still need this given the existence of symbol tables, i don't think so?
    # clazz = parsehelp.extract_class(data)
    # inheritance = parsehelp.extract_inheritance(data, clazz)
    # if inheritance != None:
    #     if os.path.isfile(os.path.join(config.project.location,"src","classes",inheritance+".cls")): #=> apex classes
    #         completions = util.get_apex_completions(inheritance, typedef_class)
    #         return sorted(completions)
    # get symbol table for the seed file
    symbol_table = get_symbol_table(file_name)
    # NOTE(review): parentheses are misplaced below — `type(X is list and ...)`
    # takes type() of a boolean, which is always truthy. Intended:
    # `type(symbol_table["innerClasses"]) is list and len(...) > 0`.
    if symbol_table != None and "innerClasses" in symbol_table and type(symbol_table["innerClasses"] is list and len(symbol_table["innerClasses"]) > 0):
        for ic in symbol_table["innerClasses"]:
            if ic["name"].lower() == typedef_class_lower:
                completions = get_completions_for_inner_class(ic)
                return util.generate_success_response(sorted(completions), 'array')
    if os.path.isfile(os.path.join(config.project.location,"src","classes",typedef_class+".cls")): #=> apex classes
        completions = get_apex_completions(typedef_class, typedef_class_extra)
        return util.generate_success_response(sorted(completions), 'array')
    #TODO: finish
    return util.generate_success_response([], 'array')
    # NOTE(review): everything below is unreachable after the blanket return
    # above. It also references undefined names (`settings`,
    # `completion_flags`) and returns raw lists/tuples rather than response
    # objects, which suggests it was pasted in from an editor-side completion
    # handler. Confirm whether it should be deleted or re-homed.
    if typedef_class.endswith('__r'):
        typedef_class = typedef_class.replace('__r', '__c')
    if os.path.isfile(os.path.join(config.project.location,"src","objects",typedef_class+".object")):
        #=> object fields from src directory (more info on field metadata, so is primary)
        object_dom = parse(os.path.join(config.project.location,"src","objects",typedef_class+".object"))
        for node in object_dom.getElementsByTagName('fields'):
            field_name = ''
            field_type = ''
            for child in node.childNodes:
                if child.nodeName != 'fullName' and child.nodeName != 'type':
                    continue
                if child.nodeName == 'fullName':
                    field_name = child.firstChild.nodeValue
                elif child.nodeName == 'type':
                    field_type = child.firstChild.nodeValue
            completions.append((field_name+" \t"+field_type, field_name))
        return sorted(completions)
    elif os.path.isfile(os.path.join(config.project.location,"config",".org_metadata")) and settings.get('mm_use_org_metadata_for_completions', False):
        #=> parse org metadata, looking for object fields
        jsonData = util.parse_json_from_file(os.path.join(config.project.location,"config",".org_metadata"))
        for metadata_type in jsonData:
            if 'xmlName' in metadata_type and metadata_type['xmlName'] == 'CustomObject':
                for object_type in metadata_type['children']:
                    if 'text' in object_type and object_type['text'].lower() == typedef_class_lower:
                        for attr in object_type['children']:
                            if 'text' in attr and attr['text'] == 'fields':
                                for field in attr['children']:
                                    completions.append((field['text'], field['text']))
        if len(completions) == 0 and '__c' in typedef_class_lower:
            try:
                #need to index custom objects here, because it couldnt be found
                if len(ThreadTracker.get_pending_mm_panel_threads(sublime.active_window())) == 0:
                    params = { 'metadata_types' : ['CustomObject'] }
                    mm.call('refresh_metadata_index', False, params=params)
            except:
                debug('Failed to index custom object metadata')
        else:
            completions.append(('Id', 'Id'))
        return (sorted(completions), completion_flags)
    else:
        return []
def execute(self):
    # Open the selected project files in the Salesforce web UI (or their WSDL
    # pages) by routing the browser through the org's frontdoor.jsp.
    project = config.project
    sfdc_client = config.sfdc_client
    if "files" in self.params:
        if "type" in self.params:
            open_type = self.params.get("type", None)
        else:
            open_type = "edit"
        files = self.params.get("files", None)
        if len(files) > 0:
            # cached file properties give us server id/type without a query
            apex_file_properties = util.parse_json_from_file(os.path.join(project.location,"config",".local_store"))
            opened = []
            for fileabs in files:
                basename = os.path.basename(fileabs)
                if basename not in apex_file_properties:
                    # make sure we have meta data and then get the object type
                    if os.path.isfile(fileabs+"-meta.xml"):
                        xmldoc = minidom.parse(fileabs+"-meta.xml")
                        root = xmldoc.firstChild
                        object_type = root.nodeName
                    else:
                        continue
                    object_id = sfdc_client.get_apex_entity_id_by_name(object_type=object_type, name=basename)
                    if not object_id:
                        continue
                else:
                    props = apex_file_properties[basename]
                    object_type = props['type']
                    object_id = props['id']
                # only ApexClasses that are global and have webservice scope have WSDL files
                if open_type == "wsdl":
                    if object_type != "ApexClass":
                        continue
                    with open(fileabs, 'r') as content_file:
                        content = content_file.read()
                        p = re.compile("global\s+(abstract\s+)?class\s", re.I + re.M)
                        if not p.search(content):
                            continue
                        p = re.compile("\swebservice\s", re.I + re.M)
                        if not p.search(content):
                            continue
                # get the server instance url and set the redirect url
                frontdoor = "https://" + sfdc_client.server_url.split('/')[2] + "/secur/frontdoor.jsp?sid=" + sfdc_client.sid + "&retURL="
                if open_type == "wsdl":
                    f, e = os.path.splitext(basename)
                    ret_url = "/services/wsdl/class/" + f
                else:
                    f, ext = os.path.splitext(basename)
                    if object_type == "CustomObject" and not f.endswith('__c'):
                        # standard object?
                        ret_url = "/p/setup/layout/LayoutFieldList?type=" + f + "%23CustomFieldRelatedList_target"
                    else:
                        ret_url = "/" + object_id
                # open the browser window for this file and track it
                webbrowser.open(frontdoor+ret_url, new=2)
                opened.append(basename)
            if len(opened) == 0:
                return util.generate_error_response("There were no valid files to open.")
            return util.generate_success_response("Opened "+(", ".join(opened))+" on server.")
        return util.generate_error_response("Unable to open file on server.")
    else:
        raise MMException("To open on Salesforce, you must provide an array of 'files'")
def execute(self):
    """Deploy the project's package to one or more destination orgs.

    Phase 1 (unless params['finish'] or comparison is disabled): retrieve the
    package from the source and each destination in parallel and return a
    per-org diff (insert / update / update_conflict per file).
    Phase 2: optionally archive the retrieved package per destination under
    <project>/deploy, then run DeploymentHandler threads and return the
    collected results.
    """
    archive_deployments = config.connection.get_plugin_client_setting("mm_archive_deployments", True)
    finish_deploy = self.params.get('finish', False)
    compare = config.connection.get_plugin_client_setting("mm_compare_before_deployment", True)
    destinations = self.params['destinations']
    deploy_metadata = config.sfdc_client.retrieve(package=self.params['package'])
    deploy_name = self.params.get('new_deployment_name', None)
    threads = []
    if not finish_deploy and compare:
        source_retrieve_result = config.sfdc_client.retrieve(package=self.params['package'])
        debug('source_retrieve_result')
        debug(source_retrieve_result)
        source_dict = {}
        for fp in source_retrieve_result.fileProperties:
            source_dict[fp.fileName] = fp
        debug('source_dict')
        debug(source_dict)
        # need to compare package.xml to destination orgs here
        for destination in destinations:
            thread = CompareHandler(config.project, destination, self.params, self.params['package'])
            threads.append(thread)
            thread.start()
        compare_results = []
        for thread in threads:
            thread.join()
            compare_results.append(thread.result)
        debug('compare_results')
        debug(compare_results)
        # index each org's retrieve result by file name; failed orgs keep
        # their raw error payload
        destination_dict = {}
        for cr in compare_results:
            if 'success' in cr and cr['success'] == False:
                destination_dict[cr['username']] = cr
            else:
                cr_dict = {}
                for fpfp in cr.fileProperties:
                    cr_dict[fpfp.fileName] = fpfp
                destination_dict[cr.username] = cr_dict
        debug('destination_dict')
        debug(destination_dict)
        final_compare_result = {}
        for d in destinations:
            final_compare_result[d['username']] = {}
        for file_name, file_details in source_dict.iteritems():
            if 'package.xml' in file_name:
                continue
            for username, username_value in destination_dict.iteritems():
                if 'success' in username_value and username_value['success'] == False:
                    final_compare_result[username] = username_value
                else:
                    destination_retrieve_details = destination_dict[username]
                    if 'package.xml' in file_name:
                        continue
                    short_file_name = file_name.split('/')[-1]
                    mtype = util.get_meta_type_by_suffix(short_file_name.split('.')[-1])
                    # NOTE(review): insert entries are keyed by short name while
                    # update entries are keyed by full path — preserved as-is.
                    if file_name not in destination_retrieve_details:
                        final_compare_result[username][short_file_name] = {
                            'name' : short_file_name,
                            'type' : mtype['xmlName'],
                            'action': 'insert',
                            'message' : 'Create'
                        }
                    else:
                        destination_file_detail = destination_retrieve_details[file_name]
                        source_file_detail = source_dict[file_name]
                        if source_file_detail.lastModifiedDate >= destination_file_detail.lastModifiedDate:
                            final_compare_result[username][file_name] = {
                                'name' : short_file_name,
                                'type' : mtype['xmlName'],
                                'action' : 'update',
                                'message' : 'You will overwrite this file'
                            }
                        else:
                            final_compare_result[username][file_name] = {
                                'name' : short_file_name,
                                'type' : mtype['xmlName'],
                                'action' : 'update_conflict',
                                'message' : 'Destination file is newer than source file'
                            }
        debug('final_compare_result')
        debug(final_compare_result)
        if self.args.respond_with_html == True:
            html = util.generate_html_response('deploy_compare', final_compare_result, self.params)
            response = util.generate_success_response(html, "html") # returns json
            response['compare_success'] = True
            # if deployment to one org fails, the entire deploy was not successful
            # for result in final_compare_result:
            #     if result['success'] == False:
            #         response['compare_success'] = False
            #         break
            return json.dumps(response)
        else:
            return json.dumps(final_compare_result, indent=4)
    for destination in destinations:
        if archive_deployments:
            # archive the retrieved package under deploy/<username>/...
            deploy_path = os.path.join(config.project.location, "deploy", destination['username'])
            if not os.path.exists(deploy_path):
                os.makedirs(deploy_path)
            config_path = os.path.join(config.project.location, "deploy", '.config')
            if not os.path.isfile(config_path):
                # FIX: file handles are now closed via `with` even on error
                with open(config_path, 'wb') as config_file:
                    config_file.write(json.dumps({'deployments' : {'named' : [], 'timestamped' : []}}))
            ts = time.time()
            if not config.is_windows:
                timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
            else:
                # Windows forbids ':' in directory names
                timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H %M %S')
            if deploy_name:
                named_path = os.path.join(config.project.location, "deploy", destination['username'], deploy_name)
                if os.path.isdir(named_path):
                    shutil.rmtree(named_path)
                os.makedirs(named_path)
                util.extract_base64_encoded_zip(deploy_metadata.zipFile, named_path)
                config_file_json = util.parse_json_from_file(config_path)
                named_deployment = {
                    'destination' : destination['username'],
                    'name' : deploy_name,
                    'timestamp' : timestamp,
                    'id' : util.get_random_string(30),
                    'package' : os.path.join(named_path, 'unpackaged', 'package.xml')
                }
                config_file_json['deployments']['named'].append(named_deployment)
                with open(config_path, 'wb') as config_file:
                    config_file.write(json.dumps(config_file_json))
            else:
                stamped_path = os.path.join(config.project.location, "deploy", destination['username'], timestamp)
                os.makedirs(stamped_path)
                util.extract_base64_encoded_zip(deploy_metadata.zipFile, stamped_path)
                config_file_json = util.parse_json_from_file(config_path)
                timestamped_deployment = {
                    'destination' : destination['username'],
                    'timestamp' : timestamp,
                    'id' : util.get_random_string(30),
                    'package' : os.path.join(stamped_path, 'unpackaged', 'package.xml')
                }
                config_file_json['deployments']['timestamped'].append(timestamped_deployment)
                with open(config_path, 'wb') as config_file:
                    config_file.write(json.dumps(config_file_json))
        thread = DeploymentHandler(config.project, destination, self.params, deploy_metadata)
        threads.append(thread)
        thread.start()
    deploy_results = []
    for thread in threads:
        thread.join()
        deploy_results.append(thread.result)
    if self.args.respond_with_html == True:
        html = util.generate_html_response(self.args.operation, deploy_results, self.params)
        response = util.generate_success_response(html, "html") # returns json
        response['deploy_success'] = True
        # if deployment to one org fails, the entire deploy was not successful
        for result in deploy_results:
            if result['success'] == False:
                response['deploy_success'] = False
                break
        return json.dumps(response)
    else:
        # FIX: was json.dumps(deploy_results, index=4) — `index` is not a valid
        # keyword and raised TypeError; `indent` matches the compare branch.
        return json.dumps(deploy_results, indent=4)
def get_plugin_client_settings():
    """Read the bundled default and user client-settings files (stored next
    to this module) and return them keyed by origin."""
    base_dir = os.path.dirname(__file__)
    settings = {}
    settings['default'] = util.parse_json_from_file(os.path.join(base_dir, "default_client_settings.json"))
    settings['user'] = util.parse_json_from_file(os.path.join(base_dir, "user_client_settings.json"))
    return settings