def delete_apex_checkpoint(self, **kwargs):
    """Delete an Apex checkpoint (ApexExecutionOverlayAction) via the Tooling API.

    Keyword Args:
        overlay_id: Id of the overlay action to delete directly. If absent:
        id: Id of the Apex entity owning the checkpoint (resolved from
            ``file_path`` when not provided).
        file_path: path used to resolve the entity id when ``id`` is None.
        line_number: line of the checkpoint to locate and delete.

    Returns:
        A success response dict; ``__exception_handler`` is invoked on any
        failed HTTP request.
    """
    if 'overlay_id' in kwargs:
        r = requests.delete(
            self.get_tooling_url()+"/sobjects/ApexExecutionOverlayAction/{0}".format(kwargs['overlay_id']),
            headers=self.get_rest_headers(),
            proxies=self.__get_proxies(),
            verify=False)
        if self.__is_failed_request(r):
            self.__exception_handler(r)
        return util.generate_success_response('OK')
    else:
        # renamed from 'id' to avoid shadowing the builtin
        entity_id = kwargs.get('id', None)
        file_path = kwargs.get('file_path', None)
        line_number = kwargs.get('line_number', None)
        if entity_id is None:
            # resolve the Apex entity id from the file on disk
            ext = util.get_file_extension_no_period(file_path)
            api_name = util.get_file_name_no_extension(file_path)
            mtype = util.get_meta_type_by_suffix(ext)
            entity_id = self.get_apex_entity_id_by_name(object_type=mtype['xmlName'], name=api_name)
        query_string = "Select Id from ApexExecutionOverlayAction Where ExecutableEntityId = '{0}' AND Line = {1}".format(entity_id, line_number)
        r = requests.get(
            self.get_tooling_url()+"/query/",
            params={'q': query_string},
            headers=self.get_rest_headers(),
            proxies=self.__get_proxies(),
            verify=False)
        if self.__is_failed_request(r):
            self.__exception_handler(r)
        query_result = util.parse_rest_response(r.text)
        # NOTE(review): raises IndexError when no checkpoint matches the
        # entity/line — confirm callers guard against a missing checkpoint
        overlay_id = query_result['records'][0]['Id']
        r = requests.delete(
            self.get_tooling_url()+"/sobjects/ApexExecutionOverlayAction/{0}".format(overlay_id),
            headers=self.get_rest_headers(),
            proxies=self.__get_proxies(),
            verify=False)
        if self.__is_failed_request(r):
            self.__exception_handler(r)
        return util.generate_success_response('OK')
def check_for_conflicts(self, files):
    """Check whether local copies of metadata files are out of sync with the server.

    For each file, the matching server file property (from a retrieve
    result) is located and its lastModifiedDate compared against the local
    store entry. If the dates differ, or the local entry is already marked
    'dirty', the file is marked dirty and a request-for-action response is
    returned so the user can diff against the server copy.

    Args:
        files: list of local file paths to check.

    Returns:
        (True, msg) for the first conflicting file found, where msg is a
        request-for-action response dict; (False, None) when no conflicts.
    """
    local_store = self.get_local_store()
    retrieve_result = self.project.get_retrieve_result({"files": files})
    properties = retrieve_result.fileProperties
    for f in files:
        ext = util.get_file_extension_no_period(f)
        apex_type = util.get_meta_type_by_suffix(ext)
        apex_entity_api_name = util.get_file_name_no_extension(f)
        # pages/components store source in 'Markup'; classes/triggers in 'Body'
        if apex_type['xmlName'] in ('ApexPage', 'ApexComponent'):
            body_field = 'Markup'
        else:
            body_field = 'Body'
        api_name_plus_extension = apex_entity_api_name + "." + ext
        server_property = None
        for p in properties:
            if p["fullName"] == apex_entity_api_name:
                server_property = p
                try:
                    config.api_name_to_id_dict[p["fullName"]] = p["id"]
                except Exception:
                    # best-effort id cache; never fail the conflict check
                    # (original used a bare except, which also swallowed
                    # KeyboardInterrupt/SystemExit)
                    pass
                break
        if api_name_plus_extension in local_store and server_property is not None:
            local_store_entry = local_store[api_name_plus_extension]
            local_last_modified_date = local_store_entry["lastModifiedDate"]
            server_last_modified_date = server_property['lastModifiedDate']
            last_modified_name = server_property['lastModifiedByName']
            # use the soap endpoint here to help the folks being affected by their
            # proxy refusing REST requests bc of Authorization header
            # https://github.com/joeferraro/MavensMate-SublimeText/issues/315#issuecomment-35996112
            qr = self.project.sfdc_client.execute_query(
                "Select LastModifiedById, LastModifiedDate, LastModifiedBy.Name, {0} From {1} Where Name = '{2}'"
                .format(body_field, apex_type['xmlName'], apex_entity_api_name))
            body = qr['records'][0][body_field]
            body = body.encode('utf-8')
            if str(local_last_modified_date) != str(server_last_modified_date) or local_store_entry['mmState'] == 'dirty':
                if local_store_entry['mmState'] != 'dirty':
                    local_store_entry['mmState'] = 'dirty'
                msg = util.generate_request_for_action_response(
                    "The local version of your file and the server copy are out of sync.\n\n{0} was last modified by {1} on {2}."
                    .format(apex_entity_api_name, last_modified_name, server_last_modified_date),
                    'compile',
                    ["Diff With Server", "Operation Canceled"],
                    tmp_file_path=util.put_tmp_file_on_disk(
                        apex_entity_api_name, body, apex_type.get('suffix', '')))
                self.mark_dirty(api_name_plus_extension)
                return True, msg
    return False, None
def check_for_conflicts(self, files):
    """Detect local/server conflicts for the given metadata files.

    Compares each local store entry's lastModifiedDate with the server's
    file property. On mismatch (or an already-'dirty' local entry) the
    entry is marked dirty and a request-for-action response is returned
    offering a diff with the server copy.

    Args:
        files: list of local file paths to check.

    Returns:
        (True, msg_dict) on the first conflict found, otherwise (False, None).
    """
    local_store = self.get_local_store()
    retrieve_result = self.project.get_retrieve_result({"files": files})
    properties = retrieve_result.fileProperties
    for f in files:
        ext = util.get_file_extension_no_period(f)
        apex_type = util.get_meta_type_by_suffix(ext)
        apex_entity_api_name = util.get_file_name_no_extension(f)
        # ApexPage/ApexComponent keep their source in 'Markup', not 'Body'
        body_field = 'Body'
        if apex_type['xmlName'] in ('ApexPage', 'ApexComponent'):
            body_field = 'Markup'
        api_name_plus_extension = apex_entity_api_name + "." + ext
        server_property = None
        for p in properties:
            if p["fullName"] == apex_entity_api_name:
                server_property = p
                try:
                    config.api_name_to_id_dict[p["fullName"]] = p["id"]
                except Exception:
                    # best-effort cache write; narrowed from a bare except
                    pass
                break
        if api_name_plus_extension in local_store and server_property is not None:
            local_store_entry = local_store[api_name_plus_extension]
            local_last_modified_date = local_store_entry["lastModifiedDate"]
            server_last_modified_date = server_property['lastModifiedDate']
            last_modified_name = server_property['lastModifiedByName']
            # use the soap endpoint here to help the folks being affected by their
            # proxy refusing REST requests bc of Authorization header
            # https://github.com/joeferraro/MavensMate-SublimeText/issues/315#issuecomment-35996112
            qr = self.project.sfdc_client.execute_query(
                "Select LastModifiedById, LastModifiedDate, LastModifiedBy.Name, {0} From {1} Where Name = '{2}'".format(
                    body_field, apex_type['xmlName'], apex_entity_api_name))
            body = qr['records'][0][body_field]
            body = body.encode('utf-8')
            if str(local_last_modified_date) != str(server_last_modified_date) or local_store_entry['mmState'] == 'dirty':
                if local_store_entry['mmState'] != 'dirty':
                    local_store_entry['mmState'] = 'dirty'
                msg = util.generate_request_for_action_response(
                    "The local version of your file and the server copy are out of sync.\n\n{0} was last modified by {1} on {2}."
                    .format(apex_entity_api_name, last_modified_name, server_last_modified_date),
                    'compile',
                    ["Diff With Server", "Operation Canceled"],
                    tmp_file_path=util.put_tmp_file_on_disk(apex_entity_api_name, body, apex_type.get('suffix', ''))
                )
                self.mark_dirty(api_name_plus_extension)
                return True, msg
    return False, None
def get_apex_checkpoints(self, **kwargs):
    """Fetch Apex checkpoints (ApexExecutionOverlayAction records) via the Tooling API.

    Keyword Args:
        file_path: when present, return checkpoints for that entity only;
            the entity id comes from ``id`` or is resolved from the file name.
        id: the Apex entity id (optional; resolved from ``file_path`` if None).

    Returns:
        The parsed REST query response. Without ``file_path``, up to 5000
        checkpoints across the org are returned.
    """
    if 'file_path' in kwargs:
        # renamed from 'id' to avoid shadowing the builtin
        entity_id = kwargs.get('id', None)
        file_path = kwargs.get('file_path', None)
        if entity_id is None:
            ext = util.get_file_extension_no_period(file_path)
            api_name = util.get_file_name_no_extension(file_path)
            mtype = util.get_meta_type_by_suffix(ext)
            entity_id = self.get_apex_entity_id_by_name(object_type=mtype['xmlName'], name=api_name)
        query_string = "Select Id, Line, Iteration, ExpirationDate, IsDumpingHeap from ApexExecutionOverlayAction Where ExecutableEntityId = '{0}'".format(entity_id)
    else:
        query_string = "Select Id, ScopeId, ExecutableEntityId, Line, Iteration, ExpirationDate, IsDumpingHeap from ApexExecutionOverlayAction limit 5000"
    # single request path for both branches (original duplicated this block)
    r = requests.get(
        self.get_tooling_url()+"/query/",
        params={'q': query_string},
        headers=self.get_rest_headers(),
        proxies=self.__get_proxies(),
        verify=False)
    if self.__is_failed_request(r):
        self.__exception_handler(r)
    return util.parse_rest_response(r.text)
def run(self):
    """Scan the project's src directory for Apex/Visualforce anti-patterns.

    Walks src/, parses the files in worker threads, then accumulates
    per-file matches and global counts for each statistic (action pollers,
    hardcoded URLs, refreshers, 'without sharing', @isTest(seeAllData),
    SOQL without WHERE / with negative operators, DML or SOQL inside
    loops) into self.result, which is returned.
    """
    # Collect candidate files by extension.
    for dirname, dirnames, filenames in os.walk(os.path.join(self.project_location,"src")):
        for filename in filenames:
            full_file_path = os.path.join(dirname, filename)
            ext = util.get_file_extension_no_period(full_file_path)
            if ext in apex_extensions_to_check:
                self.apex_files_to_check.append(full_file_path)
            elif ext in vf_extensions_to_check:
                self.vf_files_to_check.append(full_file_path)
    apex_parser_threads = []
    vf_parser_threads = []
    # Parse in parallel, 8 files per worker thread.
    apex_file_chunks = list(util.grouper(8, self.apex_files_to_check))
    vf_file_chunks = list(util.grouper(8, self.vf_files_to_check))
    for files in apex_file_chunks:
        apex_parser_thread = ApexParser(files)
        apex_parser_threads.append(apex_parser_thread)
        apex_parser_thread.start()
    for thread in apex_parser_threads:
        thread.join()
        # only merge results from threads that finished cleanly
        if thread.complete:
            self.apex_parser_results.update(thread.result)
    for files in vf_file_chunks:
        vf_parser_thread = VfParser(files)
        vf_parser_threads.append(vf_parser_thread)
        vf_parser_thread.start()
    for thread in vf_parser_threads:
        thread.join()
        if thread.complete:
            self.vf_parser_results.update(thread.result)
    #pp = pprint.PrettyPrinter(indent=2)
    #pp.pprint(self.parser_results)

    # ---- Visualforce statistics ----
    for file_name in self.vf_files_to_check:
        parser_result = self.vf_parser_results[file_name]
        base_name = os.path.basename(file_name)
        self.vf_result[base_name] = {}
        file_body = util.get_file_as_string(file_name)

        ### ACTION POLLERS
        # parser output lacking "actionPollers" means the file could not be
        # analyzed; skip all VF checks for it
        if "actionPollers" not in parser_result:
            #print file_name
            continue
        action_pollers = parser_result["actionPollers"]
        action_poller_matches = []
        if len(action_pollers) > 0:
            for p in action_pollers:
                line_contents = ""
                # range(row, row+1) yields exactly the one source line
                for lnum in range(p["location"]["row"], p["location"]["row"]+1):
                    line_contents += print_file_line(file_name, lnum)
                p["lineNumber"] = p["location"]["row"]
                p["line_contents"] = line_contents
                action_poller_matches.append(p)
        self.result["visualforce_statistics"]["action_poller"]["results"].append(
            {
                "file_name" : base_name,
                "flagged" : len(action_poller_matches) > 0,
                "matches" : action_poller_matches
            }
        )
        self.action_poller_count += len(action_poller_matches)

        ### HARDCODED URLS
        output_links = parser_result["outputLinks"]
        link_matches = []
        if len(output_links) > 0:
            for p in output_links:
                line_contents = ""
                for lnum in range(p["location"]["row"], p["location"]["row"]+1):
                    line_contents += print_file_line(file_name, lnum)
                p["lineNumber"] = p["location"]["row"]
                p["line_contents"] = line_contents
                link_matches.append(p)
        self.result["visualforce_statistics"]["hardcoded_url"]["results"].append(
            {
                "file_name" : base_name,
                "flagged" : len(link_matches) > 0,
                "matches" : link_matches
            }
        )
        self.hardcoded_link_count += len(link_matches)

        ## REFRESHERS
        # regex scan of the raw file body; "meta" in the match text splits
        # <meta> refreshes from javascript refreshes
        refreshers = re.finditer(js_refresh_pattern, file_body)
        js_matches = []
        meta_matches = []
        for match in refreshers:
            if match != None and "meta" in match.group(0):
                match_string = match.group(0).replace("<", "")
                meta_matches.append(match_string)
            else:
                match_string = match.group(0)
                js_matches.append(match_string)
        if len(js_matches) > 0:
            self.result["visualforce_statistics"]["javascript_refresh"]["results"].append(
                {
                    "file_name" : base_name,
                    "flagged" : len(js_matches) > 0,
                    "matches" : js_matches
                }
            )
            self.javascript_refresh_count += len(js_matches)
        if len(meta_matches) > 0:
            self.result["visualforce_statistics"]["meta_refresh"]["results"].append(
                {
                    "file_name" : base_name,
                    "flagged" : len(meta_matches) > 0,
                    "matches" : meta_matches
                }
            )
            self.meta_refresh_count += len(meta_matches)

    # ---- Apex statistics ----
    for file_name in self.apex_files_to_check:
        parser_result = self.apex_parser_results[file_name]
        base_name = os.path.basename(file_name)
        self.apex_result[base_name] = {}
        file_body = util.get_file_as_string(file_name)

        ### WITHOUT SHARING
        without_sharings = re.finditer(without_sharing_pattern, file_body)
        matches = []
        for match in without_sharings:
            matches.append(match.group(0))
        if len(matches) > 0:
            self.result["apex_statistics"]["without_sharing"]["results"].append(
                {
                    "file_name" : base_name,
                    "flagged" : len(matches) > 0,
                    "matches" : matches
                }
            )
            self.without_sharing_count += len(matches)
        #print parser_result
        # parser output lacking "forLoops" means the file could not be
        # analyzed; skip the remaining Apex checks for it
        if "forLoops" not in parser_result:
            #print file_name
            continue
        for_loops = parser_result["forLoops"]
        dml_statements = parser_result["dmlStatements"]
        queries = parser_result["queries"]
        methods = parser_result["methods"]
        classes = parser_result["classes"]

        #seealldata
        # flag @isTest(seeAllData=true) on both methods and classes
        see_all_data_matches = []
        for m in methods:
            if "annotations" in m and len(m["annotations"]) > 0:
                for a in m["annotations"]:
                    if "pairs" in a:
                        for p in a["pairs"]:
                            if p["name"].lower() == "seealldata" and p["value"]["value"] == True:
                                line_contents = ""
                                # capture the annotation line plus the next one
                                for lnum in range(p["beginLine"], p["beginLine"]+2):
                                    line_contents += print_file_line(file_name, lnum)
                                m["lineNumber"] = p["beginLine"]
                                m["line_contents"] = line_contents
                                see_all_data_matches.append(m)
        for c in classes:
            if "annotations" in c and len(c["annotations"]) > 0:
                for a in c["annotations"]:
                    if "pairs" in a:
                        for p in a["pairs"]:
                            if p["name"].lower() == "seealldata" and p["value"]["value"] == True:
                                line_contents = ""
                                for lnum in range(p["beginLine"], p["beginLine"]+2):
                                    line_contents += print_file_line(file_name, lnum)
                                c["lineNumber"] = p["beginLine"]
                                c["line_contents"] = line_contents
                                see_all_data_matches.append(c)
        if len(see_all_data_matches) > 0:
            self.result["apex_statistics"]["see_all_data_annotations"]["results"].append(
                {
                    "file_name" : base_name,
                    "flagged" : len(see_all_data_matches) > 0,
                    "matches" : see_all_data_matches
                }
            )
            self.see_all_data_count += len(see_all_data_matches)

        #SOQL WITHOUT WHERE CLAUSES
        no_where_clause_matches = []
        negative_operator_matches = []
        for query in queries:
            line_number = query["lineNumber"]
            lower_query = query["statement"].lower()
            #if ' where ' not in lower_query:
            if where_pattern.search(lower_query) == None:
                no_where_clause_matches.append(query)
            #if ' not like ' in lower_query or '!=' in lower_query:
            if not_like_pattern.search(lower_query) != None or "!=" in lower_query:
                negative_operator_matches.append(query)
        if len(no_where_clause_matches) > 0:
            self.result["apex_statistics"]["soql_no_where_clause"]["results"].append(
                {
                    "file_name" : base_name,
                    "flagged" : len(no_where_clause_matches) > 0,
                    "matches" : no_where_clause_matches
                }
            )
            self.no_where_clause_count += len(no_where_clause_matches)
        if len(negative_operator_matches) > 0:
            self.result["apex_statistics"]["soql_negative_operators"]["results"].append(
                {
                    "file_name" : base_name,
                    "flagged" : len(negative_operator_matches) > 0,
                    "matches" : negative_operator_matches
                }
            )
            self.negative_soql_count += len(negative_operator_matches)

        ### DML INSIDE ITERATORS
        dml_matches = []
        query_matches = []
        if len(for_loops) > 0:
            for dml in dml_statements:
                line_number = dml["statement"]["beginLine"]
                # loop is a (startLine, endLine) pair; strict inequality so
                # statements on the loop header/footer lines are not flagged
                for loop in for_loops:
                    if loop[0] < line_number < loop[1]:
                        #this is a dml statement inside an iterator
                        line_contents = ""
                        for lnum in range(loop[0], loop[1]+1):
                            line_contents += print_file_line(file_name, lnum)
                        dml["lineNumber"] = loop[0]
                        dml["line_contents"] = line_contents
                        dml_matches.append(dml)
            for query in queries:
                line_number = query["lineNumber"]
                for loop in for_loops:
                    if loop[0] < line_number < loop[1]:
                        #this is a soql statement inside an iterator
                        query["line_contents"] = print_file_line(file_name, line_number)
                        query_matches.append(query)
        if len(dml_matches) > 0:
            self.result["apex_statistics"]["dml_for_loop"]["results"].append(
                {
                    "file_name" : base_name,
                    "flagged" : len(dml_matches) > 0,
                    "matches" : dml_matches
                }
            )
            self.dml_for_loop_count += len(dml_matches)
        if len(query_matches) > 0:
            self.result["apex_statistics"]["soql_for_loop"]["results"].append(
                {
                    "file_name" : base_name,
                    "flagged" : len(query_matches) > 0,
                    "matches" : query_matches
                }
            )
            self.soql_for_loop_count += len(query_matches)

    # publish the aggregate counts alongside the per-file results
    self.result["apex_statistics"]["without_sharing"]["count"] = self.without_sharing_count
    self.result["apex_statistics"]["dml_for_loop"]["count"] = self.dml_for_loop_count
    self.result["apex_statistics"]["soql_for_loop"]["count"] = self.soql_for_loop_count
    self.result["apex_statistics"]["soql_negative_operators"]["count"] = self.negative_soql_count
    self.result["apex_statistics"]["soql_no_where_clause"]["count"] = self.no_where_clause_count
    self.result["apex_statistics"]["see_all_data_annotations"]["count"] = self.see_all_data_count
    self.result["visualforce_statistics"]["action_poller"]["count"] = self.action_poller_count
    self.result["visualforce_statistics"]["javascript_refresh"]["count"] = self.javascript_refresh_count
    self.result["visualforce_statistics"]["meta_refresh"]["count"] = self.meta_refresh_count
    self.result["visualforce_statistics"]["hardcoded_url"]["count"] = self.hardcoded_link_count
    return self.result