Example #1
    def execute(self):
        """
            Executes requested command
        """
        try:
            self.__setup_connection()

            #if the arg switch argument is included, the request is to launch the out of box
            #MavensMate UI, so we generate the HTML for the UI and launch the process
            #example: mm -o new_project --ui
            if self.args.ui_switch == True:
                config.logger.debug('UI operation requested, attempting to launch MavensMate UI')
                tmp_html_file = util.generate_ui(self.operation,self.payload,self.args)
                if config.connection.plugin_client == 'ATOM': #returning location of html file here so we can open the page inside an atom panel
                    self.__printr(util.generate_success_response(tmp_html_file))
                else:
                    util.launch_ui(tmp_html_file)
                    self.__printr(util.generate_success_response('UI Generated Successfully'))
            
            #non-ui command
            else:        
                commands = get_available_commands()
                #debug(commands)
                try:
                    command_clazz = commands[self.operation](params=self.payload,args=self.args)                
                except KeyError:
                    raise MMUnsupportedOperationException('Could not find the operation you requested. Be sure the command is located in mm.commands, inherits from Command (found in basecommand.py) and includes an execute method.')
                except NotImplementedError:
                    raise MMException("This command is not properly implemented. Be sure it contains an 'execute' method.")
                self.__printr(command_clazz.execute())
        except Exception, e:
            self.__printr(e, is_exception=True)
Example #2
 def execute(self):
     debug_users = config.project.get_debug_users()
     debug_settings = config.project.get_debug_settings()
     if self.params.get('running_user_only', False):
         payload = {}
         payload["debug_categories"] = debug_settings["levels"]
         payload["expiration"]       = debug_settings["expiration"]
         payload["user_id"]          = config.sfdc_client.user_id
         payload["type"]             = "user"
         response = NewTraceFlagCommand(params=payload).execute()
         response = json.loads(response)
         if "success" in response and response["success"] == False:
             return util.generate_error_response(response["errors"][0])
         return util.generate_success_response('Logging for runner user setup successfully')
     else:
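         # create a TraceFlag for each configured debug user, bailing out on the first failure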
         for u in debug_users:
             payload = {}
             payload["debug_categories"] = debug_settings["levels"]
             payload["expiration"]       = debug_settings["expiration"]
             payload["user_id"]          = u
             payload["type"]             = "user"
             response = NewTraceFlagCommand(params=payload).execute()
             response = json.loads(response)
             if "success" in response and response["success"] == False:
                 return util.generate_error_response(response["errors"][0])
         return util.generate_success_response('{0} Log(s) created successfully'.format(str(len(debug_users))))
Example #3
    def execute(self):
        number_of_logs = 0
        limit   = config.connection.get_plugin_client_setting('mm_number_of_logs_limit', 20)
        id_list = ','.join("'"+item+"'" for item in config.project.get_debug_users())
        # log_result = config.sfdc_client.execute_query('Select Id, LogUserId, SystemModstamp From ApexLog Where SystemModstamp >= TODAY and Location != \'HeapDump\' AND LogUserId IN ({0}) order by SystemModstamp desc limit {1}'.format(id_list, str(limit)))

        log_result = config.sfdc_client.execute_query('Select Id, LogUserId, SystemModstamp From ApexLog WHERE Location != \'HeapDump\' AND LogUserId IN ({0}) order by SystemModstamp desc limit {1}'.format(id_list, str(limit)))
        config.logger.debug(log_result)
        logs = []
        if 'records' in log_result:
            # make log directory if it doesnt exist
            if os.path.isdir(os.path.join(config.connection.workspace,config.project.project_name,"debug","logs")) == False:
                os.makedirs(os.path.join(config.connection.workspace,config.project.project_name,"debug","logs"))

            for r in log_result['records']:
                modstamp = str(r["SystemModstamp"])
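                # colons are not valid in Windows file names, so swap them for spaces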
                if config.is_windows:
                    modstamp = modstamp.replace(':', ' ')
                file_name = modstamp+"-"+r["LogUserId"]+".log"

                # only download the log if it doesn't already exist on the filesystem
                if not os.path.isfile(os.path.join(config.connection.workspace,config.project.project_name,"debug","logs",file_name)):
                    id = r["Id"]
                    log = config.sfdc_client.download_log(id)
                    logs.append({"id":id,"modstamp":modstamp,"log":log,"userid":r["LogUserId"]})
            
            # for the_file in os.listdir(os.path.join(config.connection.workspace,config.project.project_name,"debug","logs")):
            #     file_path = os.path.join(config.connection.workspace,config.project.project_name,"debug","logs", the_file)
            #     try:
            #         if os.path.isfile(file_path):
            #             os.unlink(file_path)
            #     except Exception, e:
            #         print e
            
            new_logs = []
            # write logs to file system
            number_of_logs = len(logs)
            for log in logs:
                modstamp = log["modstamp"]
                if config.is_windows:
                    modstamp = modstamp.replace(':', ' ')
                file_name = modstamp+"-"+log["userid"]+".log"
                src = open(os.path.join(config.connection.workspace,config.project.project_name,"debug","logs",file_name), "w")
                new_logs.append(os.path.join(config.connection.workspace,config.project.project_name,"debug","logs",file_name))
                src.write(log["log"])
                src.close()

            if number_of_logs > 0:
                res = util.generate_success_response(str(number_of_logs)+' logs successfully downloaded')
                res['logs'] = new_logs
            else:
                res = util.generate_success_response('No new logs from today available for download.')
            return res
        else:
            config.logger.debug("No logs to download")
            return util.generate_success_response('No logs from today available to download.')
Example #4
    def execute(self):
        log_id = self.params['log_id']
        debug('log_id is: ' + log_id)

        log_result = config.sfdc_client.execute_query(
            "Select Id, LogUserId, SystemModstamp From ApexLog WHERE Id = '{0}'"
            .format(log_id))

        debug('log SOQL result: ')
        debug(log_result)

        logSobject = None
        if 'records' in log_result:
            logSobject = log_result['records'][0]

        # make log directory if it doesnt exist
        if os.path.isdir(
                os.path.join(config.connection.workspace,
                             config.project.project_name, "debug",
                             "logs")) == False:
            os.makedirs(
                os.path.join(config.connection.workspace,
                             config.project.project_name, "debug", "logs"))

        modstamp = str(logSobject["SystemModstamp"])
        if config.is_windows:
            modstamp = modstamp.replace(':', ' ')
        file_name = modstamp + "-" + logSobject["LogUserId"] + ".log"

        file_path = os.path.join(config.connection.workspace,
                                 config.project.project_name, "debug", "logs",
                                 file_name)
        # only download the log if it doesn't already exist on the filesystem
        if os.path.isfile(file_path):
            return util.generate_success_response('Log already downloaded')

        id = logSobject["Id"]
        downloaded_log = config.sfdc_client.download_log(id)
        debug('loaded log: ')
        debug(downloaded_log)

        # write file
        src = open(file_path, "w")
        src.write(downloaded_log)
        src.close()

        log_result = {
            "id": id,
            "modstamp": modstamp,
            "path": file_path,
            "userid": logSobject["LogUserId"]
        }
        res = util.generate_success_response('Log successfully downloaded')
        res['log'] = log_result
        return res
Example #5
 def execute(self):
     if self.args.respond_with_html == True:
        health_check_dict = config.project.run_health_check()
        html = util.generate_html_response(self.args.operation, health_check_dict)
        return util.generate_success_response(html, "html")
     else:
        return json.dumps(config.project.run_health_check(),indent=4)
Example #6
 def execute(self):
     users = self.params.get('users', None)
     levels = self.params.get('debug_categories', None)
     expiration = self.params.get('expiration', None)
     config.project.put_debug_file(users, levels, expiration)
     return util.generate_success_response(
         "Debug settings updated successfully")
Example #7
    def execute(self):
        sfdc_client = config.sfdc_client
 
        empty_package_xml = util.get_empty_package_xml_contents()
        tmp, tmp_unpackaged = util.put_tmp_directory_on_disk(True)
        util.put_empty_package_xml_in_directory(tmp_unpackaged, empty_package_xml)
        zip_file = util.zip_directory(tmp, tmp)
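        # deploy the empty package with is_test=True so the org runs the requested classes and returns a runTestResult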
        deploy_params = {
            "zip_file"          : zip_file,
            "rollback_on_error" : True,
            "ret_xml"           : True,
            "classes"           : self.params.get('classes', []),
            "debug_categories"  : self.params.get('debug_categories', [])
        }
        deploy_result = sfdc_client.deploy(deploy_params,is_test=True)
        #debug(deploy_result)
        d = xmltodict.parse(deploy_result,postprocessor=util.xmltodict_postprocessor)
        if int(float(util.SFDC_API_VERSION)) >= 29:
            result = d["soapenv:Envelope"]["soapenv:Body"]['checkDeployStatusResponse']['result']['details']['runTestResult']
        else:
            result = d["soapenv:Envelope"]["soapenv:Body"]['checkDeployStatusResponse']['result']['runTestResult']

        try:
            result['log'] = d["soapenv:Envelope"]["soapenv:Header"]["DebuggingInfo"]["debugLog"]
        except:
            result['log'] = 'Log not available.'

        shutil.rmtree(tmp)

        if self.args.respond_with_html:
            html = util.generate_html_response(self.args.operation, result, self.params)
            return util.generate_success_response(html, "html")
        else:
            return result
Example #8
    def execute(self):
        project = config.project
        if 'directories' in self.params and len(self.params['directories']) == 1 and os.path.basename(self.params['directories'][0]) == "src":
            return project.clean(reset_metadata_container=False)
        else:
            retrieve_result = project.get_retrieve_result(self.params)
            #take this opportunity to freshen the cache
            project.conflict_manager.refresh_local_store(retrieve_result.fileProperties)
            util.extract_base64_encoded_zip(retrieve_result.zipFile, project.location)

            #TODO: handle exception that could render the project unusable bc of lost files
            #replace project metadata with retrieved metadata
            for dirname, dirnames, filenames in os.walk(os.path.join(project.location,"unpackaged")):
                for filename in filenames:
                    full_file_path = os.path.join(dirname, filename)
                    if '/unpackaged/package.xml' in full_file_path or '\\unpackaged\\package.xml' in full_file_path:
                        continue
                    if 'win32' in sys.platform:
                        destination = full_file_path.replace('\\unpackaged\\', '\\src\\')
                    else:
                        destination = full_file_path.replace('/unpackaged/', '/src/')
                    destination_directory = os.path.dirname(destination)
                    if not os.path.exists(destination_directory):
                        os.makedirs(destination_directory)
                    shutil.move(full_file_path, destination)
            shutil.rmtree(os.path.join(project.location,"unpackaged"))
            if os.path.exists(os.path.join(project.location,"metadata.zip")):
                os.remove(os.path.join(project.location,"metadata.zip"))
            return util.generate_success_response("Refresh Completed Successfully")
Example #9
 def execute(self):
     debug_users = config.project.get_debug_users()
     debug_settings = config.project.get_debug_settings()
     for user_id in debug_users:
         config.sfdc_client.delete_trace_flags(user_id)
     return util.generate_success_response(
         'Stopped logging for debug users.')
Example #10
 def execute(self):
     if self.args.respond_with_html == True:
         health_check_dict = config.project.run_health_check()
         html = util.generate_html_response(self.args.operation,
                                            health_check_dict)
         return util.generate_success_response(html, "html")
     else:
         return json.dumps(config.project.run_health_check(), indent=4)
Example #11
    def execute(self):
        if int(float(util.SFDC_API_VERSION)) <= 28 or config.connection.get_plugin_client_setting("mm_use_legacy_test_ui", False):
            #raise MMException("This command requires mm_api_version to be set to 29.0 or higher.")
            return RunUnitTestsCommand(params=self.params,args=self.args).execute()

        project = config.project
        sfdc_client = config.sfdc_client

        generate_logs = self.params.get("generate_logs", False)
        if generate_logs:
            NewQuickTraceFlagCommand(params={"running_user_only":True}).execute()

        test_classes = self.params.get("classes", None)
        debug('running tests for')
        debug(test_classes)
        if test_classes == None or test_classes == []: #need to run all tests in project
            classes = []
            triggers = []
            test_classes = []
            for dirname, dirnames, filenames in os.walk(os.path.join(project.location,"src","classes")):
                for filename in filenames:
                    if "test" in filename.lower() and "-meta.xml" not in filename:
                        test_classes.append(util.get_file_name_no_extension(filename))
                    elif "-meta.xml" not in filename:
                        classes.append(util.get_file_name_no_extension(filename))
            for dirname, dirnames, filenames in os.walk(os.path.join(project.location,"src","triggers")):
                for filename in filenames:
                    if "-meta.xml" not in filename:
                        triggers.append(util.get_file_name_no_extension(filename))
        else: #user has specified certain tests to run
            classes = []
            triggers = []
            for dirname, dirnames, filenames in os.walk(os.path.join(project.location,"src","classes")):
                for filename in filenames:
                    if "test" not in filename.lower() and "-meta.xml" not in filename:
                        classes.append(util.get_file_name_no_extension(filename))
            for dirname, dirnames, filenames in os.walk(os.path.join(project.location,"src","triggers")):
                for filename in filenames:
                    if "-meta.xml" not in filename:
                        triggers.append(util.get_file_name_no_extension(filename))
        
        params = { "files" : test_classes }
        test_results = sfdc_client.run_async_apex_tests(params, False)
        
        params = { "classes" : classes, "triggers" : triggers, "test_classes" : test_classes }
        coverage_report = sfdc_client.get_apex_test_coverage(params, transform_ids=True)
        debug(">>>>>>>>>>")
        debug(coverage_report)
        result = {
            "test_results"  : test_results,
            "coverage"      : coverage_report
        }

        if self.args.respond_with_html:
            html = util.generate_html_response(self.args.operation, result, self.params)
            return util.generate_success_response(html, "html")
        else:
            return result
Example #12
    def execute(self):
        result = config.sfdc_client.get_apex_checkpoints()
        if 'records' not in result or len(result['records']) == 0:
            config.project.put_overlays_file('[]')
            return util.generate_success_response(
                'Could Not Find Any Apex Execution Overlays')
        else:
            id_to_name_map = {}
            class_ids = []
            trigger_ids = []

            for r in result['records']:
                entity_id = r["ExecutableEntityId"]
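                # 01q and 01p are the Salesforce key prefixes for ApexTrigger and ApexClass Ids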
                if entity_id.startswith('01q'):
                    trigger_ids.append("Id = '" + entity_id + "'")
                elif entity_id.startswith('01p'):
                    class_ids.append("Id = '" + entity_id + "'")

            class_filter = ' or '.join(class_ids)
            trigger_filter = ' or '.join(trigger_ids)

            if len(class_ids) > 0:
                soql = 'Select Id, Name From ApexClass WHERE ' + class_filter
                class_result = config.sfdc_client.execute_query(soql)

                if 'records' in class_result:
                    for r in class_result['records']:
                        id_to_name_map[r['Id']] = r['Name']

            if len(trigger_ids) > 0:
                soql = 'Select Id, Name From ApexTrigger WHERE ' + trigger_filter
                trigger_result = config.sfdc_client.execute_query(soql)

                if 'records' in trigger_result:
                    for r in trigger_result['records']:
                        id_to_name_map[r['Id']] = r['Name']

            for r in result['records']:
                r['API_Name'] = id_to_name_map[r['ExecutableEntityId']]

            overlays = json.dumps(result['records'])
            config.project.put_overlays_file(overlays)
            return util.generate_success_response(
                'Apex Execution Overlays Successfully Indexed to config/.overlays'
            )
Example #13
    def execute(self):
        """
            Executes requested command
        """
        try:
            self.__setup_connection()

            #if the arg switch argument is included, the request is to launch the out of box
            #MavensMate UI, so we generate the HTML for the UI and launch the process
            #example: mm -o new_project --ui
            if self.args.ui_switch == True:
                config.logger.debug(
                    'UI operation requested, attempting to launch MavensMate UI'
                )
                tmp_html_file = util.generate_ui(self.operation, self.payload,
                                                 self.args)
                if config.connection.plugin_client == 'ATOM':  #returning location of html file here so we can open the page inside an atom panel
                    self.__printr(
                        util.generate_success_response(tmp_html_file))
                else:
                    util.launch_ui(tmp_html_file)
                    self.__printr(
                        util.generate_success_response(
                            'UI Generated Successfully'))

            #non-ui command
            else:
                commands = get_available_commands()
                #debug(commands)
                try:
                    command_clazz = commands[self.operation](
                        params=self.payload, args=self.args)
                except KeyError:
                    raise MMUnsupportedOperationException(
                        'Could not find the operation you requested. Be sure the command is located in mm.commands, inherits from Command (found in basecommand.py) and includes an execute method.'
                    )
                except NotImplementedError:
                    raise MMException(
                        "This command is not properly implemented. Be sure it contains an 'execute' method."
                    )
                self.__printr(command_clazz.execute())
        except Exception, e:
            self.__printr(e, is_exception=True)
Example #14
    def execute(self):
        result = config.sfdc_client.get_apex_checkpoints()
        if 'records' not in result or len(result['records']) == 0:
            config.project.put_overlays_file('[]')
            return util.generate_success_response('Could Not Find Any Apex Execution Overlays')
        else:
            id_to_name_map = {}
            class_ids = []
            trigger_ids = []

            for r in result['records']:
                entity_id = r["ExecutableEntityId"]
                if entity_id.startswith('01q'):
                    trigger_ids.append("Id = '"+entity_id+"'")
                elif entity_id.startswith('01p'):
                    class_ids.append("Id = '"+entity_id+"'")

            class_filter = ' or '.join(class_ids)
            trigger_filter = ' or '.join(trigger_ids)

            if len(class_ids) > 0:
                soql = 'Select Id, Name From ApexClass WHERE '+class_filter
                class_result = config.sfdc_client.execute_query(soql)

                if 'records' in class_result:
                    for r in class_result['records']:
                        id_to_name_map[r['Id']] = r['Name']


            if len(trigger_ids) > 0:
                soql = 'Select Id, Name From ApexTrigger WHERE '+trigger_filter
                trigger_result = config.sfdc_client.execute_query(soql)

                if 'records' in trigger_result:
                    for r in trigger_result['records']:
                        id_to_name_map[r['Id']] = r['Name']

            for r in result['records']:
                r['API_Name'] = id_to_name_map[r['ExecutableEntityId']]

            overlays = json.dumps(result['records'])
            config.project.put_overlays_file(overlays)
            return util.generate_success_response('Apex Execution Overlays Successfully Indexed to config/.overlays')
Example #15
    def execute(self):
        log_id = self.params['log_id']
        debug('log_id is: '+log_id)

        log_result = config.sfdc_client.execute_query("Select Id, LogUserId, SystemModstamp From ApexLog WHERE Id = '{0}'".format(log_id))
        
        debug('log SOQL result: ')
        debug(log_result)
        
        logSobject = None
        if 'records' in log_result:
            logSobject = log_result['records'][0]

        # make log directory if it doesnt exist
        if os.path.isdir(os.path.join(config.connection.workspace,config.project.project_name,"debug","logs")) == False:
            os.makedirs(os.path.join(config.connection.workspace,config.project.project_name,"debug","logs"))

        modstamp = str(logSobject["SystemModstamp"])
        if config.is_windows:
            modstamp = modstamp.replace(':', ' ')
        file_name = modstamp+"-"+logSobject["LogUserId"]+".log"

        file_path = os.path.join(config.connection.workspace,config.project.project_name,"debug","logs",file_name)
        # only download the log if it doesn't already exist on the filesystem
        if os.path.isfile(file_path):
            return util.generate_success_response('Log already downloaded')


        id = logSobject["Id"]
        downloaded_log = config.sfdc_client.download_log(id)
        debug('loaded log: ')
        debug(downloaded_log)

        # write file
        src = open(file_path, "w")
        src.write(downloaded_log)
        src.close()

        log_result = {"id":id,"modstamp":modstamp,"path":file_path,"userid":logSobject["LogUserId"]}
        res = util.generate_success_response('Log successfully downloaded')
        res['log'] = log_result
        return res
Example #16
 def execute(self):
     file_name = self.params["file_name"]
     extension = util.get_file_extension_no_period(file_name)
     mtype = util.get_meta_type_by_suffix(extension)
     full_file_path = os.path.join(config.project.location, "src", mtype["directoryName"], file_name)
     params = {
         "project_name"  : config.project.project_name,
         "file_name"     : full_file_path,
         "line_number"   : self.params.get("line_number", 0)
     } 
     config.connection.run_subl_command("open_file_in_project", json.dumps(params))
     return util.generate_success_response("ok")
Example #17
 def sign_in_with_github(self, creds):
     try:
         response = github.sign_in(creds)
         if 'message' in response:
             return util.generate_error_response(response['message'])
         elif 'authentication' in response:
             src = open(os.path.join(self.get_app_settings_directory(),'.github.json'), "wb")
             src.write(json.dumps(response, sort_keys=False, indent=4))
             src.close()
             return util.generate_success_response('Connected to GitHub successfully!')
         else:
             return util.generate_error_response(response)
     except Exception, e:
         return util.generate_error_response("Error connecting to GitHub: "+e.message)
Example #18
 def execute(self):  
     org_connections = GetOrgConnectionsCommand(params=self.params).execute()
     config.logger.debug('=======')
     config.logger.debug(org_connections)
     updated_org_connections = []
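     # keep every org connection except the one being deleted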
     for connection in org_connections:
         if connection['id'] != self.params['id']:
             updated_org_connections.append(connection)
     src = open(os.path.join(config.project.location,"config",".org_connections"), 'wb')
     json_data = json.dumps(updated_org_connections, sort_keys=False, indent=4)
     src.write(json_data)
     src.close()
     util.delete_password_by_key(self.params['id'])
     return util.generate_success_response('Org Connection Successfully Deleted')
Example #19
 def execute(self):
     file_name = self.params["file_name"]
     extension = util.get_file_extension_no_period(file_name)
     mtype = util.get_meta_type_by_suffix(extension)
     full_file_path = os.path.join(config.project.location, "src",
                                   mtype["directoryName"], file_name)
     params = {
         "project_name": config.project.project_name,
         "file_name": full_file_path,
         "line_number": self.params.get("line_number", 0)
     }
     config.connection.run_subl_command("open_file_in_project",
                                        json.dumps(params))
     return util.generate_success_response("ok")
Example #20
 def sign_in_with_github(self, creds):
     try:
         response = github.sign_in(creds)
         if 'message' in response:
             return util.generate_error_response(response['message'])
         elif 'authentication' in response:
             src = open(os.path.join(self.get_app_settings_directory(),'.github.json'), "wb")
             src.write(json.dumps(response, sort_keys=False, indent=4))
             src.close()
             return util.generate_success_response('Connected to GitHub successfully!')
         else:
             return util.generate_error_response(response)
     except Exception, e:
         return util.generate_error_response("Error connecting to GitHub: "+e.message)
Example #21
    def execute(self):
        if 'package' not in self.params:
            raise MMException('"package" definition required in JSON body')
        package = self.params['package']

        #intercept and overwrite customobject retrieve to include standard objects
        if 'CustomObject' in package:
            for member in package['CustomObject']:
                if member == "*":
                    pass
                    #TODO

        clean_result = config.project.clean(package=package,overwrite_package_xml=True)
        if clean_result['success'] == True:
            return util.generate_success_response('Project Edited Successfully')
        else:
            return util.generate_error_response(clean_result['body'])
Example #22
    def execute(self):
        if 'package' not in self.params:
            raise MMException('"package" definition required in JSON body')
        package = self.params['package']

        #intercept and overwrite customobject retrieve to include standard objects
        if 'CustomObject' in package:
            for member in package['CustomObject']:
                if member == "*":
                    pass
                    #TODO

        clean_result = config.project.clean(package=package,
                                            overwrite_package_xml=True)
        if clean_result['success'] == True:
            return util.generate_success_response(
                'Project Edited Successfully')
        else:
            return util.generate_error_response(clean_result['body'])
Example #23
 def execute(self):
     c = MavensMateClient(credentials={
         "username"  :   self.params['username'],
         "password"  :   self.params['password'],
         "org_type"  :   self.params['org_type']
     })
     org_connection_id = util.new_mavensmate_id()
     util.put_password_by_key(org_connection_id, self.params['password'])
     org_connections = GetOrgConnectionsCommand(params=self.params).execute()
     org_connections.append({
         'id'            : org_connection_id,
         'username'      : self.params['username'],
         'environment'   : self.params['org_type']
     })
     src = open(os.path.join(config.project.location,"config",".org_connections"), 'wb')
     json_data = json.dumps(org_connections, sort_keys=False, indent=4)
     src.write(json_data)
     src.close()
     return util.generate_success_response('Org Connection Successfully Created')
Example #24
    def execute(self):
        number_of_checkpoints = 0
        #user_id = self.params.get('user_id', config.sfdc_client.user_id)
        limit   = self.params.get('limit', 20)
        checkpoint_results = config.sfdc_client.get_apex_checkpoint_results(config.sfdc_client.user_id, limit)
        if 'records' in checkpoint_results:
            number_of_checkpoints = len(checkpoint_results['records'])
            if os.path.isdir(os.path.join(config.project.location,"debug","checkpoints")):
                shutil.rmtree(os.path.join(config.project.location,"debug","checkpoints"))

            os.makedirs(os.path.join(config.project.location,"debug","checkpoints"))

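            # group checkpoint results by Apex class name and line number so each heap dump gets its own directory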
            apex_entity_to_lines = {}
            for r in checkpoint_results['records']:
                if 'HeapDump' in r and 'className' in r['HeapDump']:
                    if r['HeapDump']['className'] not in apex_entity_to_lines:
                        apex_entity_to_lines[r['HeapDump']['className']] = [r['Line']]
                    else:
                        apex_entity_to_lines[r['HeapDump']['className']].append(r['Line'])

            for apex_entity_name, lines in apex_entity_to_lines.items():
                if not os.path.isdir(os.path.join(config.project.location,"debug","checkpoints",apex_entity_name)):
                    os.makedirs(os.path.join(config.project.location,"debug","checkpoints",apex_entity_name))
                for l in lines:
                    if not os.path.isdir(os.path.join(config.project.location,"debug","checkpoints",apex_entity_name,str(l))):
                        os.makedirs(os.path.join(config.project.location,"debug","checkpoints",apex_entity_name,str(l)))

            for r in checkpoint_results['records']:
                if 'HeapDump' in r and 'className' in r['HeapDump']:
                    modstamp = r["HeapDump"]["heapDumpDate"]
                    if config.is_windows:
                        modstamp = modstamp.replace(':', ' ')
                    file_name = modstamp+"-"+r["UserId"]+".json"
                    file_path = os.path.join(config.project.location,"debug","checkpoints",r['HeapDump']['className'],str(r['Line']),file_name)
                    src = open(file_path, "w")
                    src.write(json.dumps(r,sort_keys=True,indent=4))
                    src.close()
        else:
            config.logger.debug("No checkpoints to download")

        return util.generate_success_response(str(number_of_checkpoints)+' Checkpoints successfully downloaded')
Example #25
    def execute(self):
        objects = []
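        # collect CustomObject names from the cached org metadata (config/.org_metadata) when that setting is enabled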
        if config.project != None and config.project.location != None:
            if config.connection.get_plugin_client_setting('mm_use_org_metadata_for_completions', False):
                if os.path.isfile(os.path.join(config.project.location,"config",".org_metadata")): #=> parse org metadata, looking for object names
                    jsonData = util.parse_json_from_file(os.path.join(config.project.location,"config",".org_metadata"))
                    for metadata_type in jsonData:
                        if 'xmlName' in metadata_type and metadata_type['xmlName'] == 'CustomObject':
                            for object_type in metadata_type['children']:
                                objects.append({
                                    'type' : 'CustomObject',
                                    'name' : object_type['text']
                                })

        custom_apex_classes = []
        if config.project != None and config.project.location != None:
            if os.path.isdir(os.path.join(config.project.location,"config",".symbols")): #=> get list of classes
                for (dirpath, dirnames, filenames) in os.walk(os.path.join(config.project.location,"config",".symbols")):
                    for f in filenames:
                        if '-meta.xml' in f: continue
                        class_name = f.replace(".json", "")
                        custom_apex_classes.append({
                            'type' : 'Custom Apex Class',
                            'name' : class_name
                        })
            
        standard_apex_classes = []
        apex_completions = util.parse_json_from_file(os.path.join(config.base_path, config.support_dir, "sforce", "metadata", "apex.json"))
        for top_level_class_name in apex_completions["publicDeclarations"]["System"].keys():
            standard_apex_classes.append({
                'type' : 'Standard Apex Class',
                'name' : top_level_class_name
            })

        response = {
            'standard' : standard_apex_classes,
            'custom' : custom_apex_classes,
            'objects' : objects
        }

        return util.generate_success_response(response, "string")
Example #26
    def execute(self):
        sfdc_client = config.sfdc_client

        empty_package_xml = util.get_empty_package_xml_contents()
        tmp, tmp_unpackaged = util.put_tmp_directory_on_disk(True)
        util.put_empty_package_xml_in_directory(tmp_unpackaged,
                                                empty_package_xml)
        zip_file = util.zip_directory(tmp, tmp)
        deploy_params = {
            "zip_file": zip_file,
            "rollback_on_error": True,
            "ret_xml": True,
            "classes": self.params.get('classes', []),
            "debug_categories": self.params.get('debug_categories', [])
        }
        deploy_result = sfdc_client.deploy(deploy_params, is_test=True)
        #debug(deploy_result)
        d = xmltodict.parse(deploy_result,
                            postprocessor=util.xmltodict_postprocessor)
        if int(float(util.SFDC_API_VERSION)) >= 29:
            result = d["soapenv:Envelope"]["soapenv:Body"][
                'checkDeployStatusResponse']['result']['details'][
                    'runTestResult']
        else:
            result = d["soapenv:Envelope"]["soapenv:Body"][
                'checkDeployStatusResponse']['result']['runTestResult']

        try:
            result['log'] = d["soapenv:Envelope"]["soapenv:Header"][
                "DebuggingInfo"]["debugLog"]
        except:
            result['log'] = 'Log not available.'

        shutil.rmtree(tmp)

        if self.args.respond_with_html:
            html = util.generate_html_response(self.args.operation, result,
                                               self.params)
            return util.generate_success_response(html, "html")
        else:
            return result
Example #27
    def execute(self):
        tag = self.params.get("tag", None) #tag should be formatted like: apex:tagname or flow:interview or support:caseFeed, etc.
        if tag == None:
            raise MMException('Please provide tag')
        
        import mm.vf as vf
        completions = [] 
        if tag in vf.tag_defs:
            def_entry = vf.tag_defs[tag]

            for key, value in def_entry['attribs'].items():
                definition = {
                    'attribute' : key,
                    'type' : value['type']
                }
                
                if 'values' in value:
                    definition['values'] = value['values']

                completions.append(definition)

        return util.generate_success_response(completions, "array")
Example #28
    def execute(self):
        data = self.params.get("data", None)
        
        # example of data:
        #
        # public with sharing class AUTOTEST {

        #     String someString;
        #     public String myPublicString { get; set; }

        #     public AUTOTEST(String foo , Boolean bar) {
        #         ApexPages.StandardController c;
        #         c.cancel();
        #         String s = 'foo';
        #         s.

        if data == None:
            raise MMException('Please provide data')

        typedef = parsehelp.get_type_definition(data)

        return util.generate_success_response(list(typedef), "array")
Example #29
    def execute(self):
        data = self.params.get("data", None)
        
        # example of data:
        #
        # public with sharing class AUTOTEST {

        #     public class MyClass {
        #         public String foo;
        #         public String bar;
        #     }

        #     public AUTOTEST(String foo, Boolean bar) {
        #         MyClass foo = new MyClass();
        #         foo.

        if data == None:
            raise MMException('Please provide data')

        class_def = parsehelp.extract_class(data)

        return util.generate_success_response(list(class_def), "array")
Example #30
    def execute(self):
        project = config.project
        sfdc_client = config.sfdc_client
        files = self.params.get('files', None)
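        # pair each file with its companion -meta.xml (and vice versa) so both are deleted together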
        for f in files:
            if '-meta.xml' in f:
                corresponding_file = f.split('-meta.xml')[0]
                if corresponding_file not in files:
                    files.append(corresponding_file)
        for f in files:
            if '-meta.xml' in f:
                continue
            file_ext = f.split('.')[-1]
            metadata_type = util.get_meta_type_by_suffix(file_ext)
            if metadata_type['metaFile'] == True:
                corresponding_file = f + '-meta.xml'
                if corresponding_file not in files:
                    files.append(corresponding_file)

        metadata_package_dict = util.get_metadata_hash(files)
        tmp, tmp_unpackaged = util.put_tmp_directory_on_disk(True)
        package_xml = util.get_package_xml_contents(metadata_package_dict)
        util.put_package_xml_in_directory(tmp_unpackaged, package_xml, True)
        empty_package_xml = util.get_empty_package_xml_contents()
        util.put_empty_package_xml_in_directory(tmp_unpackaged, empty_package_xml)
        zip_file = util.zip_directory(tmp, tmp)
        
        purge_on_delete_setting = config.connection.get_plugin_client_setting("mm_purge_on_delete", False)
        if purge_on_delete_setting:
            describe_result = config.sfdc_client.describeMetadata(retXml=False)
            if describe_result.testRequired == True:
                purge_on_delete_setting = False

        deploy_params = {
            "zip_file"          : zip_file,
            "rollback_on_error" : True,
            "ret_xml"           : True,
            "purge_on_delete"   : purge_on_delete_setting
        }
        delete_result = sfdc_client.delete(deploy_params)
        d = xmltodict.parse(delete_result,postprocessor=util.xmltodict_postprocessor)
        shutil.rmtree(tmp)
        result = d["soapenv:Envelope"]["soapenv:Body"]['checkDeployStatusResponse']['result']
        if result['success'] == True:
            removed = []
            for f in files:
                try:
                    file_ext = f.split('.')[-1]
                    metadata_type = util.get_meta_type_by_suffix(file_ext)
                    if metadata_type == None or not 'directoryName' in metadata_type:
                        continue
                    directory = metadata_type['directoryName']
                    filepath = os.path.join(project.location, "src", directory, f)
                    metapath = os.path.join(project.location, "src", directory, f + '-meta.xml')
                    os.remove(filepath)
                    os.remove(metapath)
                    # remove the entry in file properties
                    project.conflict_manager.remove_from_local_store(f)
                    removed.append(f)
                except Exception, e:
                    print e.message
            return util.generate_success_response("Removed metadata files: " + (",".join(removed)))
Example #31
 def execute(self):
     IndexMetadataCommand(params=self.params).execute()
     return util.generate_success_response("Metadata refreshed successfully.")
Example #32
    def execute(self):
        project = config.project
        sfdc_client = config.sfdc_client
        if "files" in self.params:
            if "type" in self.params: 
                open_type = self.params.get("type", None) 
            else:
                open_type = "edit"
            files = self.params.get("files", None)
            if len(files) > 0:
                apex_file_properties = util.parse_json_from_file(os.path.join(project.location,"config",".local_store"))
                opened = []
                for fileabs in files:
                    basename = os.path.basename(fileabs)

                    if basename not in apex_file_properties: 
                        # make sure we have meta data and then get the object type
                        if os.path.isfile(fileabs+"-meta.xml"):
                            xmldoc = minidom.parse(fileabs+"-meta.xml")
                            root = xmldoc.firstChild
                            object_type = root.nodeName
                        else:
                            continue

                        object_id = sfdc_client.get_apex_entity_id_by_name(object_type=object_type, name=basename)
                        if not object_id: 
                            continue
                    else:
                        props = apex_file_properties[basename]
                        object_type = props['type']
                        object_id = props['id']

                    # only ApexClasses that are global and have webservice scope have WSDL files
                    if open_type == "wsdl":
                        if object_type != "ApexClass":
                            continue
                        with open(fileabs, 'r') as content_file:
                            content = content_file.read()
                            p = re.compile("global\s+(abstract\s+)?class\s", re.I + re.M)
                            if not p.search(content):
                                continue
                            p = re.compile("\swebservice\s", re.I + re.M)
                            if not p.search(content): 
                                continue

                    # get the server instance url and set the redirect url
                    frontdoor = "https://" + sfdc_client.server_url.split('/')[2] + "/secur/frontdoor.jsp?sid=" + sfdc_client.sid + "&retURL="
                    if open_type == "wsdl":
                        f, e = os.path.splitext(basename)
                        ret_url = "/services/wsdl/class/" + f
                    else:
                        f, ext = os.path.splitext(basename)
                        if object_type == "CustomObject" and not f.endswith('__c'):
                            # standard object?
                            ret_url = "/p/setup/layout/LayoutFieldList?type=" + f + "%23CustomFieldRelatedList_target"                             
                        else:
                            ret_url = "/" + object_id

                    # open the browser window for this file and track it
                    webbrowser.open(frontdoor+ret_url, new=2)
                    opened.append(basename)
                if len(opened) == 0:
                    return util.generate_error_response("There were no valid files to open.")
                return util.generate_success_response("Opened "+(", ".join(opened))+" on server.")
            return util.generate_error_response("Unable to open file on server.")
        else:
            raise MMException("To open on Salesforce, you must provide an array of 'files'")
Example #33
 def execute(self):
     config.project.update_credentials(self.params)
     return util.generate_success_response(
         'Your credentials were updated successfully')
Example #34
    def execute(self):
        data = self.params.get("data", None)
        word = self.params.get("word", None)
        file_name = self.params.get("file_name", None)
        completions = []  # collected completion entries; appended to in the branches below
        
        # example of data:
        #
        # public with sharing class AUTOTEST {

        #     String someString;
        #     public String myPublicString { get; set; }

        #     public AUTOTEST(String foo , Boolean bar) {
        #         ApexPages.StandardController c;
        #         c.cancel();
        #         String s = 'foo';
        #         s.

        if data == None:
            raise MMException('Please provide data')
        if file_name == None:
            raise MMException('Please provide file_name')

        apex_completions = util.parse_json_from_file(os.path.join(config.base_path, config.support_dir, "sforce", "metadata", "apex.json"))
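        # apex.json bundles the public declarations for the standard Apex classes (the System namespace and friends)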
        typedef = parsehelp.get_type_definition(data)
        
        debug('autocomplete type definition: ')
        debug(typedef)

        if '<' not in typedef[2] and '[' not in typedef[2]:
            if '.' in typedef[2] and '<' not in typedef[2]:
                type_parts = typedef[2].split('.')
                typedef_class = type_parts[0] #e.g. ApexPages
                typedef_class_lower = typedef_class.lower()
                typedef_class_extra = type_parts[1] #e.g. StandardController
                typedef_class_extra_lower = typedef_class_extra.lower()
            else:
                typedef_class = typedef[2] #e.g. ApexPages
                typedef_class_lower = typedef_class.lower()
                typedef_class_extra = typedef[4].replace('.','') #e.g. StandardController
                typedef_class_extra_lower = typedef_class_extra.lower()

            if '<' in typedef_class:
                typedef_class_lower = re.sub('\<.*?\>', '', typedef_class_lower)
                typedef_class_lower = re.sub('\<', '', typedef_class_lower)
                typedef_class_lower = re.sub('\>', '', typedef_class_lower)
                typedef_class       = re.sub('\<.*?\>', '', typedef_class)
                typedef_class       = re.sub('\<', '', typedef_class)
                typedef_class       = re.sub('\>', '', typedef_class)

            if '[' in typedef_class:
                typedef_class_lower = re.sub('\[.*?\]', '', typedef_class_lower)
                typedef_class       = re.sub('\[.*?\]', '', typedef_class)
        else:
            if '<' in typedef[2]:
                typedef_class = typedef[2].split('<')[0]
            elif '[' in typedef[2]:
                typedef_class = typedef[2].split('[')[0]
            typedef_class_lower = typedef_class.lower()
            typedef_class_extra = ''
            typedef_class_extra_lower = ''


        debug('autocomplete type: ')
        debug(typedef_class) #String
        debug('autocomplete type extra: ')
        debug(typedef_class_extra) #String

        if word != None and word == 'Page' and os.path.isdir(os.path.join(config.project.location,"src","pages")):
            for (dirpath, dirnames, filenames) in os.walk(os.path.join(config.project.location,"src","pages")):
                for f in filenames:
                    if '-meta.xml' in f: continue
                    base_page_name = f.replace(".page", "")
                    completions.append({
                        'type' : "Visualforce Page",
                        'name' : base_page_name
                    })
            
            return util.generate_success_response(completions, 'array')

        if len(typedef[4]) > 1 and '.' in typedef[4]:
            #deeply nested, need to look for properties
            #TODO 
            return util.generate_success_response([], 'array')

        # 
        # Is typedef_class a STANDARD Apex class?
        # 
        apex_class_key = typedef_class
        if apex_class_key == 'DateTime':
            apex_class_key = 'Datetime'

        if apex_class_key in apex_completions["publicDeclarations"] and typedef_class_extra_lower == '':
            apex_class_key = word
            if apex_class_key == 'DateTime':
                apex_class_key = 'Datetime'
            comp_def = apex_completions["publicDeclarations"].get(apex_class_key)
            for i in comp_def:
                completions.append(i)
            return util.generate_success_response(sorted(completions), 'array')

        elif apex_completions["publicDeclarations"].get(apex_class_key) != None:
            top_level = apex_completions["publicDeclarations"].get(typedef_class)
            sub_def = top_level.get(word)
            if sub_def == None:
                sub_def = top_level.get(typedef_class_extra)
            completions = get_symbol_table_completions(sub_def)
            return util.generate_success_response(sorted(completions), 'array')

        elif apex_class_key in apex_completions["publicDeclarations"]["System"]:
            if typedef_class == 'DateTime':
                typedef_class = 'Datetime'
            if word == typedef_class: #static
                comp_def = apex_completions["publicDeclarations"]["System"].get(apex_class_key)
            else: #instance
                comp_def = apex_completions["publicDeclarations"]["System"].get(typedef_class)
            completions = get_symbol_table_completions(comp_def)
            return util.generate_success_response(sorted(completions), 'array')

        # 
        # Is typedef_class a CUSTOM Apex class?
        # 

        # HANDLE CUSTOM APEX CLASS STATIC METHODS 
        # e.g. ===> MyCustomClass.doSomethingCool
        elif word != None and os.path.isfile(os.path.join(config.project.location,"src","classes",word+".cls")):
            try:
                completions = get_apex_completions(word) 
                return util.generate_success_response(sorted(completions), 'array')

            except:
                return util.generate_success_response([], 'array')

        if typedef_class_lower == None:
            return util.generate_success_response([], 'array')

        # HANDLE CUSTOM APEX INSTANCE METHOD ## 
        # MyClass foo = new MyClass()
        # e.g. ===> foo.??  

        # TODO: do we still need this given the existence of symbol tables, i don't think so?
        # clazz = parsehelp.extract_class(data)
        # inheritance = parsehelp.extract_inheritance(data, clazz)
        # if inheritance != None:
        #     if os.path.isfile(os.path.join(config.project.location,"src","classes",inheritance+".cls")): #=> apex classes
        #         completions = util.get_apex_completions(inheritance, typedef_class)
        #         return sorted(completions)
        
        # get symbol table for the seed file
        symbol_table = get_symbol_table(file_name)
        
        if symbol_table != None and "innerClasses" in symbol_table and type(symbol_table["innerClasses"]) is list and len(symbol_table["innerClasses"]) > 0:
            for ic in symbol_table["innerClasses"]:
                if ic["name"].lower() == typedef_class_lower:
                    completions = get_completions_for_inner_class(ic)
                    return util.generate_success_response(sorted(completions), 'array')

        if os.path.isfile(os.path.join(config.project.location,"src","classes",typedef_class+".cls")): #=> apex classes
            completions = get_apex_completions(typedef_class, typedef_class_extra)
            return util.generate_success_response(sorted(completions), 'array')


        
        
        #TODO: finish
        return util.generate_success_response([], 'array')

        if typedef_class.endswith('__r'):
            typedef_class = typedef_class.replace('__r', '__c')
        if os.path.isfile(os.path.join(config.project.location,"src","objects",typedef_class+".object")): #=> object fields from src directory (more info on field metadata, so is primary)
            object_dom = parse(os.path.join(config.project.location,"src","objects",typedef_class+".object"))
            for node in object_dom.getElementsByTagName('fields'):
                field_name = ''
                field_type = ''
                for child in node.childNodes:                            
                    if child.nodeName != 'fullName' and child.nodeName != 'type': continue
                    if child.nodeName == 'fullName':
                        field_name = child.firstChild.nodeValue
                    elif child.nodeName == 'type':
                        field_type = child.firstChild.nodeValue
                completions.append((field_name+" \t"+field_type, field_name))
            return sorted(completions)
        elif os.path.isfile(os.path.join(config.project.location,"config",".org_metadata")) and settings.get('mm_use_org_metadata_for_completions', False): #=> parse org metadata, looking for object fields
            jsonData = util.parse_json_from_file(os.path.join(config.project.location,"config",".org_metadata"))
            for metadata_type in jsonData:
                if 'xmlName' in metadata_type and metadata_type['xmlName'] == 'CustomObject':
                    for object_type in metadata_type['children']:
                        if 'text' in object_type and object_type['text'].lower() == typedef_class_lower:
                            for attr in object_type['children']:
                                if 'text' in attr and attr['text'] == 'fields':
                                    for field in attr['children']:
                                        completions.append((field['text'], field['text']))
            if len(completions) == 0 and '__c' in typedef_class_lower:
                try:
                    #need to index custom objects here, because it couldnt be found
                    if len(ThreadTracker.get_pending_mm_panel_threads(sublime.active_window())) == 0:
                        params = {
                            'metadata_types' : ['CustomObject']
                        }
                        mm.call('refresh_metadata_index', False, params=params)
                except:
                    debug('Failed to index custom object metadata')
            else:
                completions.append(('Id', 'Id'))
                return (sorted(completions), completion_flags)
        else:
            return []
Example #35
                        os.unlink(file_path)
                except Exception, e:
                    print e
            number_of_logs = len(logs)
            for log in logs:
                modstamp = log["modstamp"]
                if config.is_windows:
                    modstamp = modstamp.replace(':', ' ')
                file_name = modstamp+"-"+log["userid"]+".log"
                src = open(os.path.join(config.connection.workspace,config.project.project_name,"debug","logs",file_name), "w")
                src.write(log["log"])
                src.close()
        else:
            config.logger.debug("No logs to download")

        return util.generate_success_response(str(number_of_logs)+' Logs successfully downloaded')
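
#Sketch added for illustration (not part of the original command): the file
#naming rule used above, isolated. ':' is not allowed in Windows file names,
#so the modstamp is sanitized on that platform. The helper name is hypothetical.
def _debug_log_file_name(modstamp, user_id, is_windows=False):
    if is_windows:
        modstamp = modstamp.replace(':', ' ')
    return modstamp + "-" + user_id + ".log"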

class NewTraceFlagCommand(Command):
    aliases=["new_log"]
    def execute(self):
        """
            params = {
                "ApexCode"          : "None",
                "ApexProfiling"     : "01pd0000001yXtYAAU",
                "Callout"           : True,
                "Database"          : 1,
                "ExpirationDate"    : 3,
                "ScopeId"           : "",
                "System"            : "",
                "TracedEntityId"    : "",
                "Validation"        : "",
Beispiel #36
0
 def execute(self):
     config.project.update_credentials(self.params)
     return util.generate_success_response('Your credentials were updated successfully')
Beispiel #37
0
    def execute(self):
        archive_deployments = config.connection.get_plugin_client_setting("mm_archive_deployments", True)
        finish_deploy = self.params.get('finish', False)
        compare = config.connection.get_plugin_client_setting("mm_compare_before_deployment", True)
        destinations = self.params['destinations']
        deploy_metadata = config.sfdc_client.retrieve(package=self.params['package'])
        deploy_name = self.params.get('new_deployment_name', None)
        threads = []
        
        if not finish_deploy and compare:
            source_retrieve_result = config.sfdc_client.retrieve(package=self.params['package'])
            debug('source_retrieve_result')
            debug(source_retrieve_result)

            source_dict = {}
            for fp in source_retrieve_result.fileProperties:
                source_dict[fp.fileName] = fp

            debug('source_dict')
            debug(source_dict) 

            #need to compare package.xml to destination orgs here
            for destination in destinations:
                thread = CompareHandler(config.project, destination, self.params, self.params['package'])
                threads.append(thread)
                thread.start()  
                
            compare_results = []
            for thread in threads:
                thread.join()  
                compare_results.append(thread.result)
            
            debug('compare_results')
            debug(compare_results)
            destination_dict = {}

            for cr in compare_results:
                if 'success' in cr and cr['success'] == False:
                    destination_dict[cr['username']] = cr
                else:
                    cr_dict = {}
                    for fpfp in cr.fileProperties:
                        cr_dict[fpfp.fileName] = fpfp
                    destination_dict[cr.username] = cr_dict

            debug('destination_dict')
            debug(destination_dict)    

            final_compare_result = {}
            for d in destinations:
                final_compare_result[d['username']] = {}

            for file_name, file_details in source_dict.iteritems():
                if 'package.xml' in file_name:
                    continue
                for username, username_value in destination_dict.iteritems():
                    if 'success' in username_value and username_value['success'] == False:
                        final_compare_result[username] = username_value
                    else:
                        destination_retrieve_details = destination_dict[username]
                        
                        short_file_name = file_name.split('/')[-1]
                        mtype = util.get_meta_type_by_suffix(short_file_name.split('.')[-1])
       
                        if file_name not in destination_retrieve_details:
                            final_compare_result[username][short_file_name] = {
                                'name' : short_file_name,
                                'type' : mtype['xmlName'],
                                'action': 'insert',
                                'message' : 'Create'
                            }
                        else:
                            destination_file_detail = destination_retrieve_details[file_name]
                            source_file_detail = source_dict[file_name]
                            if source_file_detail.lastModifiedDate >= destination_file_detail.lastModifiedDate:
                                final_compare_result[username][file_name] = {
                                    'name' : short_file_name,
                                    'type' : mtype['xmlName'],
                                    'action' : 'update',
                                    'message' : 'You will overwrite this file'
                                }
                            else:
                                final_compare_result[username][file_name] = {
                                    'name' : short_file_name,
                                    'type' : mtype['xmlName'],
                                    'action' : 'update_conflict',
                                    'message' : 'Destination file is newer than source file'
                                }
            
            debug('final_compare_result')
            debug(final_compare_result) 

            if self.args.respond_with_html == True:
                html = util.generate_html_response('deploy_compare', final_compare_result, self.params)
                response = util.generate_success_response(html, "html") # returns json
                response['compare_success'] = True
                # if deployment to one org fails, the entire deploy was not successful
                # for result in final_compare_result:
                #     if result['success'] == False:
                #         response['compare_success'] = False
                #         break
                return json.dumps(response)
            else:
                return json.dumps(final_compare_result,indent=4)   

        for destination in destinations:
            if archive_deployments:
                deploy_path = os.path.join(config.project.location,"deploy",destination['username'])
                if not os.path.exists(deploy_path):
                    os.makedirs(deploy_path)
                if not os.path.isfile(os.path.join(config.project.location,"deploy",'.config')):
                    config_file = open(os.path.join(config.project.location,"deploy",'.config'), 'wb')
                    config_file_contents = { 
                        'deployments' : {
                            'named' : [],
                            'timestamped' : []
                        }
                    }
                    config_file.write(json.dumps(config_file_contents))
                    config_file.close()   

                ts = time.time()
                if not config.is_windows:
                    timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
                else:
                    timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H %M %S')

                if deploy_name:
                    if os.path.isdir(os.path.join(config.project.location,"deploy",destination['username'],deploy_name)):
                        shutil.rmtree(os.path.join(config.project.location,"deploy",destination['username'],deploy_name))
                    os.makedirs(os.path.join(config.project.location,"deploy",destination['username'],deploy_name))
                    util.extract_base64_encoded_zip(deploy_metadata.zipFile, os.path.join(config.project.location,"deploy",destination['username'],deploy_name))

                    config_file_json = util.parse_json_from_file(os.path.join(config.project.location,"deploy",'.config'))
                    named_deployment = {
                        'destination' : destination['username'],
                        'name' : deploy_name,
                        'timestamp' : timestamp,
                        'id' : util.get_random_string(30),
                        'package' : os.path.join(config.project.location,"deploy",destination['username'],deploy_name,'unpackaged','package.xml')
                    }
                    config_file_json['deployments']['named'].append(named_deployment)
                    config_file = open(os.path.join(config.project.location,"deploy",'.config'), 'wb')
                    config_file.write(json.dumps(config_file_json))
                    config_file.close()
                else:
                    os.makedirs(os.path.join(config.project.location,"deploy",destination['username'],timestamp))
                    util.extract_base64_encoded_zip(deploy_metadata.zipFile, os.path.join(config.project.location,"deploy",destination['username'],timestamp))

                    config_file_json = util.parse_json_from_file(os.path.join(config.project.location,"deploy",'.config'))
                    timestamped_deployment = {
                        'destination' : destination['username'],
                        'timestamp' : timestamp,
                        'id' : util.get_random_string(30),
                        'package' : os.path.join(config.project.location,"deploy",destination['username'],timestamp,'unpackaged','package.xml')
                    }
                    config_file_json['deployments']['timestamped'].append(timestamped_deployment)
                    config_file = open(os.path.join(config.project.location,"deploy",'.config'), 'wb')
                    config_file.write(json.dumps(config_file_json))
                    config_file.close()

            thread = DeploymentHandler(config.project, destination, self.params, deploy_metadata)
            threads.append(thread)
            thread.start()  
        
        deploy_results = []
        for thread in threads:
            thread.join()  
            deploy_results.append(thread.result)
                
        if self.args.respond_with_html == True:
            html = util.generate_html_response(self.args.operation, deploy_results, self.params)
            response = util.generate_success_response(html, "html") # returns json
            response['deploy_success'] = True
            # if deployment to one org fails, the entire deploy was not successful
            for result in deploy_results:
                if result['success'] == False:
                    response['deploy_success'] = False
                    break
            return json.dumps(response)
        else:
            return json.dumps(deploy_results,indent=4)
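
The compare step above classifies each retrieved file per destination org: a file missing from the destination becomes an insert, a file whose source copy is at least as recent becomes a plain update, and a file that is newer in the destination is flagged as a conflict. A condensed sketch of just that decision (the function name is an assumption; source_detail and the values in destination_details stand in for the fileProperties records used above):

def classify_deploy_action(file_name, source_detail, destination_details):
    #destination_details maps file names to records exposing lastModifiedDate,
    #like the per-org dictionaries built in the command above
    if file_name not in destination_details:
        return {'action': 'insert', 'message': 'Create'}
    destination_detail = destination_details[file_name]
    if source_detail.lastModifiedDate >= destination_detail.lastModifiedDate:
        return {'action': 'update', 'message': 'You will overwrite this file'}
    return {'action': 'update_conflict', 'message': 'Destination file is newer than source file'}
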
Beispiel #38
0
 def execute(self):
     debug_users = config.project.get_debug_users()
     debug_settings = config.project.get_debug_settings()
     for user_id in debug_users:
         config.sfdc_client.delete_trace_flags(user_id)    
     return util.generate_success_response('Stopped logging for debug users.')
Beispiel #39
0
    def execute(self):
        if int(float(util.SFDC_API_VERSION)) <= 28 or config.connection.get_plugin_client_setting("mm_use_legacy_test_ui", False):
            #API versions below 29.0 (or the legacy UI setting) fall back to RunUnitTestsCommand
            #raise MMException("This command requires mm_api_version to be set to 29.0 or higher.")
            return RunUnitTestsCommand(params=self.params, args=self.args).execute()

        project = config.project
        sfdc_client = config.sfdc_client

        generate_logs = self.params.get("generate_logs", False)
        if generate_logs:
            NewQuickTraceFlagCommand(params={
                "running_user_only": True
            }).execute()

        test_classes = self.params.get("classes", None)
        debug('running tests for')
        debug(test_classes)
        if test_classes is None or test_classes == []: #need to run all tests in project
            classes = []
            triggers = []
            test_classes = []
            for dirname, dirnames, filenames in os.walk(os.path.join(project.location, "src", "classes")):
                for filename in filenames:
                    if "test" in filename.lower() and "-meta.xml" not in filename:
                        test_classes.append(util.get_file_name_no_extension(filename))
                    elif "-meta.xml" not in filename:
                        classes.append(util.get_file_name_no_extension(filename))
            for dirname, dirnames, filenames in os.walk(os.path.join(project.location, "src", "triggers")):
                for filename in filenames:
                    if "-meta.xml" not in filename:
                        triggers.append(util.get_file_name_no_extension(filename))
        else: #user has specified certain tests to run
            classes = []
            triggers = []
            for dirname, dirnames, filenames in os.walk(os.path.join(project.location, "src", "classes")):
                for filename in filenames:
                    if "test" not in filename.lower() and "-meta.xml" not in filename:
                        classes.append(util.get_file_name_no_extension(filename))
            for dirname, dirnames, filenames in os.walk(os.path.join(project.location, "src", "triggers")):
                for filename in filenames:
                    if "-meta.xml" not in filename:
                        triggers.append(util.get_file_name_no_extension(filename))

        params = {"files": test_classes}
        test_results = sfdc_client.run_async_apex_tests(params, False)

        params = {
            "classes": classes,
            "triggers": triggers,
            "test_classes": test_classes
        }
        coverage_report = sfdc_client.get_apex_test_coverage(params, transform_ids=True)
        debug(">>>>>>>>>>")
        debug(coverage_report)
        result = {"test_results": test_results, "coverage": coverage_report}

        if self.args.respond_with_html:
            html = util.generate_html_response(self.args.operation, result, self.params)
            return util.generate_success_response(html, "html")
        else:
            return result
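
When no classes are passed in, the command above treats every class file whose name contains "test" as a test class, everything else as a regular class, and collects triggers separately by walking the project's src tree. A standalone sketch of that discovery step (the function name is an assumption; os.path.splitext stands in for util.get_file_name_no_extension):

import os

def discover_apex_sources(project_location):
    classes, triggers, test_classes = [], [], []
    for dirname, dirnames, filenames in os.walk(os.path.join(project_location, "src", "classes")):
        for filename in filenames:
            if "-meta.xml" in filename:
                continue
            name = os.path.splitext(filename)[0]
            if "test" in filename.lower():
                test_classes.append(name)
            else:
                classes.append(name)
    for dirname, dirnames, filenames in os.walk(os.path.join(project_location, "src", "triggers")):
        for filename in filenames:
            if "-meta.xml" not in filename:
                triggers.append(os.path.splitext(filename)[0])
    return classes, triggers, test_classes
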
Beispiel #40
0
 def execute(self):
     import mm.vf as vf 
     return util.generate_success_response(vf.tag_list, "array")
Beispiel #41
0
 def execute(self):
     users       = self.params.get('users', None)
     levels      = self.params.get('debug_categories', None)
     expiration  = self.params.get('expiration', None)
     config.project.put_debug_file(users, levels, expiration)
     return util.generate_success_response("Debug settings updated successfully")