Example #1
    def execute(self):
        project = config.project
        if 'directories' in self.params and len(self.params['directories']) == 1 and os.path.basename(self.params['directories'][0]) == "src":
            return project.clean(reset_metadata_container=False)
        else:
            retrieve_result = project.get_retrieve_result(self.params)
            #take this opportunity to freshen the cache
            project.conflict_manager.refresh_local_store(retrieve_result.fileProperties)
            util.extract_base64_encoded_zip(retrieve_result.zipFile, project.location)

            #TODO: handle exception that could render the project unusable bc of lost files
            #replace project metadata with retrieved metadata
            for dirname, dirnames, filenames in os.walk(os.path.join(project.location,"unpackaged")):
                for filename in filenames:
                    full_file_path = os.path.join(dirname, filename)
                    if '/unpackaged/package.xml' in full_file_path or '\\unpackaged\\package.xml' in full_file_path:
                        continue
                    if 'win32' in sys.platform:
                        destination = full_file_path.replace('\\unpackaged\\', '\\src\\')
                    else:
                        destination = full_file_path.replace('/unpackaged/', '/src/')
                    destination_directory = os.path.dirname(destination)
                    if not os.path.exists(destination_directory):
                        os.makedirs(destination_directory)
                    shutil.move(full_file_path, destination)
            shutil.rmtree(os.path.join(project.location,"unpackaged"))
            if os.path.exists(os.path.join(project.location,"metadata.zip")):
                os.remove(os.path.join(project.location,"metadata.zip"))
            return util.generate_success_response("Refresh Completed Successfully")
Example #2
    def execute(self):
        archive_deployments = config.connection.get_plugin_client_setting(
            "mm_archive_deployments", True)
        deploy_metadata = config.sfdc_client.retrieve(
            package=self.params['package'])
        threads = []
        for destination in self.params['destinations']:
            if archive_deployments:
                deploy_path = os.path.join(config.project.location, "deploy",
                                           destination['username'])
                if not os.path.exists(deploy_path):
                    os.makedirs(deploy_path)
                ts = time.time()
                if not config.is_windows:
                    timestamp = datetime.datetime.fromtimestamp(ts).strftime(
                        '%Y-%m-%d %H:%M:%S')
                else:
                    timestamp = datetime.datetime.fromtimestamp(ts).strftime(
                        '%Y-%m-%d %H %M %S')
                os.makedirs(
                    os.path.join(config.project.location, "deploy",
                                 destination['username'], timestamp))
                util.extract_base64_encoded_zip(
                    deploy_metadata.zipFile,
                    os.path.join(config.project.location, "deploy",
                                 destination['username'], timestamp))

            thread = DeploymentHandler(config.project, destination,
                                       self.params, deploy_metadata)
            threads.append(thread)
            thread.start()
        deploy_results = []
        for thread in threads:
            thread.join()
            deploy_results.append(thread.result)

        if self.args.respond_with_html == True:
            html = util.generate_html_response(self.args.operation,
                                               deploy_results, self.params)
            response = json.loads(util.generate_success_response(html, "html"))
            response['deploy_success'] = True
            # if deployment to one org fails, the entire deploy was not successful
            for result in deploy_results:
                if result['success'] == False:
                    response['deploy_success'] = False
                    break
            return json.dumps(response)
        else:
            return json.dumps(deploy_results, indent=4)
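Note: the two strftime formats above differ only because colons are not valid in Windows file names, so the per-deployment archive folder has to drop them on that platform. A small cross-platform sketch of the same idea (archive_folder_name is an illustrative name, not part of mm):

import datetime
import sys
import time

def archive_folder_name(ts=None):
    # Build a timestamp that is safe to use as a directory name on any platform.
    # Colons are not allowed in Windows paths, so replace them there.
    if ts is None:
        ts = time.time()
    stamp = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
    if sys.platform.startswith('win'):
        stamp = stamp.replace(':', ' ')
    return stamp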
Example #3
File: deploy.py Project: azam/mm
    def execute(self):
        archive_deployments = config.connection.get_plugin_client_setting("mm_archive_deployments", True)
        deploy_metadata = config.sfdc_client.retrieve(package=self.params['package'])
        threads = []
        for destination in self.params['destinations']:
            if archive_deployments:
                deploy_path = os.path.join(config.project.location,"deploy",destination['username'])
                if not os.path.exists(deploy_path):
                    os.makedirs(deploy_path)
                ts = time.time()
                if not config.is_windows:
                    timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
                else:
                    timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H %M %S')
                os.makedirs(os.path.join(config.project.location,"deploy",destination['username'],timestamp))
                util.extract_base64_encoded_zip(deploy_metadata.zipFile, os.path.join(config.project.location,"deploy",destination['username'],timestamp))

            thread = DeploymentHandler(config.project, destination, self.params, deploy_metadata)
            threads.append(thread)
            thread.start()  
        deploy_results = []
        for thread in threads:
            thread.join()  
            deploy_results.append(thread.result)
                
        if self.args.respond_with_html == True:
            html = util.generate_html_response(self.args.operation, deploy_results, self.params)
            response = json.loads(util.generate_success_response(html, "html"))
            response['deploy_success'] = True
            # if deployment to one org fails, the entire deploy was not successful
            for result in deploy_results:
                if result['success'] == False:
                    response['deploy_success'] = False
                    break
            return json.dumps(response)
        else:
            return json.dumps(deploy_results, indent=4)
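Note: DeploymentHandler is not defined in these excerpts. For the start()/join()/thread.result pattern above to work, it is presumably a threading.Thread subclass that keeps its outcome on the instance. A minimal sketch under that assumption (the run() body is a placeholder, not the real deploy call):

import threading

class DeploymentHandler(threading.Thread):
    # Sketch of the worker assumed by the examples: run one deployment and
    # store the outcome in self.result so the caller can read it after join().
    def __init__(self, project, destination, params, deploy_metadata):
        threading.Thread.__init__(self)
        self.project = project
        self.destination = destination
        self.params = params
        self.deploy_metadata = deploy_metadata
        self.result = None

    def run(self):
        # The real handler would issue the Metadata API deploy() call here.
        self.result = {'username': self.destination['username'], 'success': True}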
Example #4
    def listMetadataAdvanced(self, metadata_type):
        try:
            metadata_type_def = util.get_meta_type_by_name(metadata_type)
            if metadata_type_def == None:
                return []
            has_children_metadata = False
            if 'childXmlNames' in metadata_type_def and type(
                    metadata_type_def['childXmlNames']) is list:
                has_children_metadata = True
            is_folder_metadata = 'inFolder' in metadata_type_def and metadata_type_def[
                'inFolder']
            if is_folder_metadata == True:
                metadata_request_type = self.__transformFolderMetadataNameForListRequest(
                    metadata_type)
            else:
                metadata_request_type = metadata_type
            list_response = self.listMetadata(metadata_request_type, True,
                                              util.SFDC_API_VERSION)
            debug('--------------->')
            debug(list_response)
            if type(list_response) is not list:
                list_response = [list_response]
            #print list_response
            object_hash = {}  #=> {"Account" => [ {"fields" => ["foo", "bar"]}, "listviews" => ["foo", "bar"] ], "Contact" => ... }

            if has_children_metadata == True and len(list_response) > 0:  #metadata objects like customobject, workflow, etc.
                request_names = []
                for element in list_response:
                    #if element['fullName'] != 'PersonAccount':
                    request_names.append(element['fullName'])
                retrieve_result = self.retrieve(
                    package={metadata_request_type: request_names})
                #print '>>>> ',retrieve_result
                tmp = util.put_tmp_directory_on_disk()
                util.extract_base64_encoded_zip(retrieve_result.zipFile, tmp)

                #iterate extracted directory
                for dirname, dirnames, filenames in os.walk(
                        os.path.join(tmp, "unpackaged",
                                     metadata_type_def['directoryName'])):
                    for f in filenames:
                        #f => Account.object
                        full_file_path = os.path.join(dirname, f)
                        data = util.parse_xml_from_file(full_file_path)
                        c_hash = {}
                        for child_type in metadata_type_def['childXmlNames']:
                            child_type_def = util.get_meta_type_by_name(
                                child_type)
                            if child_type_def == None:  #TODO handle newer child types
                                continue
                            tag_name = child_type_def['tagName']
                            items = []
                            try:
                                if tag_name in data[metadata_request_type]:
                                    if type(data[metadata_request_type][tag_name]) is not list:
                                        data[metadata_request_type][tag_name] = [data[metadata_request_type][tag_name]]
                                    for i, val in enumerate(data[metadata_request_type][tag_name]):
                                        items.append(val['fullName'])
                            except BaseException, e:
                                #print 'exception >>>> ', e.message
                                pass

                            c_hash[tag_name] = items

                        base_name = f.split(".")[0]
                        object_hash[base_name] = c_hash

                shutil.rmtree(tmp)
            #print '>>> ',object_hash
            return_elements = []
            for element in list_response:
                if config.connection.get_plugin_client_setting(
                        'mm_ignore_managed_metadata') == True:
                    if 'manageableState' in element and element[
                            "manageableState"] != "unmanaged":
                        continue

                children = []
                full_name = element['fullName']
                #if full_name == "PersonAccount":
                #    full_name = "Account"
                #print 'processing: ', element
                if has_children_metadata == True:
                    if not full_name in object_hash:
                        continue
                    object_detail = object_hash[full_name]
                    if object_detail == None:
                        continue

                    for child in metadata_type_def['childXmlNames']:
                        child_type_def = util.get_meta_type_by_name(child)
                        if child_type_def == None:  #TODO: handle more complex types
                            continue
                        tag_name = child_type_def['tagName']
                        if len(object_detail[tag_name]) > 0:
                            gchildren = []
                            for gchild_el in object_detail[tag_name]:
                                gchildren.append({
                                    "text": gchild_el,
                                    "isFolder": False,
                                    "checked": False,
                                    "level": 4,
                                    "leaf": True,
                                    "id": metadata_type_def['xmlName'] + "." + full_name + "." + tag_name + "." + gchild_el,
                                    "select": False,
                                    "title": gchild_el
                                })
                                children = sorted(children, key=itemgetter('text'))

                            children.append({
                                "text": child_type_def['tagName'],
                                "isFolder": True,
                                "cls": "folder",
                                "children": gchildren,
                                "checked": False,
                                "level": 3,
                                "id": metadata_type_def['xmlName'] + "." + full_name + "." + tag_name,
                                "select": False,
                                "title": child_type_def['tagName']
                            })

                #if this type has folders, run queries to grab all metadata in the folders
                if is_folder_metadata == True:
                    if config.connection.get_plugin_client_setting(
                            'mm_ignore_managed_metadata', True):
                        if 'manageableState' in element and element[
                                "manageableState"] != "unmanaged":
                            continue
                    #print element["fullName"]
                    list_request = {
                        "type": metadata_type,
                        "folder": element["fullName"]
                    }
                    list_basic_response = self.listMetadata(
                        list_request, True, config.connection.sfdc_api_version)

                    if type(list_basic_response) is not list:
                        list_basic_response = [list_basic_response]

                    for folder_element in list_basic_response:
                        children.append({
                            "text": folder_element['fullName'].split("/")[1],
                            "leaf": True,
                            "isFolder": False,
                            "checked": False,
                            "level": 3,
                            "id": folder_element['fullName'].replace('/', '.'),
                            "select": False,
                            "title": folder_element['fullName'].split("/")[1]
                        })

                children = sorted(children, key=itemgetter('text'))
                is_leaf = True
                cls = ''
                if is_folder_metadata:
                    is_leaf = False
                    cls = 'folder'
                if has_children_metadata:
                    is_leaf = False
                    cls = 'folder'
                if metadata_type_def['xmlName'] == 'Workflow':
                    is_leaf = True
                    cls = ''
                #print '>>> ',element
                return_elements.append({
                    "text": element['fullName'],
                    "isFolder": is_folder_metadata or has_children_metadata,
                    "cls": cls,
                    "leaf": is_leaf,
                    "children": children,
                    "checked": False,
                    "level": 2,
                    "id": metadata_type_def['xmlName'] + '.' + full_name.replace(' ', ''),
                    "select": False,
                    "title": element['fullName']
                })

            return_elements = sorted(return_elements, key=itemgetter('text'))
            # if list_response == []:
            #     return list_response

            # return list_response
            return return_elements
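Note: util.parse_xml_from_file is not shown, but judging by how its result is indexed (data[metadata_request_type][tag_name], then val['fullName']), it appears to return an xmltodict-style nested dict. A condensed sketch of the per-file child collection performed above, under that assumption (xmltodict and collect_child_names are illustrative choices, not part of mm):

import xmltodict

def collect_child_names(object_file_path, parent_tag, child_tags):
    # Pull the fullName of each child element (fields, listViews, ...) out of
    # one retrieved metadata file, mirroring the c_hash loop above.
    with open(object_file_path) as f:
        data = xmltodict.parse(f.read())
    parent = data.get(parent_tag) or {}
    result = {}
    for tag in child_tags:
        entries = parent.get(tag) or []
        if not isinstance(entries, list):
            entries = [entries]
        result[tag] = [e['fullName'] for e in entries if isinstance(e, dict) and 'fullName' in e]
    return result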
Example #5
    def execute(self):
        archive_deployments = config.connection.get_plugin_client_setting(
            "mm_archive_deployments", True)
        finish_deploy = self.params.get('finish', False)
        compare = config.connection.get_plugin_client_setting(
            "mm_compare_before_deployment", True)
        destinations = self.params['destinations']
        deploy_metadata = config.sfdc_client.retrieve(
            package=self.params['package'])
        deploy_name = self.params.get('new_deployment_name', None)
        threads = []

        if not finish_deploy and compare:
            source_retrieve_result = config.sfdc_client.retrieve(
                package=self.params['package'])
            debug('source_retrieve_result')
            debug(source_retrieve_result)

            source_dict = {}
            for fp in source_retrieve_result.fileProperties:
                source_dict[fp.fileName] = fp

            debug('source_dict')
            debug(source_dict)

            #need to compare package.xml to destination orgs here
            for destination in destinations:
                thread = CompareHandler(config.project, destination,
                                        self.params, self.params['package'])
                threads.append(thread)
                thread.start()

            compare_results = []
            for thread in threads:
                thread.join()
                compare_results.append(thread.result)

            debug('compare_results')
            debug(compare_results)
            destination_dict = {}

            for cr in compare_results:
                cr_dict = {}
                for fpfp in cr.fileProperties:
                    cr_dict[fpfp.fileName] = fpfp
                destination_dict[cr.username] = cr_dict

            debug('destination_dict')
            debug(destination_dict)

            final_compare_result = {}
            for d in destinations:
                final_compare_result[d['username']] = {}

            for file_name, file_details in source_dict.iteritems():
                if 'package.xml' in file_name:
                    continue
                for username, username_value in destination_dict.iteritems():
                    destination_retrieve_details = destination_dict[username]

                    if 'package.xml' in file_name:
                        continue

                    short_file_name = file_name.split('/')[-1]
                    mtype = util.get_meta_type_by_suffix(
                        short_file_name.split('.')[-1])

                    if file_name not in destination_retrieve_details:
                        final_compare_result[username][file_name] = {
                            'name': short_file_name,
                            'type': mtype['xmlName'],
                            'action': 'insert',
                            'message': 'Create'
                        }
                    else:
                        destination_file_detail = destination_retrieve_details[
                            file_name]
                        source_file_detail = source_dict[file_name]
                        if source_file_detail.lastModifiedDate >= destination_file_detail.lastModifiedDate:
                            final_compare_result[username][file_name] = {
                                'name': short_file_name,
                                'type': mtype['xmlName'],
                                'action': 'update',
                                'message': 'You will overwrite this file'
                            }
                        else:
                            final_compare_result[username][file_name] = {
                                'name': short_file_name,
                                'type': mtype['xmlName'],
                                'action': 'update_conflict',
                                'message': 'Destination file is newer than source file'
                            }

            # final_compare_result = {}
            # for d in destinations:
            #     final_compare_result[d['username']] = {}

            # for username, username_value in destination_dict.iteritems():
            #     #destination_dict = destination_dict[username]
            #     for file_name, file_details in username_value.iteritems():
            #         if 'package.xml' in file_name:
            #             continue;

            #         short_file_name = file_name.split('/')[-1]
            #         mtype = util.get_meta_type_by_suffix(short_file_name.split('.')[-1])

            #         if file_name not in source_dict:
            #             final_compare_result[username][file_name] = {
            #                 'name' : short_file_name,
            #                 'type' : mtype['xmlName'],
            #                 'action': 'insert',
            #                 'message' : 'Create'
            #             }
            #         else:
            #             destination_file_detail = username_value[file_name]
            #             source_file_detail = source_dict[file_name]
            #             if source_file_detail.lastModifiedDate >= destination_file_detail.lastModifiedDate:
            #                 final_compare_result[username][file_name] = {
            #                     'name' : short_file_name,
            #                     'type' : mtype['xmlName'],
            #                     'action' : 'update',
            #                     'message' : 'You will overwrite this file'
            #                 }
            #             else:
            #                 final_compare_result[username][file_name] = {
            #                     'name' : short_file_name,
            #                     'type' : mtype['xmlName'],
            #                     'action' : 'update_conflict',
            #                     'message' : 'Destination file is newer than source file'
            #                 }

            debug('final_compare_result')
            debug(final_compare_result)

            if self.args.respond_with_html == True:
                html = util.generate_html_response('deploy_compare',
                                                   final_compare_result,
                                                   self.params)
                response = json.loads(
                    util.generate_success_response(html, "html"))
                response['compare_success'] = True
                # if deployment to one org fails, the entire deploy was not successful
                # for result in final_compare_result:
                #     if result['success'] == False:
                #         response['compare_success'] = False
                #         break
                return json.dumps(response)
            else:
                return json.dumps(final_compare_result, indent=4)

        for destination in destinations:
            if archive_deployments:
                deploy_path = os.path.join(config.project.location, "deploy",
                                           destination['username'])
                if not os.path.exists(deploy_path):
                    os.makedirs(deploy_path)
                if not os.path.isfile(
                        os.path.join(config.project.location, "deploy",
                                     '.config')):
                    config_file = open(
                        os.path.join(config.project.location, "deploy",
                                     '.config'), 'wb')
                    config_file_contents = {
                        'deployments': {
                            'named': [],
                            'timestamped': []
                        }
                    }
                    config_file.write(json.dumps(config_file_contents))
                    config_file.close()

                ts = time.time()
                if not config.is_windows:
                    timestamp = datetime.datetime.fromtimestamp(ts).strftime(
                        '%Y-%m-%d %H:%M:%S')
                else:
                    timestamp = datetime.datetime.fromtimestamp(ts).strftime(
                        '%Y-%m-%d %H %M %S')

                if deploy_name:
                    if os.path.isdir(
                            os.path.join(config.project.location, "deploy",
                                         destination['username'],
                                         deploy_name)):
                        shutil.rmtree(
                            os.path.join(config.project.location, "deploy",
                                         destination['username'], deploy_name))
                    os.makedirs(
                        os.path.join(config.project.location, "deploy",
                                     destination['username'], deploy_name))
                    util.extract_base64_encoded_zip(
                        deploy_metadata.zipFile,
                        os.path.join(config.project.location, "deploy",
                                     destination['username'], deploy_name))

                    config_file_json = util.parse_json_from_file(
                        os.path.join(config.project.location, "deploy",
                                     '.config'))
                    named_deployment = {
                        'destination': destination['username'],
                        'name': deploy_name,
                        'timestamp': timestamp,
                        'id': util.get_random_string(30),
                        'package': os.path.join(config.project.location, "deploy",
                                                destination['username'], deploy_name,
                                                'unpackaged', 'package.xml')
                    }
                    config_file_json['deployments']['named'].append(
                        named_deployment)
                    config_file = open(
                        os.path.join(config.project.location, "deploy",
                                     '.config'), 'wb')
                    config_file.write(json.dumps(config_file_json))
                    config_file.close()
                else:
                    os.makedirs(
                        os.path.join(config.project.location, "deploy",
                                     destination['username'], timestamp))
                    util.extract_base64_encoded_zip(
                        deploy_metadata.zipFile,
                        os.path.join(config.project.location, "deploy",
                                     destination['username'], timestamp))

                    config_file_json = util.parse_json_from_file(
                        os.path.join(config.project.location, "deploy",
                                     '.config'))
                    timestamped_deployment = {
                        'destination': destination['username'],
                        'timestamp': timestamp,
                        'id': util.get_random_string(30),
                        'package': os.path.join(config.project.location, "deploy",
                                                destination['username'], timestamp,
                                                'unpackaged', 'package.xml')
                    }
                    config_file_json['deployments']['timestamped'].append(
                        timestamped_deployment)
                    config_file = open(
                        os.path.join(config.project.location, "deploy",
                                     '.config'), 'wb')
                    config_file.write(json.dumps(config_file_json))
                    config_file.close()

            thread = DeploymentHandler(config.project, destination,
                                       self.params, deploy_metadata)
            threads.append(thread)
            thread.start()
        deploy_results = []
        for thread in threads:
            thread.join()
            deploy_results.append(thread.result)

        if self.args.respond_with_html == True:
            html = util.generate_html_response(self.args.operation,
                                               deploy_results, self.params)
            response = json.loads(util.generate_success_response(html, "html"))
            response['deploy_success'] = True
            # if deployment to one org fails, the entire deploy was not successful
            for result in deploy_results:
                if result['success'] == False:
                    response['deploy_success'] = False
                    break
            return json.dumps(response)
        else:
            return json.dumps(deploy_results, indent=4)
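Note: the compare step above boils down to indexing both retrieve results by fileName and classifying each source file as a create, a plain update, or a conflict based on lastModifiedDate. A condensed sketch of just that classification for one destination org (classify_files is an illustrative name; the attribute names follow the FileProperties objects used above):

def classify_files(source_props, destination_props):
    # Map each non-package.xml source file to 'insert', 'update' or 'update_conflict'.
    destination_by_name = dict((fp.fileName, fp) for fp in destination_props)
    actions = {}
    for fp in source_props:
        if 'package.xml' in fp.fileName:
            continue
        dest = destination_by_name.get(fp.fileName)
        if dest is None:
            actions[fp.fileName] = 'insert'
        elif fp.lastModifiedDate >= dest.lastModifiedDate:
            actions[fp.fileName] = 'update'
        else:
            actions[fp.fileName] = 'update_conflict'
    return actions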
Example #6
    def listMetadataAdvanced(self, metadata_type):
        try:
            metadata_type_def = util.get_meta_type_by_name(metadata_type)
            if metadata_type_def == None:
                return []
            has_children_metadata = False
            if 'childXmlNames' in metadata_type_def and type(metadata_type_def['childXmlNames']) is list:
                has_children_metadata = True
            is_folder_metadata = 'inFolder' in metadata_type_def and metadata_type_def['inFolder']
            if is_folder_metadata == True:
                metadata_request_type = self.__transformFolderMetadataNameForListRequest(metadata_type)
            else:
                metadata_request_type = metadata_type
            list_response = self.listMetadata(metadata_request_type, True, util.SFDC_API_VERSION) 
            debug('--------------->')
            debug(list_response)
            if type(list_response) is not list:
                list_response = [list_response]
            #print list_response
            object_hash = {} #=> {"Account" => [ {"fields" => ["foo", "bar"]}, "listviews" => ["foo", "bar"] ], "Contact" => ... }

            if has_children_metadata == True and len(list_response) > 0: #metadata objects like customobject, workflow, etc.
                request_names = []
                for element in list_response:
                    #if element['fullName'] != 'PersonAccount':
                    request_names.append(element['fullName'])
                retrieve_result = self.retrieve(package={
                    metadata_request_type : request_names
                })
                #print '>>>> ',retrieve_result
                tmp = util.put_tmp_directory_on_disk()
                util.extract_base64_encoded_zip(retrieve_result.zipFile, tmp)

                #iterate extracted directory
                for dirname, dirnames, filenames in os.walk(os.path.join(tmp,"unpackaged",metadata_type_def['directoryName'])):
                    for f in filenames:
                        #f => Account.object
                        full_file_path = os.path.join(dirname, f)
                        data = util.parse_xml_from_file(full_file_path)
                        c_hash = {}
                        for child_type in metadata_type_def['childXmlNames']:
                            child_type_def = util.get_meta_type_by_name(child_type)
                            if child_type_def == None: #TODO handle newer child types
                                continue
                            tag_name = child_type_def['tagName']
                            items = []
                            try:
                                if tag_name in data[metadata_request_type]:
                                    if type(data[metadata_request_type][tag_name]) is not list:
                                        data[metadata_request_type][tag_name] = [data[metadata_request_type][tag_name]]
                                    for i, val in enumerate(data[metadata_request_type][tag_name]):
                                        items.append(val['fullName'])
                            except BaseException, e:
                                #print 'exception >>>> ', e.message
                                pass

                            c_hash[tag_name] = items

                        base_name = f.split(".")[0]
                        object_hash[base_name] = c_hash

                shutil.rmtree(tmp)
            #print '>>> ',object_hash
            return_elements = []
            for element in list_response:
                if config.connection.get_plugin_client_setting('mm_ignore_managed_metadata') == True:
                    if 'manageableState' in element and element["manageableState"] != "unmanaged":
                        continue

                children = []
                full_name = element['fullName']
                #if full_name == "PersonAccount":
                #    full_name = "Account" 
                #print 'processing: ', element
                if has_children_metadata == True:
                    if not full_name in object_hash:
                        continue
                    object_detail = object_hash[full_name]
                    if object_detail == None:
                        continue

                    for child in metadata_type_def['childXmlNames']:
                        child_type_def = util.get_meta_type_by_name(child)
                        if child_type_def == None: #TODO: handle more complex types
                            continue
                        tag_name = child_type_def['tagName']
                        if len(object_detail[tag_name]) > 0:
                            gchildren = []
                            for gchild_el in object_detail[tag_name]:
                                gchildren.append({
                                    "text"      : gchild_el,
                                    "isFolder"  : False,
                                    "checked"   : False,
                                    "level"     : 4,
                                    "leaf"      : True,
                                    "id"        : metadata_type_def['xmlName']+"."+full_name+"."+tag_name+"."+gchild_el,
                                    "select"    : False,
                                    "title"     : gchild_el
                                })
                                children = sorted(children, key=itemgetter('text')) 
                          
                            children.append({
                                "text"      : child_type_def['tagName'],
                                "isFolder"  : True,
                                "cls"       : "folder",
                                "children"  : gchildren,
                                "checked"   : False,
                                "level"     : 3,
                                "id"        : metadata_type_def['xmlName']+"."+full_name+"."+tag_name,
                                "select"    : False,
                                "title"     : child_type_def['tagName']
                            })
                                            
                #if this type has folders, run queries to grab all metadata in the folders
                if is_folder_metadata == True:
                    if config.connection.get_plugin_client_setting('mm_ignore_managed_metadata', True):
                        if 'manageableState' in element and element["manageableState"] != "unmanaged":
                            continue
                    #print element["fullName"]
                    list_request = {
                        "type"      : metadata_type,
                        "folder"    : element["fullName"]
                    }
                    list_basic_response = self.listMetadata(list_request, True, config.connection.sfdc_api_version) 

                    if type(list_basic_response) is not list:
                        list_basic_response = [list_basic_response]

                    for folder_element in list_basic_response:
                        children.append({
                            "text"      : folder_element['fullName'].split("/")[1],
                            "leaf"      : True,
                            "isFolder"  : False,
                            "checked"   : False,
                            "level"     : 3,
                            "id"        : folder_element['fullName'].replace('/', '.'),
                            "select"    : False,
                            "title"     : folder_element['fullName'].split("/")[1]

                        })
                    
                children = sorted(children, key=itemgetter('text')) 
                is_leaf = True
                cls = ''
                if is_folder_metadata:
                    is_leaf = False
                    cls = 'folder'
                if has_children_metadata:
                    is_leaf = False
                    cls = 'folder'
                if metadata_type_def['xmlName'] == 'Workflow':
                    is_leaf = True
                    cls = ''
                #print '>>> ',element
                return_elements.append({
                    "text"      : element['fullName'],
                    "isFolder"  : is_folder_metadata or has_children_metadata,
                    "cls"       : cls,
                    "leaf"      : is_leaf,
                    "children"  : children,
                    "checked"   : False,
                    "level"     : 2,
                    "id"        : metadata_type_def['xmlName']+'.'+full_name.replace(' ', ''),
                    "select"    : False,
                    "title"     : element['fullName']
                })

            return_elements = sorted(return_elements, key=itemgetter('text')) 
            # if list_response == []:
            #     return list_response

            # return list_response
            return return_elements
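Note: __transformFolderMetadataNameForListRequest is private to the client and not included here. For folder-based types the Metadata API listMetadata call expects the corresponding folder type (for example EmailFolder rather than EmailTemplate), so the helper presumably performs a mapping along these lines; the table below is an illustration, not the actual implementation:

FOLDER_LIST_TYPES = {
    # Folder-based metadata types are listed via their folder type.
    'EmailTemplate': 'EmailFolder',
    'Report': 'ReportFolder',
    'Dashboard': 'DashboardFolder',
    'Document': 'DocumentFolder',
}

def transform_folder_metadata_name_for_list_request(metadata_type):
    # Swap a folder-based type for the folder type that listMetadata expects.
    return FOLDER_LIST_TYPES.get(metadata_type, metadata_type)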
Example #7
    def execute(self):
        archive_deployments = config.connection.get_plugin_client_setting("mm_archive_deployments", True)
        finish_deploy = self.params.get('finish', False)
        compare = config.connection.get_plugin_client_setting("mm_compare_before_deployment", True)
        destinations = self.params['destinations']
        deploy_metadata = config.sfdc_client.retrieve(package=self.params['package'])
        deploy_name = self.params.get('new_deployment_name', None)
        threads = []
        
        if not finish_deploy and compare:
            source_retrieve_result = config.sfdc_client.retrieve(package=self.params['package'])
            debug('source_retrieve_result')
            debug(source_retrieve_result)

            source_dict = {}
            for fp in source_retrieve_result.fileProperties:
                source_dict[fp.fileName] = fp

            debug('source_dict')
            debug(source_dict) 

            #need to compare package.xml to destination orgs here
            for destination in destinations:
                thread = CompareHandler(config.project, destination, self.params, self.params['package'])
                threads.append(thread)
                thread.start()  
                
            compare_results = []
            for thread in threads:
                thread.join()  
                compare_results.append(thread.result)
            
            debug('compare_results')
            debug(compare_results)
            destination_dict = {}

            for cr in compare_results:
                cr_dict = {}
                for fpfp in cr.fileProperties:
                    cr_dict[fpfp.fileName] = fpfp
                destination_dict[cr.username] = cr_dict

            debug('destination_dict')
            debug(destination_dict)    

            final_compare_result = {}
            for d in destinations:
                final_compare_result[d['username']] = {}

            for file_name, file_details in source_dict.iteritems():
                if 'package.xml' in file_name:
                    continue
                for username, username_value in destination_dict.iteritems():
                    destination_retrieve_details = destination_dict[username]
                    
                    if 'package.xml' in file_name:
                        continue

                    short_file_name = file_name.split('/')[-1]
                    mtype = util.get_meta_type_by_suffix(short_file_name.split('.')[-1])
   
                    if file_name not in destination_retrieve_details:
                        final_compare_result[username][file_name] = {
                            'name' : short_file_name,
                            'type' : mtype['xmlName'],
                            'action': 'insert',
                            'message' : 'Create'
                        }
                    else:
                        destination_file_detail = destination_retrieve_details[file_name]
                        source_file_detail = source_dict[file_name]
                        if source_file_detail.lastModifiedDate >= destination_file_detail.lastModifiedDate:
                            final_compare_result[username][file_name] = {
                                'name' : short_file_name,
                                'type' : mtype['xmlName'],
                                'action' : 'update',
                                'message' : 'You will overwrite this file'
                            }
                        else:
                            final_compare_result[username][file_name] = {
                                'name' : short_file_name,
                                'type' : mtype['xmlName'],
                                'action' : 'update_conflict',
                                'message' : 'Destination file is newer than source file'
                            }
            


            # final_compare_result = {}
            # for d in destinations:
            #     final_compare_result[d['username']] = {}

            # for username, username_value in destination_dict.iteritems():
            #     #destination_dict = destination_dict[username]
            #     for file_name, file_details in username_value.iteritems():
            #         if 'package.xml' in file_name:
            #             continue;

            #         short_file_name = file_name.split('/')[-1]
            #         mtype = util.get_meta_type_by_suffix(short_file_name.split('.')[-1])

            #         if file_name not in source_dict:
            #             final_compare_result[username][file_name] = {
            #                 'name' : short_file_name,
            #                 'type' : mtype['xmlName'],
            #                 'action': 'insert',
            #                 'message' : 'Create'
            #             }
            #         else:
            #             destination_file_detail = username_value[file_name]
            #             source_file_detail = source_dict[file_name]
            #             if source_file_detail.lastModifiedDate >= destination_file_detail.lastModifiedDate:
            #                 final_compare_result[username][file_name] = {
            #                     'name' : short_file_name,
            #                     'type' : mtype['xmlName'],
            #                     'action' : 'update',
            #                     'message' : 'You will overwrite this file'
            #                 }
            #             else:
            #                 final_compare_result[username][file_name] = {
            #                     'name' : short_file_name,
            #                     'type' : mtype['xmlName'],
            #                     'action' : 'update_conflict',
            #                     'message' : 'Destination file is newer than source file'
            #                 }

            debug('final_compare_result')
            debug(final_compare_result) 

            if self.args.respond_with_html == True:
                html = util.generate_html_response('deploy_compare', final_compare_result, self.params)
                response = json.loads(util.generate_success_response(html, "html"))
                response['compare_success'] = True
                # if deployment to one org fails, the entire deploy was not successful
                # for result in final_compare_result:
                #     if result['success'] == False:
                #         response['compare_success'] = False
                #         break
                return json.dumps(response)
            else:
                return json.dumps(final_compare_result,indent=4)   

        for destination in destinations:
            if archive_deployments:
                deploy_path = os.path.join(config.project.location,"deploy",destination['username'])
                if not os.path.exists(deploy_path):
                    os.makedirs(deploy_path)
                if not os.path.isfile(os.path.join(config.project.location,"deploy",'.config')):
                    config_file = open(os.path.join(config.project.location,"deploy",'.config'), 'wb')
                    config_file_contents = { 
                        'deployments' : {
                            'named' : [],
                            'timestamped' : []
                        }
                    }
                    config_file.write(json.dumps(config_file_contents))
                    config_file.close()   

                ts = time.time()
                if not config.is_windows:
                    timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
                else:
                    timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H %M %S')

                if deploy_name:
                    if os.path.isdir(os.path.join(config.project.location,"deploy",destination['username'],deploy_name)):
                        shutil.rmtree(os.path.join(config.project.location,"deploy",destination['username'],deploy_name))
                    os.makedirs(os.path.join(config.project.location,"deploy",destination['username'],deploy_name))
                    util.extract_base64_encoded_zip(deploy_metadata.zipFile, os.path.join(config.project.location,"deploy",destination['username'],deploy_name))

                    config_file_json = util.parse_json_from_file(os.path.join(config.project.location,"deploy",'.config'))
                    named_deployment = {
                        'destination' : destination['username'],
                        'name' : deploy_name,
                        'timestamp' : timestamp,
                        'id' : util.get_random_string(30),
                        'package' : os.path.join(config.project.location,"deploy",destination['username'],deploy_name,'unpackaged','package.xml')
                    }
                    config_file_json['deployments']['named'].append(named_deployment)
                    config_file = open(os.path.join(config.project.location,"deploy",'.config'), 'wb')
                    config_file.write(json.dumps(config_file_json))
                    config_file.close()
                else:
                    os.makedirs(os.path.join(config.project.location,"deploy",destination['username'],timestamp))
                    util.extract_base64_encoded_zip(deploy_metadata.zipFile, os.path.join(config.project.location,"deploy",destination['username'],timestamp))

                    config_file_json = util.parse_json_from_file(os.path.join(config.project.location,"deploy",'.config'))
                    timestamped_deployment = {
                        'destination' : destination['username'],
                        'timestamp' : timestamp,
                        'id' : util.get_random_string(30),
                        'package' : os.path.join(config.project.location,"deploy",destination['username'],timestamp,'unpackaged','package.xml')
                    }
                    config_file_json['deployments']['timestamped'].append(timestamped_deployment)
                    config_file = open(os.path.join(config.project.location,"deploy",'.config'), 'wb')
                    config_file.write(json.dumps(config_file_json))
                    config_file.close()

            thread = DeploymentHandler(config.project, destination, self.params, deploy_metadata)
            threads.append(thread)
            thread.start()  
        deploy_results = []
        for thread in threads:
            thread.join()  
            deploy_results.append(thread.result)
                
        if self.args.respond_with_html == True:
            html = util.generate_html_response(self.args.operation, deploy_results, self.params)
            response = json.loads(util.generate_success_response(html, "html"))
            response['deploy_success'] = True
            # if deployment to one org fails, the entire deploy was not successful
            for result in deploy_results:
                if result['success'] == False:
                    response['deploy_success'] = False
                    break
            return json.dumps(response)
        else:
            return json.dumps(deploy_results, indent=4)
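Note: the deploy/.config file written above is plain JSON with 'named' and 'timestamped' deployment lists, so reading the archive back only needs the standard json module. A small usage sketch (list_archived_deployments is an illustrative name, not part of mm):

import json
import os

def list_archived_deployments(project_location):
    # Read deploy/.config as written above and return both deployment lists.
    config_path = os.path.join(project_location, 'deploy', '.config')
    if not os.path.isfile(config_path):
        return {'named': [], 'timestamped': []}
    with open(config_path) as f:
        data = json.load(f)
    return data.get('deployments', {'named': [], 'timestamped': []})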