Example #1
File: unittest.py Project: azam/mm
    def execute(self):
        sfdc_client = config.sfdc_client
 
        empty_package_xml = util.get_empty_package_xml_contents()
        tmp, tmp_unpackaged = util.put_tmp_directory_on_disk(True)
        util.put_empty_package_xml_in_directory(tmp_unpackaged, empty_package_xml)
        zip_file = util.zip_directory(tmp, tmp)
        deploy_params = {
            "zip_file"          : zip_file,
            "rollback_on_error" : True,
            "ret_xml"           : True,
            "classes"           : self.params.get('classes', []),
            "debug_categories"  : self.params.get('debug_categories', [])
        }
        deploy_result = sfdc_client.deploy(deploy_params,is_test=True)
        #debug(deploy_result)
        d = xmltodict.parse(deploy_result,postprocessor=util.xmltodict_postprocessor)
        if int(float(util.SFDC_API_VERSION)) >= 29:
            result = d["soapenv:Envelope"]["soapenv:Body"]['checkDeployStatusResponse']['result']['details']['runTestResult']
        else:
            result = d["soapenv:Envelope"]["soapenv:Body"]['checkDeployStatusResponse']['result']['runTestResult']

        try:
            result['log'] = d["soapenv:Envelope"]["soapenv:Header"]["DebuggingInfo"]["debugLog"]
        except:
            result['log'] = 'Log not available.'

        shutil.rmtree(tmp)

        if self.args.respond_with_html:
            html = util.generate_html_response(self.args.operation, result, self.params)
            return util.generate_success_response(html, "html")
        else:
            return result
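The postprocessor hook used above is project-specific (util.xmltodict_postprocessor), but xmltodict's general contract for a postprocessor is a callable that receives (path, key, value) and returns a (key, value) pair, or None to drop the item. A minimal, hypothetical sketch that coerces numeric text nodes to ints:

import xmltodict

def coerce_ints(path, key, value):
    # Turn numeric text values into ints; leave everything else untouched.
    try:
        return key, int(value)
    except (ValueError, TypeError):
        return key, value

doc = xmltodict.parse('<result><numberRun>3</numberRun><name>tests</name></result>',
                      postprocessor=coerce_ints)
print(doc['result']['numberRun'] + 1)  # 4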
Example #2
def verifyRuntimeConstraints(log, assertion):
    calls = xml.parse(log)[u"history"][u"call"]
    if not isinstance(calls, list):
        # xmltodict quirk: a single <call> element parses to a dict, not a list
        calls = [calls]
    processed = []
    for call in calls:
        invocation = (call[u"methodName"],
                      time.strptime(call[u"startTime"], "%Y/%m/%d %H:%M:%S"))
        processed.append(invocation)
    # sort by start time so the index-based before/after checks below are valid
    processed.sort(key=lambda x: x[1])

    offended = []
    # this is a sufficient basis
    for statement in assertion.statements:
        # print statement
        if statement.action == "return":
            continue
        failed = True
        for i in xrange(len(processed)):
            if str(statement.object) == processed[i][0]:
                satisfiedAll = True
                for modifier in statement.modifiers:
                    # proceed only if it's a valid modifier
                    if modifier:
                        satisfied = False
                        if modifier[0] == 'after':
                            for j in xrange(0, i):
                                if str(modifier[1]) == processed[j][0]:
                                    satisfied = True
                                    break
                        elif modifier[0] == 'before':
                            for j in xrange(i, len(processed)):
                                if str(modifier[1]) == processed[j][0]:
                                    satisfied = True
                                    break
                        elif modifier[0] == 'less' or modifier[0] == 'more':
                            num = sum([
                                1 for call in processed
                                if str(statement.object) == call[0]
                            ])
                            # print modifier, num
                            if modifier[0] == 'less':
                                satisfied = True if num < modifier[
                                    1] else satisfied
                            else:
                                satisfied = True if num > modifier[
                                    1] else satisfied

                        if not satisfied:
                            satisfiedAll = False

                if satisfiedAll:
                    failed = False

        if failed ^ statement.negated:
            offended.append(statement)

    return offended
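The log format consumed above is project-specific (and its xml module appears to be an xmltodict alias), so the XML below is hypothetical; it sketches the call-history shape the function expects and the same parse-and-sort step:

import time
import xmltodict

log = """<history>
  <call><methodName>login</methodName><startTime>2014/01/01 10:00:00</startTime></call>
  <call><methodName>logout</methodName><startTime>2014/01/01 10:05:00</startTime></call>
</history>"""

calls = xmltodict.parse(log)["history"]["call"]
processed = sorted(((c["methodName"],
                     time.strptime(c["startTime"], "%Y/%m/%d %H:%M:%S"))
                    for c in calls),
                   key=lambda x: x[1])
print([name for name, _ in processed])  # ['login', 'logout']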
Example #3
    def __init__(self, filename):
        """Initializes InformationNeed object with data from XML file
        mentioned in arg, and stores that data in a dictionary attribute for
        easier access and manipulation.

        :param filename: Filename of XML file to extract data from.
        """
        with open(filename, 'r') as infile:
            # Ensure that data only resides on a single line
            data = infile.read().replace('\n', '')

        if data is not None:

            self.dict = dict()
            # All XML contents are within 'query' tags, hence all other tags
            # are nested under another dictionary with 'query' key value
            # after parsing with xmltodict.
            temp_dict = xmltodict.parse(data)['query']
            # Convert from ordered dictionary to dictionary data structure
            temp_dict = dict(temp_dict)

            for key in temp_dict:
                if key == u'description':
                    # Remove "Relevant documents will describe " (33 chars)
                    # which appears in all information need files' description
                    self.dict[key] = temp_dict[key][33:]
                else:
                    self.dict[key] = temp_dict[key]

        else:
            # File is empty
            raise InformationNeed(filename)
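A minimal sketch of the <query> structure this constructor assumes, using a hypothetical information-need file; the 33-character slice mirrors the boilerplate prefix stripped above:

import xmltodict

xml = """<query>
  <num>1</num>
  <title>wireless charging</title>
  <description>Relevant documents will describe wireless charging devices.</description>
</query>"""

parsed = dict(xmltodict.parse(xml)['query'])
print(parsed['title'])             # 'wireless charging'
print(parsed['description'][33:])  # 'wireless charging devices.'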
Example #4
    def __init__(self, filename):
        """Initializes Patent object with data from XML file mentioned in arg,
        and stores that data in a dictionary attribute for easier access and
        manipulation.

        :param filename: Filename of XML patent file to extract data from.
        """
        with open(filename, 'r') as infile:
            # Ensure that data only resides on a single line
            data = infile.read().replace('\n', '')

        if data is not None:

            self.dict = dict()
            # All XML contents are within 'doc' and 'str' tags consecutively,
            # hence after parsing with xmltodict, all other tags are nested
            # under a list of ordered dictionaries with u'@name' or u'#text'
            # keys and XML element or content values respectively.
            temp_dict = xmltodict.parse(data)['doc']['str']

            for ord_dict in temp_dict:
                # Ignore empty fields
                if u'#text' in ord_dict.keys():
                    # Simplify dictionary structure
                    self.dict[ord_dict[u'@name']] = ord_dict[u'#text']

        else:
            # File is empty
            raise PatentFileException(filename)
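A minimal sketch of the <doc>/<str> layout this constructor assumes, using a hypothetical patent file; empty fields carry no u'#text' key, which is why they are skipped above:

import xmltodict

xml = """<doc>
  <str name="Title">Wireless charger</str>
  <str name="Abstract">A device for charging without cables.</str>
  <str name="Kind Code"></str>
</doc>"""

fields = {}
for entry in xmltodict.parse(xml)['doc']['str']:
    if '#text' in entry:  # the empty "Kind Code" field has no '#text' key
        fields[entry['@name']] = entry['#text']
print(fields['Title'])  # 'Wireless charger'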
Example #5
    def execute(self):
        sfdc_client = config.sfdc_client
 
        empty_package_xml = util.get_empty_package_xml_contents()
        tmp, tmp_unpackaged = util.put_tmp_directory_on_disk(True)
        util.put_empty_package_xml_in_directory(tmp_unpackaged, empty_package_xml)
        zip_file = util.zip_directory(tmp, tmp)
        deploy_params = {
            "zip_file"          : zip_file,
            "rollback_on_error" : True,
            "ret_xml"           : True,
            "classes"           : self.params.get('classes', []),
            "debug_categories"  : self.params.get('debug_categories', [])
        }
        deploy_result = sfdc_client.deploy(deploy_params,is_test=True)
        #debug(deploy_result)
        d = xmltodict.parse(deploy_result,postprocessor=util.xmltodict_postprocessor)
        if int(float(util.SFDC_API_VERSION)) >= 29:
            result = d["soapenv:Envelope"]["soapenv:Body"]['checkDeployStatusResponse']['result']['details']['runTestResult']
        else:
            result = d["soapenv:Envelope"]["soapenv:Body"]['checkDeployStatusResponse']['result']['runTestResult']

        try:
            result['log'] = d["soapenv:Envelope"]["soapenv:Header"]["DebuggingInfo"]["debugLog"]
        except:
            result['log'] = 'Log not available.'

        shutil.rmtree(tmp)

        if self.args.respond_with_html:
            html = util.generate_html_response(self.args.operation, result, self.params)
            return util.generate_success_response(html, "html")
        else:
            return result
Example #6
 def parse_dom_nets(self, xmldesc):
   result = list()
   xmld = xmltodict.parse(xmldesc)
   interfaces = xmld['domain']['devices']['interface']
   if type(interfaces) is not list:
     result.append(interfaces['target']['@dev'])
   else:
     for i in interfaces:
       result.append(i['target']['@dev'])
   return result
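The single-element quirk handled above (a lone <interface> parses to a dict rather than a list) can also be avoided up front with xmltodict's force_list argument; a minimal sketch with a trimmed-down, hypothetical libvirt domain description:

import xmltodict

xmldesc = """<domain>
  <devices>
    <interface type="bridge">
      <target dev="vnet0"/>
    </interface>
  </devices>
</domain>"""

# force_list makes 'interface' always come back as a list, even with one element,
# so the type check above becomes unnecessary.
xmld = xmltodict.parse(xmldesc, force_list=('interface',))
print([i['target']['@dev'] for i in xmld['domain']['devices']['interface']])  # ['vnet0']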
Example #7
def verifyRuntimeConstraints(log, assertion):
  calls = xml.parse(log)[u"history"][u"call"]
  if not isinstance(calls, list):
    # xmltodict quirk: a single <call> element parses to a dict, not a list
    calls = [calls]
  processed = []
  for call in calls:
    invocation = (call[u"methodName"],
                  time.strptime(call[u"startTime"], "%Y/%m/%d %H:%M:%S"))
    processed.append(invocation)
  # sort by start time so the index-based before/after checks below are valid
  processed.sort(key = lambda x: x[1])

  offended = []
  # this is a sufficient basis
  for statement in assertion.statements:
    # print statement
    if statement.action == "return":
      continue
    failed = True
    for i in xrange(len(processed)):
      if str(statement.object) == processed[i][0]:
        satisfiedAll = True
        for modifier in statement.modifiers:
          # proceed only if it's a valid modifier
          if modifier:
            satisfied = False
            if modifier[0] == 'after':
              for j in xrange(0, i):
                if str(modifier[1]) == processed[j][0]:
                  satisfied = True
                  break
            elif modifier[0] == 'before':
              for j in xrange(i, len(processed)):
                if str(modifier[1]) == processed[j][0]:
                  satisfied = True
                  break
            elif modifier[0] == 'less' or modifier[0] == 'more':
              num = sum([1 for call in processed if str(statement.object) == call[0]])
              # print modifier, num
              if modifier[0] == 'less':
                satisfied = True if num < modifier[1] else satisfied
              else:
                satisfied = True if num > modifier[1] else satisfied

            if not satisfied:
              satisfiedAll = False

        if satisfiedAll:
          failed = False

    if failed ^ statement.negated:
      offended.append(statement)

  return offended
Example #8
 def __init__(self, filename):
     with open(filename, 'r') as infile:
         data = infile.read().replace('\n', '')
     if data is not None:
         temp_dict = xmltodict.parse(data)
         self.dict = dict()
         for each in temp_dict['doc']['str']:
             if u'#text' in each.keys():
                 self.dict[each[u'@name']] = each[u'#text']
     else:
         raise PatentFileException(filename)
Example #9
 def __init__(self, filename):
     with open(filename, 'r') as infile:
         data = infile.read().replace('\n', '')
     if data is not None:
         temp_dict = xmltodict.parse(data)
         self.dict = dict()
         for each in temp_dict['doc']['str']:
             if u'#text' in each.keys():
                 self.dict[each[u'@name']] = each[u'#text']
     else:
         raise PatentFileException(filename)
Example #10
    def init_config(self):
        if not os.path.exists(self.settings_dict["sou_public_html"] + "/app/etc/local.xml"):
            print(Colors.FAIL + "Magento 1 Configuration doesn't exist, exiting" + Colors.ENDC)
            exit(1)
        with open(self.settings_dict["sou_public_html"] + "/app/etc/local.xml", "r") as f:
            data = f.read()
            f.close()
        try:
            doc = xmltodict.parse(data)
            config = json.dumps(doc)
            self.m1_config_json = json.loads(config)
        except ExpatError:
            doc = xmltodict.parse(data[2:])
            config = json.dumps(doc)
            self.m1_config_json = json.loads(config)

        if self.m1_config_json["config"]["global"]["resources"]["db"]["table_prefix"] == None:
            self.prefix = ""
        else:
            self.prefix = self.m1_config_json["config"]["global"]["resources"]["db"]["table_prefix"]
        print(self.m1_config_json)
        self.configure_database()
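The json.dumps/json.loads round trip above is a common way to turn xmltodict's nested OrderedDicts into plain dicts and lists, and empty elements such as <table_prefix/> parse to None, which is what the prefix check is guarding against. A minimal sketch with a stripped-down, hypothetical local.xml:

import json
import xmltodict

xml = "<config><global><resources><db><table_prefix/></db></resources></global></config>"
plain = json.loads(json.dumps(xmltodict.parse(xml)))
print(plain["config"]["global"]["resources"]["db"]["table_prefix"])  # None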
Example #11
 def __init__(self, filename):
     with open(filename, 'r') as infile:
         data = infile.read().replace('\n', '')
     if data is not None:
         temp_dict = xmltodict.parse(data)
         self.dict = dict()
         temp_dict = dict(temp_dict['query'])
         for item in temp_dict:
             if item == u'description':
                 self.dict[item] = temp_dict[item][33:]
             else:
                 self.dict[item] = temp_dict[item]
     else:
         raise InformationNeed(filename)
Example #12
 def __init__(self, filename):
     with open(filename, 'r') as infile:
         data = infile.read().replace('\n', '')
     if data is not None:
         temp_dict = xmltodict.parse(data)
         self.dict = dict()
         temp_dict = dict(temp_dict['query'])
         for item in temp_dict:
             if item == u'description':
                 self.dict[item] = temp_dict[item][33:]
             else:
                 self.dict[item] = temp_dict[item]
     else:
         raise InformationNeed(filename)
Example #13
    def request(self, url, log=False, **kwargs):
        """

        :param url: url template
        :param kwargs: attrs for url build
        :return:
        """
        url = url.format(address=self.address, **kwargs or {})
        if log:
            self.logger.info(url)

        request = self.session.get(url, timeout=URL_REQUEST_TIMEOUT)
        if not request.text:
            return {}

        return xmltodict.parse(request.text.encode("utf-8"), attr_prefix='')
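Passing attr_prefix='' drops the '@' prefix that xmltodict normally puts on attribute keys; a minimal sketch with a made-up response body:

import xmltodict

body = '<status code="200"><device name="cam1"/></status>'
print(xmltodict.parse(body)['status']['@code'])                           # '200'
print(xmltodict.parse(body, attr_prefix='')['status']['code'])            # '200'
print(xmltodict.parse(body, attr_prefix='')['status']['device']['name'])  # 'cam1'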
Example #14
 def listMetadata(self, metadata_type, retXml=True, version=26.0):
     # obj = { 'type': 'ApexClass' }
     # response = mclient.service.listMetadata(obj, 25.0)
     self._sforce.set_options(retxml=retXml)
     if type(metadata_type) is not dict and type(metadata_type) is not list:
         obj = { 'type' : metadata_type }
     else:
         obj = metadata_type
     list_result = self._handleResultTyping(self._sforce.service.listMetadata(obj, version))
     self._sforce.set_options(retxml=False)
     if retXml == True:
         try:
             list_result_dict = xmltodict.parse(list_result,postprocessor=mm_util.xmltodict_postprocessor)
             return list_result_dict['soapenv:Envelope']["soapenv:Body"]["listMetadataResponse"]["result"]
         except:
             return []
     return list_result
Example #15
File: apex.py Project: akshayas/mm
 def runTests(self, params):
     #ERROR, WARN, INFO, DEBUG, FINE, FINER, FINEST
     #Db, Workflow, Validation, Callout, Apex_code, Apex_profiling, All
     retXml = params.get('retXml', True)
     self._sforce.set_options(retxml=retXml)
     if 'debug_categories' in params:
         self._setHeaders('runTests', debug_categories=params['debug_categories'])
     payload = {
         'namespace' : params.get('namespace', None),
         'allTests'  : params.get('run_all_tests', False),
         'classes'   : params.get('classes', [])
     }
     test_result = self._handleResultTyping(self._sforce.service.runTests(payload))
     self._sforce.set_options(retxml=False)
     if retXml == True:
         return xmltodict.parse(test_result,postprocessor=mm_util.xmltodict_postprocessor)
     else:
         test_result['log'] = self.getDebugLog()
     return test_result
Example #16
 def runTests(self, params):
     #ERROR, WARN, INFO, DEBUG, FINE, FINER, FINEST
     #Db, Workflow, Validation, Callout, Apex_code, Apex_profiling, All
     retXml = params.get('retXml', True)
     self._sforce.set_options(retxml=retXml)
     if 'debug_categories' in params:
         self._setHeaders('runTests',
                          debug_categories=params['debug_categories'])
     payload = {
         'namespace': params.get('namespace', None),
         'allTests': params.get('run_all_tests', False),
         'classes': params.get('classes', [])
     }
     test_result = self._handleResultTyping(
         self._sforce.service.runTests(payload))
     self._sforce.set_options(retxml=False)
     if retXml == True:
         return xmltodict.parse(test_result,
                                postprocessor=util.xmltodict_postprocessor)
     else:
         test_result['log'] = self.getDebugLog()
     return test_result
Example #17
 def listMetadata(self, metadata_type, retXml=True, version=26.0):
     # obj = { 'type': 'ApexClass' }
     # response = mclient.service.listMetadata(obj, 25.0)
     self._sforce.set_options(retxml=retXml)
     if type(metadata_type) is not dict and type(metadata_type) is not list:
         obj = {'type': metadata_type}
     else:
         obj = metadata_type
     list_result = self._handleResultTyping(
         self._sforce.service.listMetadata(obj, version))
     debug('list_result ------>')
     debug(list_result)
     self._sforce.set_options(retxml=False)
     if retXml == True:
         try:
             list_result_dict = xmltodict.parse(
                 list_result, postprocessor=util.xmltodict_postprocessor)
             return list_result_dict['soapenv:Envelope']["soapenv:Body"][
                 "listMetadataResponse"]["result"]
         except:
             return []
     return list_result
Example #18
def readRecords(records,LOGGER=settings.LOGGER):
  '''
  records: [(bibcode,JSON_fingerprint),...]
  '''
  h = hash(json.dumps(records))
  if not records:
    LOGGER.debug("No records given")
    return []

  targets = dict(records)

  s = time.time()
  records = ADSRecords('full','XML')
  failures = []
  for bibcode in targets.keys():
    try:
      records.addCompleteRecord(bibcode)
    except KeyboardInterrupt:
      raise
    except:
      failures.append(bibcode)
      LOGGER.warning("[%s] ADSRecords failed" % bibcode)
  records = records.export()
  if not records.content:
    return []
  ttc = time.time()-s
  rate = len(targets)/ttc
  if failures:
    LOGGER.warning('ADSRecords failed to retrieve %s records' % len(failures))
  LOGGER.info('ADSRecords took %0.1fs to query %s records (%0.1f rec/s)\t[%s]' % (ttc,len(targets),rate,h))

  records = ensureList(xmltodict.parse(records.__str__())['records']['record'])
  assert(len(records)==len(targets)-len(failures))

  # with open('%s.pickle' % uuid.uuid4(),'w') as fp:
  #   pickle.dump(records,fp)
  return records,targets
Example #19
    def retrieve(self, **kwargs):
        # request = {
        #   'RetrieveRequest': {
        #     'unpackaged': {
        #       'types': {
        #         'ApexTrigger': '*'
        #       }
        #     },
        #     'apiVersion': {
        #       25.0
        #     }
        #   }
        # }
        # package = {
        #     'unpackaged' : {
        #         'types' : [
        #             {
        #                 "members": "*", 
        #                 "name": "ApexClass"
        #             }
        #         ]
        #     }
        # }
        package_dict = None
        request_payload = None
        
        if 'package' in kwargs and type(kwargs['package']) is not dict: 
            #if package is location of package.xml, we'll parse the xml and create a request
            package_dict = xmltodict.parse(mm_util.get_file_as_string(kwargs['package']))
            api_version = package_dict['Package']['version']
            package_dict['unpackaged'] = package_dict.pop('Package')
            package_dict['unpackaged'].pop('version')
            package_dict['unpackaged'].pop("@xmlns", None)
            package_dict['unpackaged'].pop("#text", None)
            package_dict['apiVersion'] = api_version
            types = package_dict['unpackaged']['types']
            if type(types) is not list:
                types = [types]
            if type(package_dict['unpackaged']['types']) is not list:
                package_dict['unpackaged']['types'] = [package_dict['unpackaged']['types']]
            requested_types = []
            if 'type' in kwargs and kwargs['type'] != None and kwargs['type'] != '': #if the request is for a certain type, only request that type
                for i, val in enumerate(types):
                    if val['name'] == kwargs['type']:
                        requested_types.append(val)
                package_dict['unpackaged']['types'] = requested_types
                types = requested_types
            for i, val in enumerate(types):
                try:
                    package_dict['unpackaged']['types'][i].pop("#text", None)
                except:
                    package_dict['unpackaged']['types'].pop("#text", None)

            #if custom object is asterisked, we need to explicitly retrieve standard objects
            for t in package_dict['unpackaged']['types']:
                if 'name' in t and t['name'] == 'CustomObject':
                    if 'members' in t and type(t['members']) is not list:
                        if t['members'] == "*":
                            mlist = self.listMetadata('CustomObject', False)
                            objs = []
                            for obj in mlist:
                                if '__c' not in obj['fullName']:
                                    objs.append(obj['fullName'])
                            objs.append("*")
                            objs.sort()
                            t['members'] = objs

            request_payload = package_dict

        elif 'package' in kwargs and type(kwargs['package']) is dict:
            package = kwargs['package']
            if 'unpackaged' not in package:
                #{ "ApexClass"    : ["MultiselectControllerTest","MultiselectController"] }
                type_array = []
                for i, metadata_type in enumerate(package):
                    member_value = package[metadata_type]
                    type_array.append({ "name" : metadata_type, "members" : member_value })

                package = {
                    'unpackaged' : {
                        'types' : type_array
                    },
                    'apiVersion' : mm_util.SFDC_API_VERSION
                }
            
            #if custom object is asterisked, we need to explicitly retrieve standard objects
            for t in package['unpackaged']['types']:
                if 'name' in t and t['name'] == 'CustomObject':
                    if 'members' in t and type(t['members']) is not list:
                        if t['members'] == "*":
                            mlist = self.listMetadata('CustomObject', False)
                            objs = []
                            for obj in mlist:
                                if '__c' not in obj['fullName']:
                                    objs.append(obj['fullName'])
                            objs.append("*")
                            objs.sort()
                            t['members'] = objs
            
            request_payload = package
        
        result = self._handleResultTyping(self._sforce.service.retrieve(request_payload))
        if result.done == False:
            self._waitForRequest(result.id)
            return self._getRetrieveBody(result.id)
        else:
            return result
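For reference, parsing a minimal package.xml with xmltodict yields the shape that the first branch above reworks into a retrieve request; note that a single <types> block comes back as a dict, which is why the code coerces it to a list. A sketch with a hypothetical package:

import xmltodict

package_xml = """<?xml version="1.0" encoding="UTF-8"?>
<Package xmlns="http://soap.sforce.com/2006/04/metadata">
    <types>
        <members>*</members>
        <name>ApexClass</name>
    </types>
    <version>29.0</version>
</Package>"""

package_dict = xmltodict.parse(package_xml)
print(package_dict['Package']['version'])        # '29.0'
print(package_dict['Package']['types']['name'])  # 'ApexClass' (a dict, not a list)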
Example #20
    def retrieve(self, **kwargs):
        # request = {
        #   'RetrieveRequest': {
        #     'unpackaged': {
        #       'types': {
        #         'ApexTrigger': '*'
        #       }
        #     },
        #     'apiVersion': {
        #       25.0
        #     }
        #   }
        # }
        # package = {
        #     'unpackaged' : {
        #         'types' : [
        #             {
        #                 "members": "*", 
        #                 "name": "ApexClass"
        #             }
        #         ]
        #     }
        # }
        package_dict = None
        request_payload = None

        debug('retrieve request: ')
        debug(kwargs['package'])
        
        if 'package' in kwargs and type(kwargs['package']) is not dict: 
            #if package is location of package.xml, we'll parse the xml and create a request
            package_dict = xmltodict.parse(util.get_file_as_string(kwargs['package']))
            api_version = package_dict['Package']['version']
            package_dict['unpackaged'] = package_dict.pop('Package')
            package_dict['unpackaged'].pop('version')
            package_dict['unpackaged'].pop("@xmlns", None)
            package_dict['unpackaged'].pop("#text", None)
            package_dict['apiVersion'] = api_version
            types = package_dict['unpackaged']['types']
            if type(types) is not list:
                types = [types]
            if type(package_dict['unpackaged']['types']) is not list:
                package_dict['unpackaged']['types'] = [package_dict['unpackaged']['types']]
            requested_types = []
            if 'type' in kwargs and kwargs['type'] != None and kwargs['type'] != '': #if the request is for a certain type, only request that type
                for i, val in enumerate(types):
                    if val['name'] == kwargs['type']:
                        requested_types.append(val)
                package_dict['unpackaged']['types'] = requested_types
                types = requested_types
            for i, val in enumerate(types):
                try:
                    package_dict['unpackaged']['types'][i].pop("#text", None)
                except:
                    package_dict['unpackaged']['types'].pop("#text", None)

            #if custom object is asterisked, we need to explicitly retrieve standard objects
            for t in package_dict['unpackaged']['types']:
                if 'name' in t:
                    metadata_type_def = util.get_meta_type_by_name(t['name'])
                    if metadata_type_def is not None and 'inFolder' in metadata_type_def and metadata_type_def['inFolder']: #TODO: right now this skips retrieval of unknown types, we should use describe data
                        if 'members' in t and type(t['members']) is not list:
                            if t['members'] == "*" or t['members'] == []:
                                mlist = self.listMetadata(t['name'], False)
                                objs = []
                                for obj in mlist:
                                    objs.append(obj['fullName'])
                                objs.sort()
                                t['members'] = objs
                    elif t['name'] == 'CustomObject':
                        if 'members' in t and type(t['members']) is not list:
                            if t['members'] == "*":
                                mlist = self.listMetadata('CustomObject', False)
                                objs = []
                                for obj in mlist:
                                    if '__c' not in obj['fullName']:
                                        objs.append(obj['fullName'])
                                objs.append("*")
                                objs.sort()
                                t['members'] = objs

            request_payload = package_dict

        elif 'package' in kwargs and type(kwargs['package']) is dict:
            package = kwargs['package']
            if package == {}:
                raise MMException('Invalid package')
            if 'unpackaged' not in package:
                #{ "ApexClass"    : ["MultiselectControllerTest","MultiselectController"] }
                type_array = []
                for i, metadata_type in enumerate(package):
                    member_value = package[metadata_type]
                    type_array.append({ "name" : metadata_type, "members" : member_value })

                package = {
                    'unpackaged' : {
                        'types' : type_array
                    },
                    'apiVersion' : util.SFDC_API_VERSION
                }
            
            #if custom object is asterisked, we need to explicitly retrieve standard objects
            for t in package['unpackaged']['types']:
                debug('----> ')
                debug(t)
                if 'name' in t:
                    metadata_type_def = util.get_meta_type_by_name(t['name'])
                    debug(metadata_type_def)
                    if metadata_type_def is not None and 'inFolder' in metadata_type_def and metadata_type_def['inFolder']: #TODO: right now this skips retrieval of unknown types, we should use describe data
                        if 'members' in t and (t['members'] == "*" or t['members'] == []):
                            #list_request_name = self.__transformFolderMetadataNameForListRequest(t['name'])
                            #mlist = self.listMetadata(list_request_name, False)
                            mlist = self.listMetadataAdvanced(t['name'])
                            objs = []
                            for obj in mlist:
                                debug('---obj')
                                debug(obj)
                                objs.append(obj['title'])
                                if 'children' in obj and type(obj['children']) is list:
                                    for child in obj['children']:
                                        objs.append(obj['title']+"/"+child['title'])
                            objs.sort()
                            t['members'] = objs
                    elif t['name'] == 'CustomObject':              
                        if 'members' in t and type(t['members']) is not list:
                            if t['members'] == "*":
                                mlist = self.listMetadata('CustomObject', False)
                                objs = []
                                for obj in mlist:
                                    if '__c' not in obj['fullName']:
                                        objs.append(obj['fullName'])
                                objs.append("*")
                                objs.sort()
                                t['members'] = objs
            
            request_payload = package
            debug('---request payload---')
            debug(request_payload)
        result = self._handleResultTyping(self._sforce.service.retrieve(request_payload))
        
        debug('result of retrieve: \n\n')
        debug(result)

        if result.done == False:
            debug('---> result is not done')
            if int(float(util.SFDC_API_VERSION)) > 30:
                return self._waitForRetrieveRequest(result.id) # will loop until done
            else:
                self._waitForRetrieveRequest(result.id) # will loop until done
                return self._getRetrieveBody(result.id)
        else:
            return result
Example #21
File: metadata.py Project: azam/mm
    def execute(self):
        project = config.project
        sfdc_client = config.sfdc_client
        files = self.params.get('files', None)
        for f in files:
            if '-meta.xml' in f:
                corresponding_file = f.split('-meta.xml')[0]
                if corresponding_file not in files:
                    files.append(corresponding_file)
        for f in files:
            if '-meta.xml' in f:
                continue
            file_ext = f.split('.')[-1]
            metadata_type = util.get_meta_type_by_suffix(file_ext)
            if metadata_type['metaFile'] == True:
                corresponding_file = f + '-meta.xml'
                if corresponding_file not in files:
                    files.append(corresponding_file)

        metadata_package_dict = util.get_metadata_hash(files)
        tmp, tmp_unpackaged = util.put_tmp_directory_on_disk(True)
        package_xml = util.get_package_xml_contents(metadata_package_dict)
        util.put_package_xml_in_directory(tmp_unpackaged, package_xml, True)
        empty_package_xml = util.get_empty_package_xml_contents()
        util.put_empty_package_xml_in_directory(tmp_unpackaged, empty_package_xml)
        zip_file = util.zip_directory(tmp, tmp)
        
        purge_on_delete_setting = config.connection.get_plugin_client_setting("mm_purge_on_delete", False);
        if purge_on_delete_setting:
            describe_result = config.sfdc_client.describeMetadata(retXml=False)
            if describe_result.testRequired == True:
                purge_on_delete_setting = False

        deploy_params = {
            "zip_file"          : zip_file,
            "rollback_on_error" : True,
            "ret_xml"           : True,
            "purge_on_delete"   : purge_on_delete_setting
        }
        delete_result = sfdc_client.delete(deploy_params)
        d = xmltodict.parse(delete_result,postprocessor=util.xmltodict_postprocessor)
        shutil.rmtree(tmp)
        result = d["soapenv:Envelope"]["soapenv:Body"]['checkDeployStatusResponse']['result']
        if result['success'] == True:
            removed = []
            for f in files:
                try:
                    file_ext = f.split('.')[-1]
                    metadata_type = util.get_meta_type_by_suffix(file_ext)
                    if metadata_type == None or not 'directoryName' in metadata_type:
                        continue;
                    directory = metadata_type['directoryName']
                    filepath = os.path.join(project.location, "src", directory, f)
                    metapath = os.path.join(project.location, "src", directory, f + '-meta.xml')
                    os.remove(filepath)
                    os.remove(metapath)
                    # remove the entry in file properties
                    project.conflict_manager.remove_from_local_store(f)
                    removed.append(f)
                except Exception, e:
                    print e.message
            return util.generate_success_response("Removed metadata files: " + (",".join(removed)))
Example #22
File: metadata.py Project: azam/mm
    def execute(self):        
        project = config.project

        files = self.params.get('files', None)
        use_tooling_api = config.connection.get_plugin_client_setting('mm_compile_with_tooling_api', False)
        check_for_conflicts = config.connection.get_plugin_client_setting('mm_compile_check_conflicts', False)

        compiling_apex_metadata = True
        for f in files:
            if f.split('.')[-1] not in util.TOOLING_API_EXTENSIONS:
                #cannot use tooling api
                compiling_apex_metadata = False
                break

        #when compiling apex metadata, check to see if it is newer on the server
        if check_for_conflicts and compiling_apex_metadata:
            if 'action' not in self.params or self.params['action'] != 'overwrite':
                has_conflict, msg = config.project.conflict_manager.check_for_conflicts(files)
                if has_conflict:
                    return msg
     
        #use tooling api here, if possible
        if use_tooling_api == True and compiling_apex_metadata and int(float(util.SFDC_API_VERSION)) >= 27:
            if 'metadata_container' not in project.settings or project.settings['metadata_container'] == None:
                container_id = project.sfdc_client.get_metadata_container_id()
                new_settings = project.settings
                new_settings['metadata_container'] = container_id
                project.put_settings_file(new_settings)
            else:
                container_id = project.settings['metadata_container']
            
            file_ext = files[0].split('.')[-1]
            try:
                result = project.sfdc_client.compile_with_tooling_api(files, container_id)
            except MetadataContainerException as e:
                project.sfdc_client.delete_mavensmate_metadatacontainers_for_this_user()
                response = project.sfdc_client.new_metadatacontainer_for_this_user()
                project.update_setting("metadata_container",response["id"])
                return CompileSelectedMetadataCommand(params=self.params,args=self.args).execute()

            if 'Id' in result and 'State' in result:
                if result['State'] == 'Completed':
                    project.conflict_manager.refresh_local_store(files=files)
                return util.generate_response(result)

        #the user has either chosen not to use the tooling api, or it's non apex metadata
        else:
            try:
                for f in files:
                    if '-meta.xml' in f:
                        corresponding_file = f.split('-meta.xml')[0]
                        if corresponding_file not in files:
                            files.append(corresponding_file)
                for f in files:
                    if '-meta.xml' in f:
                        continue
                    file_ext = f.split('.')[-1]
                    metadata_type = util.get_meta_type_by_suffix(file_ext)
                    if metadata_type == None:
                        if sys.platform == "win32":
                            dir_parts = f.split("\\")
                        else:
                            dir_parts = f.split("/")
                        if 'documents' in dir_parts:
                            metadata_type = util.get_meta_type_by_name("Document") 
                    if metadata_type != None and 'metaFile' in metadata_type and metadata_type['metaFile'] == True:
                        corresponding_file = f + '-meta.xml'
                        if corresponding_file not in files:
                            files.append(corresponding_file)

                metadata_package_dict = util.get_metadata_hash(files)
                #debug(metadata_package_dict)
                tmp = util.put_tmp_directory_on_disk()
                os.makedirs(os.path.join(tmp,"unpackaged"))
                #copy files from project directory to tmp
                for full_file_path in files:
                    if 'package.xml' in full_file_path:
                        continue
                    if config.is_windows: 
                        destination = os.path.join(tmp,'unpackaged',full_file_path.split('\src\\')[1])
                    else:
                        destination = os.path.join(tmp,'unpackaged',full_file_path.split('/src/')[1])
                    destination_directory = os.path.dirname(destination)
                    if not os.path.exists(destination_directory):
                        os.makedirs(destination_directory)
                    shutil.copy2(full_file_path, destination_directory)

                package_xml = util.get_package_xml_contents(metadata_package_dict)
                util.put_package_xml_in_directory(os.path.join(tmp,"unpackaged"), package_xml)
                zip_file = util.zip_directory(tmp, tmp)
                deploy_params = {
                    "zip_file"          : zip_file,
                    "rollback_on_error" : True,
                    "ret_xml"           : True
                }
                deploy_result = project.sfdc_client.deploy(deploy_params)

                d = xmltodict.parse(deploy_result,postprocessor=util.xmltodict_postprocessor)
                result = d["soapenv:Envelope"]["soapenv:Body"]['checkDeployStatusResponse']['result']
                shutil.rmtree(tmp)

                # Get new properties for the files we just compiled
                if result['success'] == True:
                    project.conflict_manager.refresh_local_store(files=files)

                return json.dumps(result)

            except Exception, e:
                try:
                    shutil.rmtree(tmp)
                except:
                    pass
                return util.generate_error_response(e.message)
Example #23
File: metadata.py Project: azam/mm
    def execute(self):
        project = config.project
        sfdc_client = config.sfdc_client

        metadata_type                   = self.params.get('metadata_type', None)
        api_name                        = self.params.get('api_name', None)
        apex_class_type                 = self.params.get('apex_class_type', None)
        apex_trigger_object_api_name    = self.params.get('apex_trigger_object_api_name', None)
        github_template                 = self.params.get('github_template', None)

        if metadata_type == 'ApexClass' and apex_class_type == None:
            apex_class_type = 'default'

        if api_name == None:
            return util.generate_error_response("You must provide a name for the new metadata.")

        if sfdc_client.does_metadata_exist(object_type=metadata_type, name=api_name) == True:
            mt = util.get_meta_type_by_name(metadata_type)
            filepath = os.path.join(project.location, 'src', mt['directoryName'], api_name+'.'+mt['suffix'])
            fetched = ""
            if not os.path.exists(filepath):
                self.params['files'] = [filepath]
                refresh_selected_metadata(self.params)
                fetched = ", fetched metadata file from server"
            raise MMException("This API name is already in use in your org" + fetched + ".")      

        tmp, tmp_unpackaged = util.put_tmp_directory_on_disk(True)
        
        util.put_skeleton_files_on_disk(metadata_type, api_name, tmp_unpackaged, apex_class_type, apex_trigger_object_api_name, github_template)
        package_xml_body = util.get_package_xml_contents({metadata_type : [ api_name ]})
        util.put_package_xml_in_directory(tmp_unpackaged, package_xml_body)
        zip_file = util.zip_directory(tmp, tmp)
        deploy_params = {
            "zip_file"          : zip_file,
            "rollback_on_error" : True,
            "ret_xml"           : True
        }
        deploy_result = sfdc_client.deploy(deploy_params)
        d = xmltodict.parse(deploy_result,postprocessor=util.xmltodict_postprocessor)
        meta_dir = ""
        files = []
        path = None
        for dirname, dirnames, filenames in os.walk(tmp_unpackaged):
            for filename in filenames:
                if 'package.xml' in filename:
                    continue
                full_file_path = os.path.join(dirname, filename)
                if '-meta.xml' in filename:
                    extension = filename.replace('-meta.xml','').split(".")[-1]
                else:
                    extension = filename.split(".")[-1]
                mt = util.get_meta_type_by_suffix(extension)
                if mt != None: 
                    meta_dir = mt['directoryName']
                    path = os.path.join(project.location, 'src', meta_dir)
                    if not os.path.exists(path):
                        os.makedirs(path)
                    files.append(os.path.join(path, filename))
                elif extension != "xml":
                    continue;
                # only apex files and meta.xml files should make it to here
                shutil.copy(full_file_path, path)
        shutil.rmtree(tmp)
        
        project.update_package_xml_with_metadata(metadata_type, api_name)
        project.conflict_manager.refresh_local_store(files=files)

        return json.dumps(d["soapenv:Envelope"]["soapenv:Body"]['checkDeployStatusResponse']['result'])
Example #24
    def execute(self):        
        project = config.project

        files = self.params.get('files', None)
        use_tooling_api = config.connection.get_plugin_client_setting('mm_compile_with_tooling_api', False)
        check_for_conflicts = config.connection.get_plugin_client_setting('mm_compile_check_conflicts', False)

        compiling_apex_metadata = True
        for f in files:
            if f.split('.')[-1] not in util.TOOLING_API_EXTENSIONS:
                #cannot use tooling api
                compiling_apex_metadata = False
                break

        #when compiling apex metadata, check to see if it is newer on the server
        if check_for_conflicts and compiling_apex_metadata:
            if 'action' not in self.params or self.params['action'] != 'overwrite':
                has_conflict, msg = config.project.conflict_manager.check_for_conflicts(files)
                if has_conflict:
                    return msg
     
        #use tooling api here, if possible
        if use_tooling_api == True and compiling_apex_metadata and int(float(util.SFDC_API_VERSION)) >= 27:
            if 'metadata_container' not in project.settings or project.settings['metadata_container'] == None:
                container_id = project.sfdc_client.get_metadata_container_id()
                new_settings = project.settings
                new_settings['metadata_container'] = container_id
                project.put_settings_file(new_settings)
            else:
                container_id = project.settings['metadata_container']
            
            file_ext = files[0].split('.')[-1]
            try:
                result = project.sfdc_client.compile_with_tooling_api(files, container_id)
            except MetadataContainerException as e:
                project.sfdc_client.delete_mavensmate_metadatacontainers_for_this_user()
                response = project.sfdc_client.new_metadatacontainer_for_this_user()
                project.update_setting("metadata_container",response["id"])
                #return CompileSelectedMetadataCommand(params=self.params,args=self.args).execute()
                #ensure only a single retry
                result = project.sfdc_client.compile_with_tooling_api(files, response["id"])

            if 'Id' in result and 'State' in result:
                if result['State'] == 'Completed':
                    project.conflict_manager.refresh_local_store(files=files)
                return util.generate_response(result)

        #the user has either chosen not to use the tooling api, or it's non apex metadata
        else:
            try:
                for f in files:
                    if '-meta.xml' in f:
                        corresponding_file = f.split('-meta.xml')[0]
                        if corresponding_file not in files:
                            files.append(corresponding_file)
                for f in files:
                    if '-meta.xml' in f:
                        continue
                    file_ext = f.split('.')[-1]
                    metadata_type = util.get_meta_type_by_suffix(file_ext)
                    if metadata_type == None:
                        if sys.platform == "win32":
                            dir_parts = f.split("\\")
                        else:
                            dir_parts = f.split("/")
                        if 'documents' in dir_parts:
                            metadata_type = util.get_meta_type_by_name("Document") 
                    if metadata_type != None and 'metaFile' in metadata_type and metadata_type['metaFile'] == True:
                        corresponding_file = f + '-meta.xml'
                        if corresponding_file not in files:
                            files.append(corresponding_file)

                metadata_package_dict = util.get_metadata_hash(files)
                #debug(metadata_package_dict)
                tmp = util.put_tmp_directory_on_disk()
                os.makedirs(os.path.join(tmp,"unpackaged"))
                #copy files from project directory to tmp
                for full_file_path in files:
                    if 'package.xml' in full_file_path:
                        continue
                    if config.is_windows: 
                        destination = os.path.join(tmp,'unpackaged',full_file_path.split('\src\\')[1])
                    else:
                        destination = os.path.join(tmp,'unpackaged',full_file_path.split('/src/')[1])
                    destination_directory = os.path.dirname(destination)
                    if not os.path.exists(destination_directory):
                        os.makedirs(destination_directory)
                    shutil.copy2(full_file_path, destination_directory)

                package_xml = util.get_package_xml_contents(metadata_package_dict)
                util.put_package_xml_in_directory(os.path.join(tmp,"unpackaged"), package_xml)
                zip_file = util.zip_directory(tmp, tmp)
                deploy_params = {
                    "zip_file"          : zip_file,
                    "rollback_on_error" : True,
                    "ret_xml"           : True
                }
                deploy_result = project.sfdc_client.deploy(deploy_params)

                d = xmltodict.parse(deploy_result,postprocessor=util.xmltodict_postprocessor)
                result = d["soapenv:Envelope"]["soapenv:Body"]['checkDeployStatusResponse']['result']
                shutil.rmtree(tmp)

                # Get new properties for the files we just compiled
                if result['success'] == True:
                    project.conflict_manager.refresh_local_store(files=files)

                return json.dumps(result)

            except Exception, e:
                try:
                    shutil.rmtree(tmp)
                except:
                    pass
                return util.generate_error_response(e.message)
Example #25
 def xmltodict(self, fp, **kwargs):
     """returns a dict as created by xmltodict"""
     return xmltodict_parser.parse(fp, **kwargs)
Example #26
from pyexpat import ExpatError
import lib.xmltodict as xmltodict
import json

with open('local.xml') as f:
    data = f.read()
    try:
        doc = xmltodict.parse(data)
        config = json.dumps(doc)
        config_dict = json.loads(config)
        if config_dict["config"]["global"]["resources"]["db"][
                "table_prefix"] == None:
            prefix = ""
        else:
            prefix = config_dict["config"]["global"]["resources"]["db"][
                "table_prefix"]
        # print(config_dict["config"]["global"]["resources"]["db"]["table_prefix"])
        # config_dict["config"]["global"]["resources"]["default_setup"]["connection"]["host"] = "testing"
        # print(config_dict["config"]["global"]["resources"]["default_setup"]["connection"]["host"])
        # print(xmltodict.unparse(config_dict, pretty=True))
        print(prefix)
    except ExpatError:
        doc = xmltodict.parse(data[2:])
        print(json.dumps(doc))
        config = json.dumps(doc)
Example #27
 def xmltodict(self, fp, **kwargs):
     """returns a dict as created by xmltodict"""
     return xmltodict_parser.parse(fp, **kwargs)
Example #28
def updateRecords(records,LOGGER=settings.LOGGER):

  if not records:
    LOGGER.debug("No records given")
    return []

  targets = dict(records)

  s = time.time()
  records = ADSRecords('full','XML')
  failures = []
  for bibcode in targets.keys():
    try:
      records.addCompleteRecord(bibcode)
    except KeyboardInterrupt:
      raise
    except:
      failures.append(bibcode)
      LOGGER.warning("[%s] ADSRecords failed" % bibcode)
  records = records.export()
  if not records.content:
    return []
  ttc = time.time()-s
  rate = len(targets)/ttc
  if failures:
    LOGGER.warning('ADSRecords failed to retrieve %s records' % len(failures))
  LOGGER.info('ADSRecords took %0.1fs to query %s records (%0.1f rec/s)' % (ttc,len(targets),rate))

  records = ensureList(xmltodict.parse(records.__str__())['records']['record'])
  with open('raw.txt','a') as fp:
    for r in records:
      fp.write('%s' % r)
      fp.write('\n\n')
  assert(len(records)==len(targets)-len(failures))

  #Could send these tasks out on a queue
  completeRecords = []
  for r in records:
    #Define top-level schema that will go in mongo
    cr = {
      'bibcode': r['@bibcode'],
      'JSON_fingerprint': targets[r['@bibcode']],
      'metadata' : {},
    }

    #Find metadata blocks that need merging
    metadataCounter = collections.Counter([entry['@type'] for entry in r['metadata']])
    needsMerging = dict([(k,[]) for k,v in metadataCounter.iteritems() if v>1])

    #Iterate over metadata blocks; directly input single defined blocks
    #and build a 'needsMerging' list to merge in the next step
    for metadataBlock in r['metadata']: 
      for field,data in metadataBlock.iteritems():
        if field in NORMALIZE_SCHEMA:
          metadataBlock[field] = NORMALIZE_SCHEMA[field](data)
      if metadataBlock['@type'] not in needsMerging:
        cr['metadata'].update({metadataBlock['@type']:metadataBlock})
      else: #If it shows up more than once, it needs merging.
        needsMerging[metadataBlock['@type']].append(metadataBlock)
    #Now merge the multiply defined metadataBlocks
    for entryType,data in needsMerging.iteritems():
      cr['metadata'].update({entryType:merge(data,r['@bibcode'],entryType,LOGGER)})
    
    #Finally, we have a complete record
    completeRecords.append(enforceSchema(cr))

  LOGGER.info('Added %s complete records' % len(completeRecords))
  return completeRecords
Example #29
### Redirecting stderr is necessary to determine which messages are shown to the user.
status_comando = os.system("lshw -xml > '" + computerFolder +
                           "/lshw.xml' 2> '" + computerFolder +
                           "/.lshw_error_messages'")
if (status_comando == 0):
    print("OK!")
else:
    print("Error running lshw: " + str(status_comando))

### Remove the temporary file.
os.remove(computerFolder + "/.lshw_error_messages")

lshw_xml = open(computerFolder + "/lshw.xml", 'r')
texto = lshw_xml.read()
lshw_xml.close()
lshw_dict = xmltodict.parse(texto)  # lshw as a dictionary!

# Get the kernel log via dmesg:

print("Getting the kernel log... ")

status_comando = os.system(
    "dmesg > '" + computerFolder + "/dmesg.txt'"
)  # Consider recording the color of each line in this file.
if (status_comando == 0):
    print("OK!")
else:
    print("Error running dmesg: " + str(status_comando))

# Now we initialize the dictionary and assign the data:
Example #30
def convert(xml_file, xml_attribs=True):
    with open(xml_file, "rb") as f:
        d = xmltodict.parse(f, xml_attribs=xml_attribs)
        return json.dumps(d, indent=4)
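A short usage sketch for the helper above; xmltodict.parse accepts a file-like object as well as a string, and xml_attribs=False would drop attribute keys (such as '@debug' below) from the result. The file name is hypothetical:

import json
import xmltodict

with open("settings.xml", "w") as f:
    f.write('<settings debug="true"><host>localhost</host></settings>')

with open("settings.xml", "rb") as f:
    d = xmltodict.parse(f)
print(json.dumps(d, indent=4))
# {"settings": {"@debug": "true", "host": "localhost"}}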
Example #31
    def execute(self):
        project = config.project
        sfdc_client = config.sfdc_client

        metadata_type                   = self.params.get('metadata_type', None)
        github_template                 = self.params.get('github_template', None)
        params                          = self.params.get('params', None)

        if params == None:
            raise MMException('The payload to create metadata has recently changed. If you are using Sublime Text, you likely need to update your MavensMate plugin to 3.4.8+')

        if "api_name" not in params or params["api_name"] == None:
            return util.generate_error_response("You must provide a name for the new metadata.")

        api_name = params.get('api_name')

        if sfdc_client.does_metadata_exist(object_type=metadata_type, name=api_name) == True:
            mt = util.get_meta_type_by_name(metadata_type)
            filepath = os.path.join(project.location, 'src', mt['directoryName'], api_name+'.'+mt['suffix'])
            fetched = ""
            if not os.path.exists(filepath):
                self.params['files'] = [filepath]
                RefreshSelectedMetadataCommand(params=self.params,args=self.args).execute()
                fetched = ", fetched metadata file from server"
            raise MMException("This API name is already in use in your org" + fetched + ".")      

        tmp, tmp_unpackaged = util.put_tmp_directory_on_disk(True)
        
        util.put_skeleton_files_on_disk(metadata_type, tmp_unpackaged, github_template, params)
        package_xml_body = util.get_package_xml_contents({metadata_type : [ api_name ]})
        util.put_package_xml_in_directory(tmp_unpackaged, package_xml_body)
        zip_file = util.zip_directory(tmp, tmp)
        deploy_params = {
            "zip_file"          : zip_file,
            "rollback_on_error" : True,
            "ret_xml"           : True
        }
        deploy_result = sfdc_client.deploy(deploy_params)
        d = xmltodict.parse(deploy_result,postprocessor=util.xmltodict_postprocessor)
        meta_dir = ""
        files = []
        path = None
        for dirname, dirnames, filenames in os.walk(tmp_unpackaged):
            for filename in filenames:
                if 'package.xml' in filename:
                    continue
                full_file_path = os.path.join(dirname, filename)
                if '-meta.xml' in filename:
                    extension = filename.replace('-meta.xml','').split(".")[-1]
                else:
                    extension = filename.split(".")[-1]
                mt = util.get_meta_type_by_suffix(extension)
                if mt is not None:
                    meta_dir = mt['directoryName']
                    path = os.path.join(project.location, 'src', meta_dir)
                    if not os.path.exists(path):
                        os.makedirs(path)
                    files.append(os.path.join(path, filename))
                elif extension != "xml":
                    continue
                # only apex files and meta.xml files should make it to here
                shutil.copy(full_file_path, path)
        shutil.rmtree(tmp)
        
        project.update_package_xml_with_metadata(metadata_type, api_name)
        project.conflict_manager.refresh_local_store(files=files)

        return json.dumps(d["soapenv:Envelope"]["soapenv:Body"]['checkDeployStatusResponse']['result'])
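The util.xmltodict_postprocessor passed to xmltodict.parse above is MavensMate-specific. Purely as an illustration of the postprocessor hook itself (not the project's actual implementation), a postprocessor is called as postprocessor(path, key, value) and returns the (key, value) pair to keep:

import xmltodict

def lowercase_keys(path, key, value):
    # Illustrative postprocessor: normalize element names to lowercase.
    return key.lower(), value

doc = xmltodict.parse("<Result><Success>true</Success></Result>",
                      postprocessor=lowercase_keys)
# doc is roughly {'result': {'success': 'true'}}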
Exemplo n.º 32
0
    def retrieve(self, **kwargs):
        # request = {
        #   'RetrieveRequest': {
        #     'unpackaged': {
        #       'types': {
        #         'ApexTrigger': '*'
        #       }
        #     },
        #     'apiVersion': {
        #       25.0
        #     }
        #   }
        # }
        # package = {
        #     'unpackaged' : {
        #         'types' : [
        #             {
        #                 "members": "*",
        #                 "name": "ApexClass"
        #             }
        #         ]
        #     }
        # }
        package_dict = None
        request_payload = None

        debug('retrieve request: ')
        debug(kwargs.get('package'))

        if 'package' in kwargs and type(kwargs['package']) is not dict:
            #if package is location of package.xml, we'll parse the xml and create a request
            package_dict = xmltodict.parse(
                util.get_file_as_string(kwargs['package']))
            api_version = package_dict['Package']['version']
            package_dict['unpackaged'] = package_dict.pop('Package')
            package_dict['unpackaged'].pop('version')
            package_dict['unpackaged'].pop("@xmlns", None)
            package_dict['unpackaged'].pop("#text", None)
            package_dict['apiVersion'] = api_version
            types = package_dict['unpackaged']['types']
            if type(types) is not list:
                types = [types]
                package_dict['unpackaged']['types'] = types
            requested_types = []
            # if the request is for a certain type, only request that type
            if 'type' in kwargs and kwargs['type'] not in (None, ''):
                for i, val in enumerate(types):
                    if val['name'] == kwargs['type']:
                        requested_types.append(val)
                package_dict['unpackaged']['types'] = requested_types
                types = requested_types
            for i, val in enumerate(types):
                try:
                    package_dict['unpackaged']['types'][i].pop("#text", None)
                except:
                    package_dict['unpackaged']['types'].pop("#text", None)

            #if custom object is asterisked, we need to explicitly retrieve standard objects
            for t in package_dict['unpackaged']['types']:
                if 'name' in t:
                    metadata_type_def = util.get_meta_type_by_name(t['name'])
                    if 'inFolder' in metadata_type_def and metadata_type_def[
                            'inFolder']:
                        if 'members' in t and type(t['members']) is not list:
                            if t['members'] == "*" or t['members'] == []:
                                mlist = self.listMetadata(t['name'], False)
                                objs = []
                                for obj in mlist:
                                    objs.append(obj['fullName'])
                                objs.sort()
                                t['members'] = objs
                    elif t['name'] == 'CustomObject':
                        if 'members' in t and type(t['members']) is not list:
                            if t['members'] == "*":
                                mlist = self.listMetadata(
                                    'CustomObject', False)
                                objs = []
                                for obj in mlist:
                                    if '__c' not in obj['fullName']:
                                        objs.append(obj['fullName'])
                                objs.append("*")
                                objs.sort()
                                t['members'] = objs

            request_payload = package_dict

        elif 'package' in kwargs and type(kwargs['package']) is dict:
            package = kwargs['package']
            if 'unpackaged' not in package:
                #{ "ApexClass"    : ["MultiselectControllerTest","MultiselectController"] }
                type_array = []
                for i, metadata_type in enumerate(package):
                    member_value = package[metadata_type]
                    type_array.append({
                        "name": metadata_type,
                        "members": member_value
                    })

                package = {
                    'unpackaged': {
                        'types': type_array
                    },
                    'apiVersion': util.SFDC_API_VERSION
                }

            #if custom object is asterisked, we need to explicitly retrieve standard objects
            for t in package['unpackaged']['types']:
                debug('----> ')
                debug(t)
                if 'name' in t:
                    metadata_type_def = util.get_meta_type_by_name(t['name'])
                    debug(metadata_type_def)
                    if 'inFolder' in metadata_type_def and metadata_type_def[
                            'inFolder']:
                        if 'members' in t and (t['members'] == "*"
                                               or t['members'] == []):
                            #list_request_name = self.__transformFolderMetadataNameForListRequest(t['name'])
                            #mlist = self.listMetadata(list_request_name, False)
                            mlist = self.listMetadataAdvanced(t['name'])
                            objs = []
                            for obj in mlist:
                                debug('---obj')
                                debug(obj)
                                objs.append(obj['title'])
                                if 'children' in obj and type(obj['children']) is list:
                                    for child in obj['children']:
                                        objs.append(obj['title'] + "/" +
                                                    child['title'])
                            objs.sort()
                            t['members'] = objs
                    elif t['name'] == 'CustomObject':
                        if 'members' in t and type(t['members']) is not list:
                            if t['members'] == "*":
                                mlist = self.listMetadata(
                                    'CustomObject', False)
                                objs = []
                                for obj in mlist:
                                    if '__c' not in obj['fullName']:
                                        objs.append(obj['fullName'])
                                objs.append("*")
                                objs.sort()
                                t['members'] = objs

            request_payload = package
            debug('---request payload---')
            debug(request_payload)
        result = self._handleResultTyping(
            self._sforce.service.retrieve(request_payload))
        if result.done == False:
            self._waitForRetrieveRequest(result.id)
            return self._getRetrieveBody(result.id)
        else:
            return result
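The list-normalization near the top of retrieve() works around a known xmltodict behavior: a repeated element parses to a list, while a single occurrence parses to a plain mapping. A small sketch, assuming a version of xmltodict that supports the force_list argument:

import xmltodict

single = xmltodict.parse("<Package><types><name>ApexClass</name></types></Package>")
print(type(single["Package"]["types"]))   # an OrderedDict, not a list

# force_list makes the shape predictable regardless of how many <types> elements appear.
forced = xmltodict.parse("<Package><types><name>ApexClass</name></types></Package>",
                         force_list=("types",))
print(type(forced["Package"]["types"]))   # a list with one entry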
Exemplo n.º 33
0
    def execute(self):
        project = config.project
        sfdc_client = config.sfdc_client
        files = self.params.get('files', None)
        for f in files:
            if '-meta.xml' in f:
                corresponding_file = f.split('-meta.xml')[0]
                if corresponding_file not in files:
                    files.append(corresponding_file)
        for f in files:
            if '-meta.xml' in f:
                continue
            file_ext = f.split('.')[-1]
            metadata_type = util.get_meta_type_by_suffix(file_ext)
            if metadata_type['metaFile'] == True:
                corresponding_file = f + '-meta.xml'
                if corresponding_file not in files:
                    files.append(corresponding_file)

        metadata_package_dict = util.get_metadata_hash(files)
        tmp, tmp_unpackaged = util.put_tmp_directory_on_disk(True)
        package_xml = util.get_package_xml_contents(metadata_package_dict)
        util.put_package_xml_in_directory(tmp_unpackaged, package_xml, True)
        empty_package_xml = util.get_empty_package_xml_contents()
        util.put_empty_package_xml_in_directory(tmp_unpackaged, empty_package_xml)
        zip_file = util.zip_directory(tmp, tmp)
        
        purge_on_delete_setting = config.connection.get_plugin_client_setting("mm_purge_on_delete", False)
        if purge_on_delete_setting:
            describe_result = config.sfdc_client.describeMetadata(retXml=False)
            if describe_result.testRequired == True:
                purge_on_delete_setting = False

        deploy_params = {
            "zip_file"          : zip_file,
            "rollback_on_error" : True,
            "ret_xml"           : True,
            "purge_on_delete"   : purge_on_delete_setting
        }
        delete_result = sfdc_client.delete(deploy_params)
        d = xmltodict.parse(delete_result,postprocessor=util.xmltodict_postprocessor)
        shutil.rmtree(tmp)
        result = d["soapenv:Envelope"]["soapenv:Body"]['checkDeployStatusResponse']['result']
        if result['success'] == True:
            removed = []
            for f in files:
                try:
                    file_ext = f.split('.')[-1]
                    metadata_type = util.get_meta_type_by_suffix(file_ext)
                    if metadata_type is None or 'directoryName' not in metadata_type:
                        continue
                    directory = metadata_type['directoryName']
                    filepath = os.path.join(project.location, "src", directory, f)
                    metapath = os.path.join(project.location, "src", directory, f + '-meta.xml')
                    os.remove(filepath)
                    os.remove(metapath)
                    # remove the entry in file properties
                    project.conflict_manager.remove_from_local_store(f)
                    removed.append(f)
                except Exception, e:
                    print e.message
            return util.generate_success_response("Removed metadata files: " + (",".join(removed)))
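As the dictionary indexing above shows, xmltodict keeps namespace prefixes such as soapenv: as part of the keys unless namespace processing is enabled. A tiny illustrative sketch (the envelope below is made up, not a real Salesforce response):

import xmltodict

soapish = ("<soapenv:Envelope xmlns:soapenv='http://schemas.xmlsoap.org/soap/envelope/'>"
           "<soapenv:Body><result><success>true</success></result></soapenv:Body>"
           "</soapenv:Envelope>")
d = xmltodict.parse(soapish)
print(d["soapenv:Envelope"]["soapenv:Body"]["result"]["success"])  # 'true'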
Exemplo n.º 34
0
def xml_to_dict(adsrecords):
    """
    wrapper for parsing XML
    :param adsrecords: adsrecords object
    """
    return xmltodict.parse(adsrecords.serialize())
Exemplo n.º 35
0
    def parse_dom_disks(self, xmldesc):
        # Parse a libvirt domain XML description and return its disk device names.
        result = list()
        xmld = xmltodict.parse(xmldesc)
        disks = xmld['domain']['devices']['disk']
        if not isinstance(disks, list):
            # xmltodict returns a single <disk> element as a dict, not a list.
            disks = [disks]
        for i in disks:
            result.append(i['target']['@dev'])
        return result
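A tiny illustration of the data shape that parse_dom_disks walks, using a made-up libvirt-style domain XML (not taken from a real dump) and showing xmltodict's '@' prefix for attributes:

import xmltodict

sample = """
<domain>
  <devices>
    <disk><target dev="vda"/></disk>
    <disk><target dev="vdb"/></disk>
  </devices>
</domain>
"""
xmld = xmltodict.parse(sample)
print([d["target"]["@dev"] for d in xmld["domain"]["devices"]["disk"]])  # ['vda', 'vdb']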