Example No. 1
    def getstd(self):
        # LEED performance rating method compliance rows with non-zero values
        c = self.elem.find('LeedSummary').findall('Eap245PerformanceRatingMethodCompliance')
        s = pd.DataFrame(pr.data(c)['Eap245PerformanceRatingMethodCompliance'])[:-1].melt(id_vars='name')
        s = s[s['value'] > 0]
        s = s.set_index(s.name + ':' + s.variable)['value']

        # unmet setpoint hours from the last TimeSetpointNotMet table
        c = self.elem.find('SystemSummary').findall('TimeSetpointNotMet')[-1]
        s = pd.concat([s, pd.Series(pr.data(c))[1:]])

        s['EUI'] = float(self.elem.find('AnnualBuildingUtilityPerformanceSummary').findall('SiteAndSourceEnergy')[1].find('EnergyPerTotalBuildingArea').text)

        # coil capacities; the index trick handles both a single coil (dict) and several coils (list)
        coils = pr.data(self.elem.find('EquipmentSummary').findall('CoolingCoils'))['CoolingCoils']
        s['ClCoilSens'] = pd.DataFrame(coils, index=range(max(1, isinstance(coils, list) * len(coils))))[['NominalSensibleCapacity']].sum()[0]
        s['ClcoilLat'] = pd.DataFrame(coils, index=range(max(1, isinstance(coils, list) * len(coils))))[['NominalLatentCapacity']].sum()[0]

        coils = pr.data(self.elem.find('EquipmentSummary').findall('HeatingCoils'))['HeatingCoils']
        s['HtCoil'] = pd.DataFrame(coils, index=range(max(1, isinstance(coils, list) * len(coils))))[['NominalTotalCapacity']].sum()[0]

        # zone design air flows summed across all zones
        s['clgCFM'] = pd.DataFrame(pr.data(self.elem.find('HvacSizingSummary').findall('ZoneSensibleCooling'))['ZoneSensibleCooling'])['UserDesignAirFlow'].sum()
        s['oaCFM'] = pd.DataFrame(pr.data(self.elem.find('HvacSizingSummary').findall('ZoneSensibleCooling'))['ZoneSensibleCooling'])['MinimumOutdoorAirFlowRate'].sum()
        s['htgCFM'] = pd.DataFrame(pr.data(self.elem.find('HvacSizingSummary').findall('ZoneSensibleHeating'))['ZoneSensibleHeating'])['UserDesignAirFlow'].sum()
        return s
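A note on the index=range(max(1, ...)) pattern above: the Parker convention returns a list only when a child element repeats, and collapses a single occurrence to a plain dict, so a one-coil report needs an explicit one-row index before it can become a DataFrame. A minimal sketch, assuming xmljson's stock parker instance (the coil markup is invented):

from xml.etree.ElementTree import fromstring
from xmljson import parker
import pandas as pd

# A single <CoolingCoils> entry collapses to a dict of scalars rather than a list.
coils = parker.data(fromstring(
    '<EquipmentSummary><CoolingCoils>'
    '<NominalSensibleCapacity>1000</NominalSensibleCapacity>'
    '</CoolingCoils></EquipmentSummary>'))['CoolingCoils']
df = pd.DataFrame(coils, index=range(max(1, isinstance(coils, list) * len(coils))))
print(df['NominalSensibleCapacity'].sum())  # 1000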
Example No. 2
    def _get_acl_rules(self, rest_device, address_type, acl_type, acl_name,
                       sequences):
        """
        Return list of rules configured for acl_name
        """
        rules_list = []
        method = address_type + '_access_list_' + acl_type + '_seq_get'
        config = (method, {acl_type: acl_name, 'resource_depth': 2})
        output = rest_device._callback(config, handler='get_config')
        util = Util(output.data)

        for rcvd_seq in util.root.findall(".//seq"):
            if rcvd_seq is not None:
                seq_id = int(rcvd_seq.find('seq-id').text)
                if seq_id in sequences:
                    sequences.remove(seq_id)
                    pd = parker.data(rcvd_seq)

                    new_pd = {}
                    # Replace "-" with "_"
                    for k, v in pd.items():
                        nk = k.replace("-", "_")
                        new_pd[nk] = v

                    rules_list.append(new_pd)

        return rules_list
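For reference, a minimal sketch of what the Parker conversion yields for one matched sequence element, assuming xmljson's stock parker instance (the <seq> markup is invented): child tag names become dict keys, which is why the hyphen-to-underscore pass above is needed.

from xml.etree.ElementTree import fromstring
from xmljson import parker

seq = fromstring('<seq><seq-id>10</seq-id><action>permit</action></seq>')
rule = parker.data(seq)  # roughly {'seq-id': 10, 'action': 'permit'}
# Hyphenated tag names make awkward dict keys, so rewrite them with underscores.
rule = {k.replace('-', '_'): v for k, v in rule.items()}
print(rule)  # {'seq_id': 10, 'action': 'permit'}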
Example No. 3
    def download_subjects(self, job):
        # download subjects
        info('downloading subjects definition from "' + self.xml_stats_url +
             '"...')

        # download xml
        stats_xml = os.path.join(job.job_dir, 'subjects.xml')
        urllib.request.urlretrieve(self.xml_stats_url, stats_xml)

        # parse xml to json
        root = ET.parse(stats_xml).getroot()
        subjects = parker.data(
            root.find("./achievements"),
            preserve_root=True).get('achievements').get('achievement')
        store_json(subjects, job.subjects_file)

        # download icons
        info('downloading subjects...')
        i = 0
        total = len(subjects)
        progress(i, total)
        for subject in subjects:
            subject_url = subject.get('iconClosed')
            urllib.request.urlretrieve(
                subject_url,
                os.path.join(job.subjects_dir, ntpath.basename(subject_url)))
            i = i + 1
            progress(i, total)
        success('stored ' +
                str(len(glob.glob(os.path.join(job.subjects_dir, "*.jpg")))) +
                ' subjects to ' + job.subjects_dir)
Example No. 4
def save_to_mongo(content):
    client = MongoClient()
    ##    client = MongoClient("192.168.1.194", 27107)
    dbtemp = client.temp
    ##    timestmp = datetime.datetime.now().strftime("%Y-%d-%d %H:%M:%S")
    timestmp = datetime.datetime.utcnow()

    xmlread = content.decode('ascii')
    xmlread = fromstring(xmlread)

    newfile = dumps(parker.data(xmlread))
    data = loads(newfile)
    result = dbtemp.new_Ireq_data.insert_one(data)

    db = client.Ireqdata

    copycursor = db.req_data.find()

    ##    db.req_archive.drop_indexes()
    ##    db.req_archive.reindex()
    db.req_data.drop_indexes()
    db.req_data.reindex()

    for document in copycursor:
        db.req_archive.insert_one(document)
        print(document)

    cursor = dbtemp.new_Ireq_data.distinct(
        "Unit.Packet.Payload.ResultSet.Jobs.Job")
    for document in cursor:
        newreq = {"dateAdded": timestmp, "req": document}
        db.Ireq_data.insert_one(newreq)
    dbtemp.new_Ireq_data.drop()
Example No. 5
    def recv_data_object(self):
        frame = self.recv_frame()
        data_frame = self.frame_data_parse(frame)
        xml_data = self.frame_data_xml(data_frame)

        ret_val = dumps(p.data(fromstring(xml_data)))

        return ret_val
Example No. 6
def annotation_xml_to_json(xml_file):
    with open(xml_file, "rb") as f:
        json_dict = keep_keys(parker.data(fromstring(f.read())))
    if "object" not in json_dict:
        json_dict = {"object": {}}
    return json.dumps(json_dict["object"])
Example No. 7
    def _get_data(self):
        """Get the status from the phone system."""
        raw_res = requests.get(
            STATUS_URL.format(self.hostname, self.username, self.password))

        if raw_res.status_code != 200:
            raise PyCiscoSPAError("Login Error: Bad HTTP status code.")
        self._data = parker.data(fromstring(raw_res.content))
Example No. 8
    def convert_response_to_json(self, response_xml):
        return {
            "data": parker.data(response_xml),
            "meta": {
                "version": version,
                "author": author,
                "id": generate_file_id()
            }
        }
Example No. 9
def xml_to_json(path):
    with open(path, 'r') as r:
        root = fromstring(r.read())
        root_attributes = {key.lower(): value.lower() for key, value in root.attrib.items()}
        if 'convention' in root_attributes:
            convention = root_attributes['convention']
            if convention == 'parker':
                return parker.data(root)
        return badgerfish.data(root)
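Since this is the only example that picks a convention at runtime, a minimal sketch of how the two differ on the same document may help (the markup is invented): Parker drops the root element and all attributes and keeps bare values, while BadgerFish keeps the root, stores text under '$' and attributes under '@'-prefixed keys.

from xml.etree.ElementTree import fromstring
from xmljson import parker, badgerfish

doc = fromstring('<root convention="parker"><a>1</a><a>2</a><b>text</b></root>')
print(parker.data(doc))
# roughly {'a': [1, 2], 'b': 'text'} - no root, no attributes
print(badgerfish.data(doc))
# roughly {'root': {'@convention': 'parker', 'a': [{'$': 1}, {'$': 2}], 'b': {'$': 'text'}}}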
Example No. 10
def transform_xml_to_json(xml_file_input, json_file_output):
    """Transform Single File"""
    with open(xml_file_input, 'r') as f1:
        xml_text = f1.read()
    # Create the directory for the output file; do nothing if it already exists
    os.makedirs(os.path.dirname(json_file_output), exist_ok=True)
    with open(json_file_output, 'w') as f2:
        json_text = dumps(parker.data(fromstring(xml_text)))
        f2.write(json_text)
Example No. 11
def get_stat(key):
    url = get_setting('fse_datafeed_stat')
    url = url.format(key, key)
    response = requests.get(url)
    if "Currently Closed for Maintenance" in response.text:
        raise RuntimeError(
            'FSEconomy is currently down for maintenance. Please try again later.'
        )
    clean = re.sub(r'<StatisticItems[^>]+>', '<StatisticItems>', response.text)
    data = parker.data(fromstring(clean))
    return data['Statistic']
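The re.sub most likely exists to strip a default xmlns declaration (along with any other attributes) from the root tag: with a default namespace in place, ElementTree reports every tag in Clark notation, so data['Statistic'] would raise a KeyError. A minimal sketch of the effect, with invented feed markup:

from xml.etree.ElementTree import fromstring
from xmljson import parker

with_ns = ('<StatisticItems xmlns="http://example.invalid/fse">'
           '<Statistic><Total>5</Total></Statistic></StatisticItems>')
print(parker.data(fromstring(with_ns)))
# roughly {'{http://example.invalid/fse}Statistic': {'{http://example.invalid/fse}Total': 5}}

without_ns = '<StatisticItems><Statistic><Total>5</Total></Statistic></StatisticItems>'
print(parker.data(fromstring(without_ns))['Statistic'])  # roughly {'Total': 5}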
Example No. 12
def parse_and_save_data(experiment_file):
    experiment_data = ExperimentData()
    experiment_data.experiment_file = experiment_file
    if "xml" in experiment_data.experiment_file.get_file_mime():
        root = parse(
            StringIO(experiment_data.experiment_file.get_file_content()))
        experiment_data.data = dumps(parker.data(root.getroot()))
        experiment_data.parsing_method = 'XML'

    experiment_data.parsing_datetime = datetime.datetime.now()
    experiment_data.save()
Example No. 13
File: rpc.py Project: cm58/rtremote
    def system_multicall(self, commands):
        body = "<?xml version='1.0'?><methodCall><methodName>system.multicall</methodName><params><param><value><array><data>"
        if commands:
            for c in commands:
                body += '<value>' + self.get_struct(c) + '</value>'
        body += "</data></array></value></param></params></methodCall>"
        scgi = Scgi(self.host_port)
        resp = scgi.post(body)
        resp = resp[resp.find('<'):]  # strip headers
        # print(resp)
        data = parker.data(fromstring(resp), preserve_root=True)  # convert to json
        return data
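Both rpc.py examples pass preserve_root=True, which keeps the XML root tag as the outermost key instead of discarding it. A minimal sketch with an invented, heavily shortened XML-RPC reply:

from xml.etree.ElementTree import fromstring
from xmljson import parker

resp = fromstring('<methodResponse><params><param><value><i4>200</i4>'
                  '</value></param></params></methodResponse>')
print(parker.data(resp))
# roughly {'params': {'param': {'value': {'i4': 200}}}}
print(parker.data(resp, preserve_root=True))
# roughly {'methodResponse': {'params': {'param': {'value': {'i4': 200}}}}}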
Example No. 14
def test_empty():
    # arrange
    with open("./test-data/test_input/empty.xml", "r") as input_file:
        file_text = input_file.read()

    with open("./test-data/expected_messages/empty.log", "r") as expected:
        expected_message = expected.read()

    # act & assert
    with pytest.raises(ParseError, match=r".*" + expected_message + ".*"):
        json_str = dumps(parker.data(fromstring(file_text)),
                         preserve_root=True)
Example No. 15
def convertPoiXML(ui, filepath):
    if validatePoiXML(filepath):
        poiTree = ET.parse(filepath)
        poiRoot = poiTree.getroot()
        poiDict = json.loads(json.dumps(pk.data(poiRoot)))
        return poiDict
    else:
        print('invalid POI XML (does not conform to POI schema)')
        QMessageBox.question(
            ui, "Error: Invalid File",
            "Provided file must be an XML that conforms to poiConfig,xsd (schema)",
            QMessageBox.Ok)
        return 0
Example No. 16
def callZillowApi():
    address = request.get_json()
    #zillow call
    payload = {
        "zws-id": config.zillow_key,
        "address": address["line1"],
        "citystatezip": address["locality"] + "," + address["countrySubd"]
    }
    resp = requests.get(zillow + "GetDeepSearchResults.htm", params=payload)
    json_data = json.loads(json.dumps(xmlparse.data(fromstring(resp.text))))
    if json_data["message"]["code"] != 0:
        return "", 503
    return json.dumps(json_data["response"]["results"]), 200
Example No. 17
    def getall(self):
        r = pr.data(self.elem)
        for ea in ['BuildingName', 'EnvironmentName', 'WeatherFileLocationTitle',
                   'ProgramVersion', 'SimulationTimestamp']:
            r.pop(ea, None)
        for ea in r:
            if 'monthly' not in ea:
                for j in ['for', 'note', 'footnote', 'General']:
                    r[ea].pop(j, None)
                for j in r[ea]:
                    if type(r[ea][j]) is list:
                        self.__setattr__(j, pd.DataFrame(r[ea][j]).set_index('name'))
                    else:
                        self.__setattr__(j, pd.Series(r[ea][j]))
Example No. 18
def test_invalid_xml():
    # arrange
    with open("./test-data/test_input/invalid.xml", "r") as input_file:
        input_text = input_file.read()
    print("input_text: " + input_text)

    with open("./test-data/expected_messages/invalid_xml.log", "r") as expected:
        expected_message = expected.read()

    # act & assert
    with pytest.raises(ParseError, match=r".*" + expected_message + ".*"):
        json_str = dumps(
            parker.data(fromstring(input_text), preserve_root=False))
Example No. 19
File: rpc.py Project: cm58/rtremote
    def call(self, method, params=None):
        body = "<?xml version='1.0'?><methodCall><methodName>" + \
            method + "</methodName><params>"
        if params:
            for p in params:
                body += '<param><value><' + p[0] + '>' + str(p[1]) + '</' + p[0] + '></value></param>'
        body += "</params></methodCall>"
        scgi = Scgi(self.host_port)
        resp = scgi.post(body)
        resp = resp[resp.find('<'):]  # strip headers
        # print(resp)
        data = parker.data(fromstring(resp), preserve_root=True)  # convert to json
        return data
Example No. 20
    def __iter_extended_rows(self):
        from xml.etree.ElementTree import parse
        from xmljson import parker

        parsed = parker.data(parse(self.__chars).getroot())
        elements = list(parsed.values())
        if len(elements) > 0:
            elements = elements[0]
        else:
            elements = []
        for row_number, row in enumerate(elements, start=1):
            keys, values = zip(*(row.items()))
            yield (row_number, list(keys), list(values))
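The iterator above assumes a flat, record-oriented document; a minimal sketch of the layout it expects and what it yields (the markup is invented):

from io import StringIO
from xml.etree.ElementTree import parse
from xmljson import parker

source = StringIO('<rows><row><id>1</id><name>english</name></row>'
                  '<row><id>2</id><name>french</name></row></rows>')
parsed = parker.data(parse(source).getroot())  # roughly {'row': [{'id': 1, ...}, ...]}
rows = list(parsed.values())[0]
for row_number, row in enumerate(rows, start=1):
    keys, values = zip(*row.items())
    print(row_number, list(keys), list(values))  # 1 ['id', 'name'] [1, 'english'] ...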
Example No. 21
def lambda_handler(event, context):
    data = json.loads(event)

    # the payload is either a path to an XML file or the XML string itself
    if data['isFile']:
        xml = open(data['xml'], 'r').read()
    else:
        xml = data['xml']

    if data['attributes']:
        result = bf.data(fromstring(xml))
    else:
        result = parker.data(fromstring(xml), preserve_root=True)

    return json.dumps(result)
Example No. 22
    def get_data(self,
                 unique_data_ids=None,
                 sample=False,
                 output_type='csv',
                 **kwargs):
        """
        Returns a JSON object of the entire data set.

        """
        data_json = None
        db = kwargs.get('db', None)

        if unique_data_ids is None:
            unique_data_ids = self._available_unique_data_ids

        for u in unique_data_ids:
            if (u not in self._available_unique_data_ids):
                logger.info(
                    "  The unique_data_id '{}' is not supported by the DhcdApiConn"
                    .format(u))

            else:
                result = self.get(self._urls[u], params=self._params[u])

                if result.status_code != 200:
                    err = "An error occurred during request: status {0}"
                    logger.exception(err.format(result.status_code))
                    continue

                data_xml_root = xml_fromstring(result.text)
                data_xml_records = data_xml_root.findall('record')
                data_json = xml_to_json.data(data_xml_root)

                results = [
                    DhcdResult({e.tag: e.text
                                for e in list(r)}, self._fields[u]).data
                    for r in data_xml_records
                ]

                self.result_to_csv(self._fields[u], results,
                                   self.output_paths[u])

                #Convert to format expected by database
                if u == 'dhcd_dfd_properties':
                    self.create_project_subsidy_csv('dhcd_dfd_properties',
                                                    PROJECT_FIELDS_MAP,
                                                    SUBSIDY_FIELDS_MAP,
                                                    PROJECT_ADDRE_FIELDS_MAP,
                                                    db)
Example No. 23
    def getmonthly(self):
        r = pr.data(self.elem)
        for ea in r:
            if 'monthly' in ea:
                if type(r[ea]) is list:
                    s = pd.DataFrame()
                    for j in r[ea]:
                        v = pd.DataFrame(j['CustomMonthlyReport'])
                        v['zone'] = j['for']
                        s = pd.concat([s, v])
                    s = s.set_index(['zone', 'name'])
                    self.__setattr__(ea, s)
                else:
                    self.__setattr__(ea, pd.DataFrame(r[ea]['CustomMonthlyReport']))
Example No. 24
def test_nested_empty():
    # arrange
    with open("./test-data/test_input/nested_empty.xml", "r") as input_file:
        input_text = input_file.read()
    print("input_text: " + input_text)

    with open("./test-data/expected_output/nested_empty.json", "r") as expected:
        expected_text = expected.read()
    print("expected_text: " + expected_text)

    # act
    res = dumps(parker.data(fromstring(input_text), preserve_root=True))
    print("res: " + str(res))

    # assert
    assert res == expected_text
Example No. 25
def http_api_get(url):
    headers = {
        'User-Agent':
        'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.115 Safari/537.36',
        'Referer': 'http://iotobservatory.io',
        'Content-Type': 'application/json'
    }
    try:
        r = requests.get(url, headers=headers)
        ret = r.content
        ret = xml_format(ret)
        ret_json = json.dumps(parker.data(fromstring(ret)))
    except Exception as e:
        print(e)
        ret_json = {}

    return ret_json
Example No. 26
    def format_dmf_response(self, response, ssn):
        dmf_record = parker.data(fromstring(response.content))

        dmf_record_present = False
        full_name = None

        if dmf_record['Record'].get('DmfSearch', False):
            dmf_record_present = True

        if dmf_record['Record']['CommercialNameSearch'].get('Identity', False):
            full_name_string = dmf_record['Record']['CommercialNameSearch'][
                'Identity']['NameInfo']['WholeName']
            full_name = full_name_string.split(',')[0]

        return {
            'ssn': ssn,
            'full_name': full_name,
            'dmf_record_present': dmf_record_present
        }
Example No. 27
def get_json_xml(data_folder, num_entries=0):
    """
    Using ElementTree, parse each XML file to JSON with the Parker transformation.

    The Parker transformation keeps the tree structure in the JSON but drops
    attributes; the XML here has no attributes anyway.
    """
    i = 0
    result_dict = {}
    for filename in sorted(os.listdir(data_folder)):
        target_filename = data_folder + filename
        with open(target_filename, 'r') as f:
            text = f.read()
        xml = fromstring(text)
        mydict = json.loads(json.dumps(bf.data(xml)))
        result_dict[filename[0:-4]] = mydict
        i += 1
        if i == num_entries:
            break
    return result_dict
Example No. 28
def save_to_mongo(content):
    client = MongoClient()
    dbtemp = client.temp
    ##    timestmp = datetime.datetime.now().strftime("%Y-%d-%d %H:%M:%S")
    timestmp = datetime.datetime.utcnow()

    xmlread = content.decode('ascii')
    xmlread = fromstring(xmlread)

    newfile = dumps(parker.data(xmlread))
    data = loads(newfile)
    result = dbtemp.new_req_data.insert_one(data)

    db = client.reqdata
    cursor = dbtemp.new_req_data.distinct(
        "Unit.Packet.Payload.ResultSet.Jobs.Job")
    for document in cursor:
        newreq = {"dateAdded": timestmp, "req": document}
        db.req_data.insert_one(newreq)
    dbtemp.new_req_data.drop()
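A minimal sketch of why the dotted distinct() path works: Parker turns nested elements into nested dicts and repeated elements into lists, so the element hierarchy maps directly onto MongoDB's dotted field paths. Only the Unit/Packet/... names are taken from the call above; the wrapper element and values are invented.

from xml.etree.ElementTree import fromstring
from xmljson import parker

xml = ('<Response><Unit><Packet><Payload><ResultSet><Jobs>'
       '<Job><id>1</id></Job><Job><id>2</id></Job>'
       '</Jobs></ResultSet></Payload></Packet></Unit></Response>')
doc = parker.data(fromstring(xml))  # the <Response> root is dropped by the Parker convention
print(doc['Unit']['Packet']['Payload']['ResultSet']['Jobs']['Job'])
# roughly [{'id': 1}, {'id': 2}] - what distinct("Unit.Packet.Payload.ResultSet.Jobs.Job") walks into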
Example No. 29
def save_to_mongo(content):
    client = MongoClient()
    dbtemp = client.temp
    timestmp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")

    xmlread = content.decode('ascii')
    xmlread = fromstring(xmlread)
    ##    bf = BadgerFish(dict_type=OrderedDict)

    newfile = dumps(parker.data(xmlread))
    data = loads(newfile)
    ##    print (data)
    result = dbtemp.new_req_data.insert_one(data)
    ##    print("Result: ", result)

    db = client.reqdata
    cursor = dbtemp.new_req_data.distinct(
        "Unit.Packet.Payload.ResultSet.Jobs.Job")
    for document in cursor:
        newreq = {"timestamp": timestmp, "req": document}
        print(newreq)
        db.req_data.insert_one(newreq)
Example No. 30
def save_to_mongo(content):
    client = MongoClient()
##    client = MongoClient("192.168.1.169", 27107)
    dbtemp = client.temp
    timestmp = datetime.datetime.utcnow()
    
    xmlread = content.decode('ascii')
    xmlread = fromstring(xmlread)

    newfile = dumps(parker.data(xmlread))
    data = loads(newfile)
    result = dbtemp.new_req_data.insert_one(data)

    db = client.reqdata

    cursor = dbtemp.new_req_data.distinct(
        "Unit.Packet.Payload.ResultSet.Jobs.Job")
    for document in cursor:
        newreq = {"dateAdded": timestmp, "req": document}
        db.req_data.insert_one(newreq)
    dbtemp.new_req_data.drop()
    print("New Reqs Added")