Example #1
import os

import utils  # project-local helpers: data2fields, writeToCsv


def write_json_to_csvs(sheet2data, timestamp):
    # Write each sheet's records to out/csv/<sheet>.csv and to a dated copy
    # under out/history/<YYYY-MM-DD>/ for history keeping.
    histdir = 'out/history/' + timestamp.strftime('%Y-%m-%d')
    os.makedirs(histdir, exist_ok=True)
    os.makedirs('out/csv', exist_ok=True)
    for i, (sheetname, data) in enumerate(sheet2data):
        data, fields = utils.data2fields(data)
        print(i, sheetname, fields)

        with open('out/csv/' + sheetname + '.csv', 'w') as csvfile:
            utils.writeToCsv(data, fields, csvfile)

        with open(histdir + '/' + sheetname + '.csv', 'w') as csvfile:
            utils.writeToCsv(data, fields, csvfile)
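
The original call site is not shown. As a minimal usage sketch, assuming sheet2data is an iterable of (sheet name, list-of-record-dicts) pairs as the loop suggests, the function could be invoked like this (the sheet name and record below are hypothetical):

from datetime import datetime

# Hypothetical input: one sheet with a single record; real data would come
# from the data.gov.il responses shown in Example #2.
sheet2data = [
    ('example_sheet', [{'date': '2020-01-01', 'cases': 0}]),
]
write_json_to_csvs(sheet2data, datetime.now())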
Example #2
import json
import urllib.error
import urllib.request

import utils  # project-local helpers: data2fields, writeToCsv

# lastUpdateLocal / lastUpdateRemote are computed earlier in the script;
# stop early if the remote dataset has not changed since the last run.
if lastUpdateLocal == lastUpdateRemote:
    print('local == remote, stopping.', lastUpdateLocal, lastUpdateRemote)
    exit()
print('local != remote, continuing.', lastUpdateLocal, lastUpdateRemote)

with open('jsons/mohfiles.json') as f:
    mohfiles = json.load(f)
    for moh in mohfiles:
        print(moh['name'], moh['asset'])
        try:
            # Fetch the resource's records from the data.gov.il CKAN
            # datastore_search API and dump them to a CSV per resource.
            url = ('https://data.gov.il/api/3/action/datastore_search'
                   '?resource_id=' + moh['asset'] + '&limit=9999')
            text = urllib.request.urlopen(url).read().decode('utf-8')
            jsonobj = json.loads(text)
            data = jsonobj['result']['records']
            data, fields = utils.data2fields(data)
            with open('out/csv/' + moh['name'] + '.csv', 'w') as csvfile:
                utils.writeToCsv(data, fields, csvfile)
        except urllib.error.HTTPError as err:
            print("HTTPError: {0}".format(err))
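
The contents of jsons/mohfiles.json are not shown. Judging from the loop above, each entry pairs an output file name with a data.gov.il resource id; a hypothetical sketch of the expected structure, written out by a small helper script:

import json
import os

# Hypothetical entries; the resource id below is a placeholder, not a real asset.
mohfiles = [
    {"name": "example_dataset", "asset": "00000000-0000-0000-0000-000000000000"},
]
os.makedirs('jsons', exist_ok=True)
with open('jsons/mohfiles.json', 'w') as f:
    json.dump(mohfiles, f, indent=2)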

# get_dash_req / get_dash_data are defined elsewhere in the script.
dashrequest = get_dash_req()
for r in dashrequest['requests']:
    print(r['id'], r['queryName'])
# print(json.dumps(dashrequest, indent=4, sort_keys=True))
# exit()
sheets = [r['queryName'] for r in dashrequest['requests']]

dashjson = get_dash_data()
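
Both examples rely on utils.data2fields and utils.writeToCsv, which are not shown here. A minimal sketch of what they appear to do, inferred only from how they are called (an assumption, not the project's actual implementation):

import csv

def data2fields(data):
    # Assumed behavior: return the records unchanged plus the union of their
    # keys, preserving first-seen order, for use as the CSV header.
    fields = []
    for record in data:
        for key in record:
            if key not in fields:
                fields.append(key)
    return data, fields

def writeToCsv(data, fields, csvfile):
    # Assumed behavior: write the record dicts as CSV rows under a header
    # built from `fields`.
    writer = csv.DictWriter(csvfile, fieldnames=fields)
    writer.writeheader()
    writer.writerows(data)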