Example #1
def export_dataframe_to_csv(name: str, dataframe):
    filename = os.path.join(CSV_FOLDER, f'{name}.csv')
    # Hash the current file so we can detect whether the export changed it.
    old_hash = sha1sum(filename)
    # Zeros become empty cells; 'Int64' keeps the columns nullable-integer.
    # (pandas >= 1.5 renames line_terminator to lineterminator.)
    dataframe.replace({0: None}).astype('Int64') \
        .to_csv(filename, line_terminator='\r\n')
    # Refresh the timestamp marker only if the contents actually changed.
    write_timestamp_file(filename=filename, old_hash=old_hash)
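
The snippet relies on two project helpers the excerpt does not show. A minimal sketch of what they plausibly do, assuming sha1sum hashes a file on disk and write_timestamp_file touches a '<filename>.timestamp' marker only when the hash changed (the names come from the snippet; the bodies are assumptions):

import hashlib
import os
import time

def sha1sum(filename):
    # SHA-1 of the file contents; a missing file yields None.
    if not os.path.isfile(filename):
        return None
    h = hashlib.sha1()
    with open(filename, 'rb') as f:
        for chunk in iter(lambda: f.read(8192), b''):
            h.update(chunk)
    return h.hexdigest()

def write_timestamp_file(filename, old_hash):
    # Bump the marker only when the exported file actually changed.
    if sha1sum(filename) != old_hash:
        with open(f'{filename}.timestamp', 'w') as f:
            f.write(str(int(time.time())))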
Example #2
def login(self, username, password):
    # Reuse saved cookies if they still pass the session check.
    cookies = self.load_cookies()
    if self.test_cookies(cookies):
        return
    # The endpoint expects the SHA-1 digest of the password, not plain text.
    password = utils.sha1sum(password)
    data = {'username': username, 'sha1': password, 'op': 'login'}
    r = self.post(data)
    # If the login prompt is absent from the response, we are authenticated.
    if 'You need to login before proceed!' not in r.text:
        self.cookies = r.cookies
        self.dump_cookies()
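
Unlike Example #1, utils.sha1sum here evidently hashes a string rather than a file. A one-line sketch of that assumption:

import hashlib

def sha1sum(text: str) -> str:
    # SHA-1 hex digest of a string, as the login form apparently expects.
    return hashlib.sha1(text.encode('utf-8')).hexdigest()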
Example #3
export_dataframe_to_csv(name='municipality-confirmed', dataframe=df.cumsum())

# A 14-day rolling sum approximates currently active cases; zeros become
# empty cells and the 'unknown' region column is dropped.
df = df.rolling(min_periods=1, window=14).sum().replace({0: None}).astype('Int64') \
    .drop('region.n.neznano', axis='columns')
export_dataframe_to_csv(name='municipality-active', dataframe=df)

# Index municipality metadata by lower-cased primary and alternative names.
municipalities = {}
with open(file=os.path.join(CSV_FOLDER, 'dict-municipality.csv'), encoding='utf-8') as f:
    for row in csv.DictReader(f):
        municipalities[row['name'].lower()] = row
        if row['name_alt']:
            municipalities[row['name_alt'].lower()] = row

# --- municipality-deceased.csv ---
# Copy paste latest row for every missing date
municipality_deceased_csv_path = os.path.join(CSV_FOLDER, 'municipality-deceased.csv')
old_hash = sha1sum(municipality_deceased_csv_path)
with open(file=municipality_deceased_csv_path, encoding='utf-8') as f:
    rows = list(csv.DictReader(f))

# The index holds numpy datetime64 values, which stringify as
# 'YYYY-MM-DDT...'; keep only the date part.
latest_date = str(df.index.values[-1]).split('T')[0]
latest_date = datetime.strptime(latest_date, '%Y-%m-%d').date()
# Forward-fill: duplicate the latest row for every missing day up to
# latest_date (the walrus operator requires Python 3.8+).
while (date := datetime.strptime(rows[-1]['date'], '%Y-%m-%d').date()) < latest_date:
    rows.append(copy.deepcopy(rows[-1]))
    rows[-1]['date'] = str(date + timedelta(days=1))
# Write the rows collection back to the csv
with open(municipality_deceased_csv_path, 'w', newline='', encoding='utf-8') as csvfile:
    writer = csv.DictWriter(csvfile, fieldnames=rows[0].keys())
    writer.writeheader()
    writer.writerows(rows)
write_timestamp_file(filename=municipality_deceased_csv_path, old_hash=old_hash)
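
The gap-filling loop is the core of this example. A standalone demonstration on made-up rows (the field names are hypothetical, not from the real CSV):

import copy
from datetime import datetime, timedelta

rows = [{'date': '2021-03-01', 'deceased': '5'}]
latest_date = datetime.strptime('2021-03-04', '%Y-%m-%d').date()
while (date := datetime.strptime(rows[-1]['date'], '%Y-%m-%d').date()) < latest_date:
    rows.append(copy.deepcopy(rows[-1]))
    rows[-1]['date'] = str(date + timedelta(days=1))
print([r['date'] for r in rows])
# ['2021-03-01', '2021-03-02', '2021-03-03', '2021-03-04']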
Example #4
def get_location_key(components):
    # Return the first known location key among the column-name components.
    for key in [
            'kranj', 'ljubljana', 'domzale', 'saleske', 'koper', 'celje',
            'maribor'
    ]:
        if key in components:
            return key
    raise Exception(f'No location key found: {components}')


# Build a column-rename mapping from raw names to dotted keys,
# e.g. a column 'KRANJ_flow' would become 'sewage.kranj.flow'.
mapping = {}
for column in df.columns:
    if column == 'NIB Measurements':
        mapping['NIB Measurements'] = 'measurements.nib'
        continue

    components = column.lower().split('_')
    location = get_location_key(components=components)
    components.remove(location)
    location = {
        'saleske': 'velenje'
    }.get(location, location)  # transform some of the location names
    mapping[column] = f'sewage.{location}.{"-".join(components)}'

df.rename(mapper=mapping, axis='columns', inplace=True)

# export to csv
filename = os.path.join(CSV_FOLDER, 'sewage.csv')
old_hash = sha1sum(filename)
df.to_csv(filename)
write_timestamp_file(filename=filename, old_hash=old_hash)
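
A quick, self-contained check of the renaming rule; the function mirrors the loop above for a single column, and the column names in the asserts are invented for illustration:

def sewage_key(column):
    if column == 'NIB Measurements':
        return 'measurements.nib'
    components = column.lower().split('_')
    location = next(k for k in ['kranj', 'ljubljana', 'domzale', 'saleske',
                                'koper', 'celje', 'maribor'] if k in components)
    components.remove(location)
    location = {'saleske': 'velenje'}.get(location, location)
    return f'sewage.{location}.{"-".join(components)}'

assert sewage_key('KRANJ_flow') == 'sewage.kranj.flow'
assert sewage_key('SALESKE_flow') == 'sewage.velenje.flow'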
Example #5
export_dataframe_to_csv(name='regions', dataframe=df.cumsum())

df = df.rolling(min_periods=1, window=14).sum().replace({0: None}).astype('Int64') \
    .drop('region.n.neznano', axis='columns')
export_dataframe_to_csv(name='active-regions', dataframe=df)

municipalities = {}
with open(os.path.join(CSV_FOLDER, 'dict-municipality.csv'), encoding='utf-8') as f:
    for row in csv.DictReader(f):
        municipalities[row['name'].lower()] = row
        if row['name_alt']:
            municipalities[row['name_alt'].lower()] = row

# --- deceased-regions.csv ---
# Copy paste latest row for every missing date
deceased_regions_csv_path = os.path.join(CSV_FOLDER, 'deceased-regions.csv')
old_hash = sha1sum(deceased_regions_csv_path)
with open(deceased_regions_csv_path, encoding='utf-8') as f:
    rows = list(csv.DictReader(f))

latest_date = str(df.index.values[-1]).split('T')[0]
latest_date = datetime.strptime(latest_date, '%Y-%m-%d').date()
while (date := datetime.strptime(rows[-1]['date'], '%Y-%m-%d').date()) < latest_date:
    rows.append(copy.deepcopy(rows[-1]))
    rows[-1]['date'] = str(date + timedelta(days=1))
# Write the rows collection back to the csv
with open(deceased_regions_csv_path, 'w', newline='', encoding='utf-8') as csvfile:
    writer = csv.DictWriter(csvfile, fieldnames=rows[0].keys())
    writer.writeheader()
    writer.writerows(rows)
write_timestamp_file(filename=deceased_regions_csv_path, old_hash=old_hash)
Example #6
import json
import os
import sys
from multiprocessing import Manager, Pool
from os.path import realpath

# utils, console, sha1Table, dependencyList, single_compile, single_linking
# and console_error_and_exit come from the surrounding module.


def do_process(data, settings):
    # Prepare output directories.
    utils.checkDir(utils.GET("object_dir"), "Object")
    verboseLogDir = utils.GET("toposort_verbose_logging_dir")
    if verboseLogDir:
        utils.checkDir(verboseLogDir, "Toposort verbose logging")
    originalCXX = utils.GET("original_cxx_executable")
    originalCC = utils.GET("original_cc_executable")

    # Shared list so worker processes can report completed objects.
    finishedList = Manager().list()

    totalLength = len(data["compile"])
    compileTaskPool = Pool()
    console.log("Compiling .o (total: {})".format(totalLength))
    for r in range(totalLength):
        i = data["compile"][r]
        execname = "(unknown)"
        cmdline = list(filter(lambda x: x != "", i.split(" ")))
        # Placeholder hash for commands without a "-o" argument; a string,
        # not a list, so it remains usable as a dict key / file name.
        filehashpath = "0" * 40
        for argnum in range(len(cmdline)):
            if cmdline[argnum] == originalCXX:
                # Swap in the instrumented compiler and request LLVM IR.
                cmdline[argnum] = utils.GET("targeted_cxx_executable")
                cmdline[argnum] += " -emit-llvm"
            elif cmdline[argnum] == originalCC:
                cmdline[argnum] = utils.GET("targeted_cc_executable")
                cmdline[argnum] += " -emit-llvm"
            elif cmdline[argnum] == "-o":
                # Redirect the output into object_dir, keyed by the SHA-1
                # of the original output path.
                filepath = realpath(cmdline[argnum + 1])
                filehashpath = utils.sha1sum(filepath)
                sha1Table[filehashpath] = filepath
                cmdline[argnum + 1] = realpath(
                    utils.GET("object_dir") + "/" + filehashpath)
                execname = utils.findName(filepath)
            elif cmdline[argnum] == "-c":
                # Compile to IR text rather than an object file.
                cmdline[argnum] = "-S"
            elif cmdline[argnum] == "-g":
                # Debug info is not needed for the bitcode objects.
                cmdline[argnum] = ""
        command = " ".join(cmdline)
        compileTaskPool.apply_async(single_compile,
                                    args=(command, filehashpath, execname, r,
                                          totalLength, finishedList,
                                          settings.clean),
                                    error_callback=console_error_and_exit)
    compileTaskPool.close()
    compileTaskPool.join()

    console.success("All object files are compiled.")

    # Construct the dependency graph from the link scripts.
    console.info("Preparing linking relationships")
    graphData = data["scripts"]

    for i in graphData:
        itemPath = i["target"]["abs_path"]
        hashedItemPath = utils.sha1sum(itemPath)
        sha1Table[hashedItemPath] = itemPath
        itemDependencies = i["target"]["dependencies"]
        dependencyList[hashedItemPath] = utils.deduplicate(
            utils.pathToSha1(itemDependencies, sha1Table))
        # A target listing itself as a dependency would stall the topological
        # sort, so drop the self-loop.
        if hashedItemPath in dependencyList[hashedItemPath]:
            console.warn("Self-cycle found. Ignoring.")
            dependencyList[hashedItemPath].remove(hashedItemPath)

    preserveProcess = utils.GET("preserve_process")
    if preserveProcess:
        console.info("Saving metadata")
        sha1FilePath = utils.GET("object_dir") + "/" + preserveProcess
        try:
            # Persist the hash-to-path table next to the objects.
            with open(sha1FilePath, "w") as sha1File:
                json.dump(sha1Table, sha1File)
            console.success("Metadata saved.")
        except PermissionError:
            console.warn(
                "Process file {} is not writable, while preserve_process is on."
                .format(sha1FilePath))

    console.info("Calculating linking sequence")
    try:
        currList = utils.topoSort(dependencyList, finishedList, sha1Table)
    except ValueError:
        console.error("Topo sort failed to complete. Please check your data.")
        sys.exit(1)
    console.success("Linking sequence calculated.")

    if settings.clean or settings.clean_linking:
        console.info("Cleaning linking targets")
        for i in dependencyList:
            target = utils.GET("object_dir") + "/" + i
            if os.access(target, os.W_OK):
                os.unlink(target)
        console.success("Linking targets cleaned.")

    if len(currList) != len(graphData):
        console.warn("Inconsistent linking recipe: sequence and graph sizes differ")
    console.debug("Linking sequence:", currList, "or",
                  list(map(lambda x: sha1Table[x], currList)))
    console.info("Start linking")
    ctrLen = len(currList)
    p = Pool()
    for idx, obj in enumerate(currList):
        console.info("Linking {} ({})  [{}/{}]".format(sha1Table[obj], obj,
                                                       idx + 1, ctrLen))
        p.apply_async(single_linking,
                      args=(obj, finishedList),
                      error_callback=console_error_and_exit)
    p.close()
    p.join()
    console.success("All targets are linked.")
    console.success("Finished.")
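
utils.topoSort itself is not shown. A minimal Kahn's-algorithm sketch of a topological ordering over dependencyList (the real helper also takes finishedList and sha1Table, which this sketch ignores); it raises ValueError on a cycle, matching the except clause above:

from collections import deque

def topo_sort(dependency_list):
    # dependency_list maps each target hash to the hashes it depends on.
    # Only dependencies that are themselves targets constrain the order.
    indegree = {node: sum(1 for d in deps if d in dependency_list)
                for node, deps in dependency_list.items()}
    dependents = {node: [] for node in dependency_list}
    for node, deps in dependency_list.items():
        for dep in deps:
            if dep in dependents:
                dependents[dep].append(node)
    queue = deque(n for n, d in indegree.items() if d == 0)
    order = []
    while queue:
        node = queue.popleft()
        order.append(node)
        for dependent in dependents[node]:
            indegree[dependent] -= 1
            if indegree[dependent] == 0:
                queue.append(dependent)
    if len(order) != len(dependency_list):
        raise ValueError("cycle detected in linking graph")
    return order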