def patch_configs(patches_path):
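    """
    Merge each JSON patch file found in patches_path into the file of the
    same name under conf/, creating conf/ and the target file as needed.
    """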
    _logger.info("Patching config files...")
    contents = os.listdir(patches_path)

    for pname in contents:
        ppath = "/".join([patches_path, pname])
        target = "conf/" + pname

        if fileutil.isfile(target):
            with open(target) as f:
                targ_dict = json.load(f)
                wdt.feed()
        else:
            targ_dict = {}

        with open(ppath) as f:
            patch_dict = json.load(f)
            wdt.feed()
        targ_dict.update(patch_dict)

        fileutil.mkdirs("conf", wdt=wdt)
        with open(target, "w") as f:
            f.write(json.dumps(targ_dict))
            wdt.feed()
        _logger.info("New %s: %s", target, targ_dict)
def reset_update_for_test(subpath):
    """
    Dumps the current state to the update directory so that the update will be
    idempotent, and resets the update status.

    Example:

        hw.power_peripherals(True)
        hw.mount_sd_card()
        import co2unit_update
        co2unit_update.wdt = wdt
        co2unit_update.reset_update_for_test("/sd/updates/update-2019-07-26")
        import os
        os.remove("/sd/var/updates-state.json")
    """
    import machine
    _logger.info("Clearing update subdirectory %s", subpath)
    fileutil.rm_recursive(subpath, wdt=wdt)
    _logger.info("Copying current code to %s", subpath)
    fileutil.copy_recursive("/flash", subpath + "/flash", wdt=wdt)
    rand_site_code = "rand_site_%04x" % (machine.rng() % (16**4))
    _logger.info("Adding a randomized conf_patch: %s", rand_site_code)
    fileutil.mkdirs(subpath + "/conf_patch")
    with open(subpath + "/conf_patch/ou-id.json", "w") as f:
        patch = {"site_code": rand_site_code}
        f.write(json.dumps(patch))
def pull_last_dir(sync_dest, ou_id, cc, dpath, ss):
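    """
    Pull the most recent remote directory under dpath that is not already
    present locally: download its files into tmp/ first, then rename the
    whole directory into place. Returns True if something new was fetched.

    sync_dest and ou_id identify the server and this unit; cc is passed
    through to fetch_dir_list, and ss is unused in this snippet.
    """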
    # Find most recent update
    _logger.info("Fetching available directories in %s ...", dpath)
    dirlist = fetch_dir_list(sync_dest, ou_id, cc, dpath)
    if not dirlist:
        _logger.info("Remote %s is missing or empty", dpath)
        return False

    most_recent = seqfile.last_file_in_sequence(dirlist)
    _logger.info("Latest in %s: %s", dpath, most_recent)

    if not most_recent:
        _logger.info("Nothing to fetch")
        return False

    # Get list of files in update
    rpath = "{dpath}/{most_recent}".format(dpath=dpath, most_recent=most_recent)
    if fileutil.isdir(rpath):
        _logger.info("Already have %s, skipping", rpath)
        return False

    _logger.info("Getting list of files in %s ...", rpath)
    tmp_dir = "tmp/" + rpath
    fetch_paths = fetch_dir_list(sync_dest, ou_id, cc, rpath, recursive=True)

    # Fetch each file
    _logger.info("Fetching files to %s", tmp_dir)
    for fpath in fetch_paths:
        tmp_path = "/".join([tmp_dir,fpath])
        if fileutil.isfile(tmp_path): continue

        path = "/ou/{id}/{rpath}/{fpath}".format(id=ou_id.hw_id, rpath=rpath, fpath=fpath)
        wdt.feed()
        resp = request("GET", sync_dest, path)
        fileutil.mkdirs(fileutil.dirname(tmp_path), wdt=wdt)
        content = resp.content
        wdt.feed()
        with open(tmp_path, "w") as f:
            # TODO: make sure to write all
            f.write(content)
            wdt.feed()

    # When finished, move whole directory in place
    _logger.info("Moving %s into place", rpath)
    fileutil.mkdirs(dpath, wdt=wdt)
    os.rename(tmp_dir, rpath)
    wdt.feed()

    return True
    def __init__(self, dirname, fname=None, progress=None, totalsize=None):
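        """
        Build a sorted listing of dirname (creating the directory first so the
        listing cannot fail) and position the push iterator at fname/progress.
        The enclosing class is not shown in this snippet; update_by_fname is
        defined elsewhere on it.
        """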
        self.fname = fname
        self.progress = progress
        self.totalsize = totalsize

        self.dirname = dirname
        # Make sure directory exists before trying to read it
        fileutil.mkdirs(dirname, wdt=wdt)
        self.dirlist = os.listdir(dirname)
        self.dirlist.sort()
        if not self.dirlist:
            _logger.info("%s: dir is empty. Nothing to push", dirname)

        self.dirindex = None
        self.update_by_fname(fname, progress)
@webhook_handler.default()
def default(event):
    """
    Handler for "other" (unmatched) user events
    """
    print('User [other] event ', event, flush=True)


############################################################################################################


# Server startup: Link Start!
if __name__ == "__main__":
    print("===== Link Start! =====")

    fileutil.mkdirs(".output")
    # handler = TimedRotatingFileHandler(
    #     fileutil.abs_path(".output/linebot.log"), 
    #     when="D", 
    #     interval=1, 
    #     backupCount=15,
    #     encoding="UTF-8", 
    #     delay=False, 
    #     utc=True
    # )
    # app.logger.addHandler(handler)

    app.run(debug=True, host='0.0.0.0', port=USE_PORT)
def save_comm_state(cs):
    fileutil.mkdirs(STATE_DIR, wdt=wdt)
    configutil.save_config_json(COMM_STATE_PATH, cs)
    _logger.info("State saved to %s: %s", COMM_STATE_PATH, cs)
def save_config_json(fpath, namespace):
    fileutil.mkdirs(fileutil.dirname(fpath))
    with open(fpath, "wt") as f:
        f.write(json.dumps(namespace.__dict__))
    _logger.info("%s saved: %s", fpath, namespace.__dict__)