Example #1
0
def remove_dataset(cfg, grace, dataset):
    """Remove a backup dataset: delete its catalog rows, then its on-disk tree.

    Args:
        cfg: application configuration, passed through to the storage helpers.
        grace: retention section name this dataset belongs to.
        dataset: identifier of the dataset to remove.
    """
    logger = logging.getLogger("Syncropy")
    # Lazy %-args: the message is only rendered if INFO is enabled
    # (same output as the old string concatenation).
    logger.info("Removing %s backup for dataset %s", grace, dataset)

    # Clean the database first so the catalog never points at files
    # that have already been deleted from disk.
    with storage.Database(cfg) as dbs:
        storage.db_del_dataset(dbs, grace, dataset)
    logger.debug("Database cleaned")

    storage.fs_remove_dataset(cfg, grace, dataset)
    logger.debug("Dataset %s tree for %s section removed", dataset, grace)
Example #2
0
File: sync.py  Project: kasmi2004/syncropy
def fs_get_data(cfg, section):
    """Materialise a dataset on disk: directories, then files, then symlinks."""
    current = section["dataset"]
    # The dataset before this one; when this is the first of the cycle,
    # wrap around to the last dataset of the retention window.
    previous = int(cfg["dataset"][section["grace"]]) if current - 1 == 0 else current - 1

    with storage.Database(cfg) as dbs:
        # Directories first, so file content has somewhere to land.
        for entry in storage.db_list_items(dbs, section, "directory"):
            try:
                storage.fs_save(cfg, section, entry)
            except FileExistsError:
                pass

        # Files: when the item is already present in the previous dataset,
        # save from there; otherwise fetch it over a fresh remote connection.
        for entry in storage.db_list_items(dbs, section, "file"):
            if storage.db_item_exist(dbs, section, entry, previous):
                storage.fs_save(cfg, section, entry, previous=True)
            else:
                with closing(get_remote_conn(cfg, section["name"])) as conn:
                    storage.fs_save(cfg, section, entry, conn=conn)

        # Symlinks last, once their potential targets exist.
        for entry in storage.db_list_items(dbs, section, "symlink"):
            storage.fs_save(cfg, section, entry)
Example #3
0
File: sync.py  Project: kasmi2004/syncropy
def fs_get_metadata(cfg, section):
    """Request the remote file listing and store each item's attributes.

    Sends a JSON "list" command over the remote connection, then reads the
    reply line by line (one JSON document per line) and saves each item's
    attributes through the storage layer.
    """
    logger = logging.getLogger("Syncropy")
    cmdlist = {
        "context": "file",
        "command": {
            "name": "list",
            "directory": cfg[section["name"]]["path"].split(","),
            "acl": cfg[section["name"]].getboolean("acl")
        }
    }

    with closing(get_remote_conn(cfg, section["name"])) as conn, storage.Database(cfg) as dbs:
        conn.send(json.dumps(cmdlist).encode("utf-8"))
        # Message grammar fixed ("sended" -> "sent"); lazy %-args avoid
        # building the string when DEBUG is disabled.
        logger.debug("%s: JSON command list sent", section["name"])

        # The remote answers with one JSON object per line.
        f = conn.makefile()
        for data in f:
            response = json.loads(data)
            storage.db_save_attrs(dbs, section, response)

        logger.debug("%s: JSON list read", section["name"])
def fetchComments():
    """Fetch YouTube comments for every video of every playlist and write
    them to per-video JSON files under COMMENTS_DATA.

    A seed file with an empty comment set is created for each video that
    has none yet; it is overwritten with real data when the video's first
    YouTube upload yields at least one comment.
    """
    db = storage.Database()

    # create _data/comments folder
    mkdirIfMissing(COMMENTS_DATA)

    for playlist in db.playlists:
        for video in playlist.videos:
            # Same filename as before; os.path.join instead of "/" concatenation.
            videoCommentsFilePath = os.path.join(
                COMMENTS_DATA, playlist.short + "_" + video.id + ".json")

            # Seed an empty comments file so downstream consumers always
            # find one, even for videos whose comments were never fetched.
            if not os.path.isfile(videoCommentsFilePath):
                with open(videoCommentsFilePath, "w") as f:
                    json.dump(VideoComments().toJsonSerializable(),
                              f,
                              indent=4)
                    f.write("\n")

            for upload in video.uploads:
                # Guard clause instead of nesting the whole body in an if.
                if upload.hoster != "youtube":
                    continue
                print(
                    "Fetching comments for {}/{} (https://www.youtube.com/watch?v={})"
                    .format(playlist.short, video.id, upload.id))
                comments = videoFetchAllComments(upload.id)

                # Only overwrite the seed file when something was fetched
                # (truthiness check instead of len(...) > 0).
                if comments.comments:
                    with open(videoCommentsFilePath, "w") as f:
                        json.dump(comments.toJsonSerializable(),
                                  f,
                                  indent=0,
                                  ensure_ascii=False)
                        f.write("\n")

                # do not load comments of any other youtube uploads (maybe
                # this could be changed later)
                break
Example #5
0
    "dummy": plugins.dummy
}

# Command-line interface for the synchronisation script.
parser = argparse.ArgumentParser(description='Synchronize ')
# Filters: restrict the run to a single part / playlist / hoster.
parser.add_argument("--part", type=str)
parser.add_argument("--playlist", type=str)
parser.add_argument("--hoster", type=str)
# Default -1 — presumably means "no resolution filter"; confirm against usage.
parser.add_argument("--resolution", type=int, default=-1)
# Opt-in actions; each flag defaults to False unless given on the command line.
parser.add_argument("--download", action="store_true")
parser.add_argument("--upload", action="store_true")
parser.add_argument("--delete-offline", action="store_true")
parser.add_argument("--gen-vtt", action="store_true")

# Parsed at import time: this module is intended to run as a script.
args = parser.parse_args()

# Module-level database handle shared by the functions below.
db = storage.Database()


def pluginByName(pluginName):
    """Return the synchroload plugin registered under *pluginName*.

    Raises KeyError when no plugin is registered under that name.
    """
    plugin = SYNCHROLOAD_PLUGINS[pluginName]
    return plugin


def check_availability(video, upload, playlist, part):
    plugin = SYNCHROLOAD_PLUGINS[upload.hoster]
    print(
        "[check online] Checking availability for {} {} on {} ({}) ...".format(
            playlist.name, part, plugin.HOSTER_NAME, upload.id),
        end="")
    if not downloader.check_availability(plugin.linkFromId(upload.id)):
        if plugin.HOSTER_KEEP_UNAVAILABLE_UPLOADS:
            print(" [FAIL] - Disabling!")