Example #1
def get_sync_cursor(root):
    sync_folder = join(root, '.sync')
    cursor_path = join(sync_folder, '%s.cursor' % app_name)
    if os.path.isfile(cursor_path):
        with open(cursor_path) as f:
            return f.read()
    return ''
Example #2
def delete_sync_cursor(root):
    sync_folder = join(root, '.sync')
    cursor_path = join(sync_folder, '%s.cursor' % app_name)
    try:
        delete_file(cursor_path, to_trash=False)
    except:
        pass
Example #3
def show_info(site_folder):
    site_token = get_site_token(site_folder)
    print_with_color('Version: %s' % version, 'green')
    if not site_token:
        print_with_color('current folder is not a Bitcron site', 'red')
    else:
        print_with_color(
            'Site Token: %s (do not tell anyone else!)' % site_token,
            'magenta')
        site_sync_configs = get_sync_configs(site_folder, site_token)
        if site_sync_configs:
            domain = site_sync_configs.get('domain')
            main_node = site_sync_configs.get('main_node')
            if domain:
                print_with_color('Domain: %s' % domain, 'green')
            if main_node:
                print_with_color('MainNode: %s' % main_node, 'yellow')
            site_node_config_filepath = join(site_folder, site_node_filename)
            try:
                with open(site_node_config_filepath) as f:
                    user_node_info = f.read().strip().strip('/').split('/')[-1]
                    if user_node_info:
                        print_with_color(
                            'MyNode: %s (current server)' % user_node_info,
                            'cyan')
            except:
                pass
Example #4
def get_sync_configs(root, site_token):
    site_node_config_filepath = join(root, site_node_filename)
    user_site_node = None
    try:
        with open(site_node_config_filepath) as f:
            user_site_node = f.read().strip()
            user_site_node = user_site_node.strip('/').split('/')[-1]
    except:
        pass
    site_info = get_sync_site_info(site_token)
    if site_info:
        configs = {}
        main_node = site_info.get('main_node')
        site_domain = site_info.get('domain')
        url_fields = [
            'sync_url', 'sync_list_url', 'sync_content_url', 'sync_should_url'
        ]
        for url_field in url_fields:
            url = site_info.get(url_field)
            if not url:
                return {}
            if user_site_node:
                url = url.replace(main_node, user_site_node)
            configs[url_field] = url
        configs['domain'] = site_domain
        configs['main_node'] = main_node
        return configs
    return {}
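For reference, a hypothetical sketch of the dict get_sync_configs returns on success; the hostnames and URL paths below are made up, but the keys mirror the code above, and each *_url has main_node swapped for the node named in the local site_node file when one is set.

# hypothetical illustration only: keys match get_sync_configs above, values are assumptions
example_configs = {
    'sync_url': 'https://node2.bitcron.com/sync',
    'sync_list_url': 'https://node2.bitcron.com/sync/list',
    'sync_content_url': 'https://node2.bitcron.com/sync/content',
    'sync_should_url': 'https://node2.bitcron.com/sync/should',
    'domain': 'mysite.bitcron.com',
    'main_node': 'node1.bitcron.com',  # the *_url values have this host replaced by the user's node
}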
Example #5
def make_sure_sync_data_folder(root):
    sync_folder = join(root, '.sync')
    data_folder = get_sync_data_folder(root)
    if not os.path.isdir(data_folder):
        os.makedirs(data_folder)
        hide_a_path(sync_folder)
    return data_folder
Example #6
def delete_site_folder_token(site_folder):
    if not os.path.isdir(site_folder):
        return  # ignore
    site_token_filepath = join(site_folder, site_token_filename)
    try:
        os.remove(site_token_filepath)
    except:
        pass
Example #7
def get_site_token(site_folder):
    if not site_folder:
        return
    token_filepath = join(site_folder, site_token_filename)
    if os.path.isfile(token_filepath):
        try:
            with open(token_filepath) as f:
                token_content = f.read().strip()
            if len(token_content) == site_token_length:
                return token_content
        except:
            pass
Example #8
def find_files_to_delete(root_path):
    sync_data_folder = get_sync_data_folder(root_path)
    if not os.path.isdir(sync_data_folder):  # never synced before
        return []
    files = loop_local_filesystem(root_path,
                                  check_md5=False)  # same_path already
    data_filenames = os.listdir(sync_data_folder)
    old_file_paths = []
    for data_filename in data_filenames:
        data_filepath = join(sync_data_folder, data_filename)
        try:
            with open(data_filepath) as f:
                data = json.loads(f.read())
                filepath = data.get('filepath')
                if data.get('is_relative'):
                    filepath = join(root_path, filepath)
                if filepath:
                    old_file_paths.append(same_slash(filepath))
        except:
            pass
    return list(set(old_file_paths) - set(files))
Example #9
def get_sync_data(filepath, root):
    # get the synced information for a filepath
    # locate the md5 data file for this filepath; it stores the info (md5 & synced_at) used to decide whether the file needs syncing
    filepath = same_slash(filepath)
    data_path = get_sync_data_filepath(filepath, root)
    if os.path.isfile(data_path):
        try:
            with open(data_path) as f:
                data = json.loads(f.read())
                if data.get('is_relative'):
                    # relative path, convert it to an absolute path
                    data['filepath'] = join(root, data['filepath'])
            if isinstance(data, dict):
                return data
        except:
            pass
    return {}  # final
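For orientation, a hedged sketch of one per-file record under the .sync data folder: filepath and is_relative are the keys actually read above, while the md5/synced_at values are the ones the comment mentions; their exact key names are assumptions.

# hypothetical contents of a single sync data file (JSON); only filepath and
# is_relative are read literally above, the other key names are assumed
example_record = {
    'filepath': 'posts/hello.md',  # joined onto root when is_relative is true
    'is_relative': True,
    'md5': 'd41d8cd98f00b204e9800998ecf8427e',  # assumed key name
    'synced_at': '2016-01-01 12:00:00',         # assumed key name
}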
Example #10
def loop_local_filesystem(root_path, check_md5=True):
    root_path = same_slash(root_path)
    if not os.path.isdir(root_path):  # root folder does not exist, nothing to do
        return []
    file_paths = []
    for parent, folders, files in os.walk(root_path):
        if is_a_hidden_path(parent):
            continue
        elif not is_real(parent):  # skip symlinked paths
            continue
        for fs in [files, folders]:
            for filename in fs:
                filepath = join(parent, filename)
                # check whether it is already tracked in the local sync data
                if not should_sync(filepath, root_path, check_md5):
                    continue
                file_paths.append(filepath)
    return file_paths
Example #11
def store_sync_cursor(root, cursor):
    make_sure_sync_data_folder(root)
    sync_folder = join(root, '.sync')
    cursor_path = join(sync_folder, '%s.cursor' % app_name)
    with open(cursor_path, 'w') as f:
        f.write(cursor)
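A minimal round-trip sketch using the cursor helpers from Examples #1, #2 and #11; the cursor value itself is hypothetical and would normally come back from the sync server.

import os

root = os.getcwd()
last_cursor = get_sync_cursor(root)   # '' when nothing has been stored yet
server_cursor = 'abc123'              # hypothetical value returned by the server
if server_cursor != last_cursor:
    store_sync_cursor(root, server_cursor)
# delete_sync_cursor(root) would drop the cursor and force a full re-sync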
Example #12
def make_sure_sync_log_path(root):
    make_sure_sync_data_folder(root)
    sync_folder = join(root, '.sync')
    log_path = join(sync_folder, '%s.log' % app_name)
    return log_path
Example #13
def get_sync_log_path(root):
    sync_folder = join(root, '.sync')
    log_path = join(sync_folder, '%s.log' % app_name)
    return log_path
Example #14
def main():
    current_folder = os.getcwd()
    argv = sys.argv

    if len(argv) == 2 and argv[1].strip('-') in ['help', 'h']:
        return show_help()

    if len(argv) == 2 and len(argv[1].strip()) == site_token_length:
        # set token
        site_token = argv[1].strip()
        site_token_filepath = join(current_folder, site_token_filename)
        make_sure_path(site_token_filepath)
        with open(site_token_filepath, 'w') as f:
            f.write(site_token)
        info = "site token is stored in %s now, do not public this file to anyone else." % site_token_filepath
        print_with_color(info, 'yellow')
    else:
        if not is_site_folder(current_folder):
            # try to match parents
            current_folder = get_site_folder_from_parents(current_folder)
        if not is_site_folder(current_folder):
            print_with_color(
                "please run `bitcron TOKEN` first, you can find site TOKEN from your sites list page.",
                'red')
        else:
            print_with_color('under %s now' % current_folder, 'green')

            # set site node
            if len(argv) == 2 and argv[1].strip().endswith('.bitcron.com'):

                site_node = argv[1].strip()
                site_node_config_filepath = join(current_folder,
                                                 site_node_filename)
                with open(site_node_config_filepath, 'w') as f:
                    f.write(site_node)
                print_with_color('current node on %s now' % site_node, 'green')
                return

            action = ''
            if len(argv) == 1:
                action = 'sync_to'  # sync to server
            elif len(argv) == 2:
                raw_action = argv[1]
                if raw_action in ['sync', 'sync_from', 'sync-from']:
                    action = 'sync_from'
                elif raw_action in ['logout']:
                    action = 'logout'
                elif raw_action in ['reset']:
                    action = 'reset'
                else:
                    action = raw_action

            if action == 'info':
                show_info(current_folder)
            elif action in ['site', 'open']:
                opened_url = open_site_in_browser(current_folder)
                if not opened_url:
                    print_with_color("can't handle this command", 'red')
                else:
                    print_with_color(
                        "%s is opened in your browser now" % opened_url,
                        'cyan')
            elif action == 'sync_to':
                do_sync_to(token=get_site_token(current_folder),
                           root=current_folder)
            elif action == 'sync_from':
                do_sync_from(token=get_site_token(current_folder),
                             root=current_folder)
            elif action == 'logout':
                delete_site_folder_token(current_folder)
                print_with_color(
                    "site TOKEN of current site folder is cleared on this disk.",
                    'red')
            elif action == 'reset':
                delete_sync_meta_folder(current_folder)
                print_with_color(
                    "metadata information files of site folder are cleared on this disk.",
                    'red')
            else:
                print_with_color("can't find matched command to run", 'red')
Example #15
def get_sync_data_folder(root):
    data_path = join(root, '.sync/%s' % app_name)
    return data_path
Example #16
def delete_sync_meta_folder(site_folder):
    delete_site_folder_token(site_folder)
    meta_folder = join(site_folder, '.sync')
    delete_file(meta_folder)
Example #17
def handle_meta(token, meta, root_path, sync_content_url):
    # the prefix is more of a scope, usually a site folder
    root_path = same_slash(root_path).rstrip('/')
    relative_path = same_slash(meta['path']).lstrip('/')  # case-sensitive, relative to the root folder
    site_name = relative_path.strip('/').split('/')[0]

    full_path = join(root_path, relative_path)  # path on the local machine
    version = meta.get('version', '')
    is_deleted = meta.get('is_deleted', False)
    is_dir = meta.get('is_dir', False)

    if is_a_hidden_path(relative_path):  # skip hidden files
        return

    if relative_path.startswith(
            '_cache/') or relative_path == '_cache':  # skip cache files
        return

    if os.path.exists(join(root_path, site_name, '.sync_ignore')):
        return  # ignore

    if is_deleted:
        if os.path.exists(full_path):
            delete_file(full_path)
            # delete
            after_sync_deleted(full_path, root=root_path)
            return 'got %s from server, delete' % relative_path
    elif is_dir:
        if not os.path.isdir(full_path):
            try:
                os.makedirs(full_path)
                after_synced(full_path, root=root_path)
            except OSError:
                return 'failed to create dir %s' % full_path
            except:
                pass
    else:  # a regular file
        file_id = to_unicode(meta['_id'])
        need_download = True
        if os.path.isfile(full_path):
            old_version = md5_for_file(full_path)
            if old_version == version:
                need_download = False

        if need_download:
            # need to download the actual file content
            timeout = 20 * 60
            if is_markdown(full_path):
                timeout = 2 * 60
            response = requests.post(sync_content_url,
                                     data=dict(token=token, id=file_id),
                                     timeout=timeout)
            if response.status_code >= 400:
                # ignore
                return 'failed to get %s, status code is %s' % (
                    full_path, response.status_code)
            content = response.content
            try:
                create_file(full_path, content)
            except OSError:
                return 'failed to create file %s, ignoring' % full_path

            # record the synced state so the next sync does not treat this file as needing sync
            after_synced(full_path, root=root_path)
            time.sleep(0.2)  # avoid getting blocked by the server for sending too many requests
            return 'got %s from server' % relative_path
        else:
            return '%s is the same on both the client and the server' % relative_path
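handle_meta processes one remote entry at a time; a hypothetical driver loop is sketched below (the real sync_from code is not part of these examples), assuming metas is the list fetched from sync_list_url and sync_configs comes from get_sync_configs in Example #4.

def apply_remote_metas(token, metas, root_path, sync_configs):
    # hypothetical wiring: feed every remote meta dict (with _id, path, version,
    # is_dir, is_deleted) through handle_meta and collect the log lines it returns
    logs = []
    for meta in metas:
        log = handle_meta(token, meta, root_path,
                          sync_configs['sync_content_url'])
        if log:
            logs.append(log)
    return logs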
Example #18
def get_sync_data_filepath(filepath, root):
    data_folder = make_sure_sync_data_folder(root)
    data_filename = get_sync_data_filename(filepath, root)
    data_filepath = join(data_folder, data_filename)
    return data_filepath
Example #19
def do_sync_to(token, root):
    # need to upload
    write_logs(['start to sync now'], '', root)  # init
    changes_log_filepath = join(root, 'sync_changes.log')

    sync_configs = get_sync_configs(root, token)
    if not sync_configs:
        write_logs(['site TOKEN error or network connection error'], '',
                   root)  # init
        return
    sync_should_url = sync_configs['sync_should_url']
    sync_url = sync_configs['sync_url']

    try:
        file_paths = loop_local_filesystem(root)
        try:
            file_paths.remove(changes_log_filepath)
        except:
            pass

        if file_paths:
            write_logs('will try to sync %s files to server' % len(file_paths),
                       root=root)
        for filepath in file_paths:
            logs = sync_file_to_server(token,
                                       filepath,
                                       root=root,
                                       sync_should_url=sync_should_url,
                                       sync_url=sync_url)
            if should_break_sync(logs, root):
                break
            write_logs(logs, filepath, root)
            if logs:  # archive the state regardless of whether the logs indicate success
                after_synced(filepath, root)

        files_to_delete = find_files_to_delete(root)
        if files_to_delete:
            write_logs('will try to delete %s files on server' %
                       len(files_to_delete),
                       root=root)
        for filepath in files_to_delete:
            # delete file
            logs = sync_file_to_server(token,
                                       filepath,
                                       root=root,
                                       sync_should_url=sync_should_url,
                                       sync_url=sync_url)
            if should_break_sync(logs, root):
                break
            write_logs(logs, filepath, root)
            after_sync_deleted(filepath,
                               root)  # no need to try again to delete on cloud

        # write file_paths & files_to_delete into a config file under root, for other callers
        if files_to_delete or file_paths:
            data = dict(
                date=get_now_str(),
                root=root,
                added=file_paths,
                deleted=files_to_delete,
            )
            with open(changes_log_filepath, 'w') as f:
                f.write(json.dumps(data))
        else:
            write_logs(['no need to sync to server, it is up-to-date.'],
                       root=root)

    except Exception as e:
        write_logs([to_str(e)], '', root)
        write_logs(['an unexpected error happened, aborting sync now'], '', root)
        error_info = sys.exc_info()
        if error_info:
            e_type, value, tb = error_info[:3]
            traceback.print_exception(e_type, value, tb)
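When anything changed, do_sync_to leaves a sync_changes.log JSON file in the root; a small sketch of reading that summary back, using only the date/root/added/deleted keys written above.

import json
import os

def read_last_changes(root):
    # return the summary dict written by do_sync_to, or {} if nothing has been recorded yet
    changes_log_filepath = os.path.join(root, 'sync_changes.log')
    if not os.path.isfile(changes_log_filepath):
        return {}
    with open(changes_log_filepath) as f:
        return json.loads(f.read())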