Example #1
def do_treat(args):
    """Treat new, missing, updated, and moved files in the repository."""
    fs_dir = os.path.abspath(".")
    
    repo = common.get_repo(fs_dir)
    if not repo:
        log.critical("Could not find a repository with {} in copies...".format(fs_dir))
        return

    if not status.has_status(repo):
        log.critical("Status files are missing, run `status` first.")
        return
    
    if args["new"] or args["all"]:
        treat_new(repo, fs_dir, delete_on_missing=args["--delete"])

    if args["missing"] or args["all"]:
        treat_missing(repo, fs_dir)
    
    if args["updated"] or args["all"]:
        treat_updated(repo, fs_dir)

    if args["moved"] or args["all"]:
        treat_moved(repo, fs_dir)
        
    log.warn("Don't forget to run `status --force` to refresh status files.")
Example #2
def do_status(args):
    """Show, clean, verify, or (re)build the repository status files."""
    fs_dir = os.path.abspath(".")

    do_checksum = args["--checksum"]
    
    repo = common.get_repo(fs_dir)
    if not repo:
        log.critical("Could not find a repository with {} in copies...".format(fs_dir))
        return
        
    if args["show"]:
        do_show(repo)
        
    elif args["clean"]:
        if not do_clean(repo):
            log.warn("No status file to clean for repository '{}'.".format(repo.name))
        
    elif args["verify"]:
        verify.verify_all(repo, fs_dir)
        
    else:
        if has_status(repo) and not args["--force"]:
            do_show(repo)
            log.info("(Run `status --force` to force rescan.)")
        else:
            do_clean(repo)
            status(repo, fs_dir, do_checksum)
Example #3
def do_info(args):
    """Print the config path, repository copies, and a status-file summary."""
    fs_dir = os.path.abspath(".")

    log.info("Config path: {}/".format(config.CONFIG_PATH))
    
    repo = common.get_repo(fs_dir)
    if repo is None:
        log.warn("No repository found for '{}'.".format(fs_dir))
        return
    log.info("Repository:  {} ({})".format(repo.name, repo.copyname))
    log.info("Copies:".format(repo.copyname, fs_dir))

    max_size = max(map(len, repo.get_copies()))
    for copy, dirname in repo.get_copies().items():
        log.info("  {}{}--> {}".format(copy,
                                       (max_size-len(copy)+1)*" ",
                                       dirname))
        
    log.info("")
    log.info("Temporary dir: {}".format(repo.tmp_dir))
    log.info("")
    log.info("Status files:")
    max_size = max(map(len, config.STATUS_FILES))
    for status_file in config.STATUS_FILES:
        try:
            path = os.path.join(repo.tmp_dir, status_file)
            with open(path) as status_f:
                nb_lines = len(status_f.readlines())
            log.info("  {}:{}{:3d} lines".format(status_file,
                                              (max_size-len(status_file)+1)*" ",
                                              nb_lines))
        except OSError as e:
            log.info("  {}: couldn't access. ({})".format(status_file, e))
Example #4
def process_counts_queue(event, context):
    """
    Delete count record by entity_id and version
    """
    logger.debug('event: {}'.format(event))
    logger.debug('context: {}'.format(context))

    records = event['Records']

    for record in records:
        repo, suppliers = get_repo(record)
        obj = json.loads(record['body'])
        repo.process_counts_queue(obj)
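
The handler expects the standard AWS Lambda event shape for SQS triggers: a Records list whose entries carry the raw message string in body. A minimal local-invocation sketch; the payload fields are hypothetical, suggested only by the docstring's mention of entity_id and version:

import json

# Standard SQS event envelope; the body fields below are made up.
event = {
    "Records": [
        {"body": json.dumps({"entity_id": "abc-123", "version": 2})}
    ]
}
process_counts_queue(event, context=None)  # context is only debug-logged here
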
Example #5
def process_production_queue(event, context):
    """
    Process production record in queue
    """
    logger.debug('event: {}'.format(event))
    logger.debug('context: {}'.format(context))

    records = event['Records']

    for record in records:
        repo, suppliers = get_repo(record)
        obj = json.loads(record['body'])
        repo.process_production_queue(obj)
Example #6
def do_update(args):
    """Update the repository database; only allowed from the master copy."""
    fs_dir = os.path.abspath(".")

    repo = common.get_repo(fs_dir)

    if not repo:
        log.critical("Could not find a repository with {} in copies...".format(fs_dir))
        return
    if repo.copyname != "master":
        log.critical("Database can only be updated from master copy. "
                     "Current directory is '{}' copy.".format(repo.copyname))
        return
    
    do_checksum = args["--checksum"]
    
    update_database(repo, fs_dir, do_checksum)
Example #7
def process_projections_queue(event, context):
    """
    Process SQS queue projections
    """
    logger.debug('event: {}'.format(event))
    logger.debug('context: {}'.format(context))

    records = event['Records']

    for record in records:
        repo, suppliers = get_repo(record)
        obj = json.loads(record['body'])
        supplier_id = obj['supplier_id']
        brand_id = obj['brand_id']
        package_type_id = obj['package_type_id']
        start_date = obj['start_date']
        repo.process_projections_queue(supplier_id, brand_id, package_type_id,
                                       start_date, context.aws_request_id)
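
Here the message body must carry the four fields unpacked above, and a real (or stubbed) Lambda context is needed because context.aws_request_id is read. A sketch of a conforming message; only the keys are dictated by the handler, the values are hypothetical:

import json

body = json.dumps({
    "supplier_id": 42,
    "brand_id": 7,
    "package_type_id": 3,
    "start_date": "2020-01-01",
})
event = {"Records": [{"body": body}]}
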
Example #8
def process_supplier_save(event, context):
    """
    Process supplier save notification
    """
    logger.debug('event: {}'.format(event))
    logger.debug('context: {}'.format(context))

    records = event['Records']

    for record in records:
        obj = json.loads(record['Sns']['Message'])
        if not obj['active']:
            item = {'body': json.dumps(obj)}
            repo, suppliers = get_repo(item)

            # delete supplier from app metadata of all users belonging to supplier
            user_ids = (user["user_id"] for user in obj.get("users", []))
            for user_id in user_ids:
                repo.remove_supplier_from_app_metadata(obj["entity_id"],
                                                       user_id)
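
Unlike the SQS handlers, this one unwraps an SNS envelope (record['Sns']['Message']). A sketch of a deactivation notification that would reach the cleanup branch; the keys come from the handler above, the values are hypothetical:

import json

message = {
    "entity_id": "supplier-001",   # hypothetical values
    "active": False,               # only inactive suppliers are cleaned up
    "users": [{"user_id": "auth0|alice"}, {"user_id": "auth0|bob"}],
}
event = {"Records": [{"Sns": {"Message": json.dumps(message)}}]}
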
Example #9
import os

def get_random_file(repo):
    # NOTE: the function header and directory walk were missing from this
    # example; reconstructed (assumed) from how file_names is used below.
    file_names = []
    for root, dirs, files in os.walk(repo.working_tree_dir):
        if '.git' in dirs:
            dirs.remove('.git')  # skip git metadata
        for file in files:
            file_names.append(os.path.join(root, file))

    if file_names:
        import random
        return random.choice(file_names)

    return None


# NOTE: before executing this, run append_to_remote_repo.py
if __name__ == '__main__':
    from common import get_repo
    repo = get_repo()
    print(repo)
    print()

    file_name = get_random_file(repo)
    if file_name:
        file_name = os.path.basename(file_name)
        message = 'Remove: ' + file_name
        print(message)

        repo.index.remove([file_name])
        repo.index.commit(message)

        repo.remotes.origin.push()
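
common.get_repo is not shown, but the repo.index and repo.remotes.origin calls above are the GitPython API, so it presumably returns a git.Repo. A minimal sketch under that assumption (the path handling is a guess):

import os
from git import Repo  # GitPython

def get_repo(path=None):
    # Hypothetical helper: open the repository at `path` (or the
    # current working directory), walking up to find the .git dir.
    return Repo(path or os.getcwd(), search_parent_directories=True)
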
Example #10
def setup(parsed_args):
    """Create or reconfigure a project's config repository, teams, and permissions."""
    # grab the last field from the delimited project name
    environment = parsed_args.project_id.upper().split("-").pop()
    try:
        org = get_org(parsed_args, parsed_args.config_org)
    except BadCredentialsException as e:
        print(e.data)
        print(
            "check token and pass using the --vcs-token (-t) argument or setting "
            "the token in " + SETTINGS.DEFAULT_TOKEN_FILE)
        raise

    try:
        existing_repo = get_repo(org, parsed_args.config_repo)
    except GithubException as e:
        if e.status == 404:
            existing_repo = None
        else:
            raise

    if existing_repo and not parsed_args.force:
        print("Repository " + parsed_args.config_repo +
              " already exists. Use --force to reconfigure")
        exit(1)
    elif existing_repo and parsed_args.force:
        repo = existing_repo
        commit_msg = "Update "
    else:
        repo = create_repo(org, name=parsed_args.config_repo)
        commit_msg = "Initial commit"

    # Configure project
    for config_file, change_file in parsed_args.change_files.items():
        if config_file == "project_settings_file":
            config = configure_project_data(
                change_file,
                project_id=parsed_args.project_id,
                project_name=parsed_args.project_id,
            )
        else:
            config = __file_content(change_file)
        # TODO: capture update_repo_file exception and exclude if --force is
        #  set
        try:
            # noinspection PyUnboundLocalVariable
            update_repo_file(
                repo,
                SETTINGS.REMOTE_FILES[config_file],
                config,
                commit_msg,
                parsed_args.force,
                parsed_args.bypass_branch_protection,
            )
        except GithubException as e:
            print(e.data)

    # Create teams
    std_team = create_team(org)
    priv_team = create_team(
        org,
        SETTINGS.PRIV_TEAM_ATTRIBUTES["name"],
        SETTINGS.PRIV_TEAM_ATTRIBUTES["permission"],
    )
    admin_team = get_team(org, SETTINGS.ADMIN_TEAM)
    configure_remote_object(
        std_team.url,
        parsed_args.vcs_token,
        description=SETTINGS.STANDARD_TEAM_ATTRIBUTES["description"],
    )
    configure_remote_object(
        priv_team.url,
        parsed_args.vcs_token,
        parent_team_id=std_team.id,
        description=SETTINGS.PRIV_TEAM_ATTRIBUTES["description"],
    )

    # Set repository permission
    if admin_team:
        set_repo_team_perms(org, repo, admin_team.id, "admin")
    set_repo_team_perms(org, repo, std_team.id, "read")
    set_repo_team_perms(org, repo, priv_team.id, "write")
    try:
        set_repo_visibility(repo, "private")
    except GithubException as e:
        print(e.data)

    set_master_branch_permissions(repo, parsed_args.branch_permissions)
    if parsed_args.output_data:
        write_project_data(repo, [std_team, priv_team])

    return True
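
setup() reads quite a few attributes off parsed_args; this namespace sketch lists every field the function touches, with hypothetical values:

from argparse import Namespace

# Attribute names are taken from setup() above; all values are made up.
parsed_args = Namespace(
    project_id="acme-web-PROD",
    config_org="acme",
    config_repo="acme-web-config",
    vcs_token="<token>",
    force=False,
    bypass_branch_protection=False,
    branch_permissions={},  # opaque here; passed straight to set_master_branch_permissions
    output_data=True,
    change_files={"project_settings_file": "settings.json"},
)
setup(parsed_args)
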