Example no. 1
def delete_old_jobs(queue):
    max_date = datetime.now() - timedelta(days=int(args.max_days))

    while queue.qsize() > 0:
        project = queue.get()

        gitlab_lib.log("Getting jobs for project [%d] %s" %
                       (project["id"], project["name_with_namespace"]))

        for job in gitlab_lib.get_jobs(project):
            job_date = parse(job["created_at"])
            max_date = max_date.replace(tzinfo=job_date.tzinfo)

            gitlab_lib.debug(
                "Checking job %d of project [%d] %s with create date %s" %
                (job["id"], project["id"], project["name_with_namespace"],
                 job["created_at"]))

            if job_date < max_date:
                gitlab_lib.log(
                    "Deleting job %d of project [%d] %s" %
                    (job["id"], project["id"], project["name_with_namespace"]))

                if not args.dryrun:
                    gitlab_lib.delete_job(project["id"], job["id"])
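# Sketch only (not part of the original script): one way delete_old_jobs()
# could be driven. It assumes `args` and gitlab_lib are already configured as
# in the surrounding examples; calling gitlab_lib.get_projects() without a
# username argument is an assumption about its signature.
def example_delete_old_jobs_run():
    from queue import Queue

    job_queue = Queue()

    # enqueue every project, then let delete_old_jobs() drain the queue
    for project in gitlab_lib.get_projects():
        job_queue.put(project)

    delete_old_jobs(job_queue)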
def archive_directory(project, component, directory, backup_dir):
    """
    Archive directory into backup_dir
    """
    if os.path.exists(directory):
        gitlab_lib.log("Backing up %s from project %s [ID %s]" % (component, project['name'], project['id']))

        if component == "upload":
            archivate(directory, backup_dir, "upload_")
        else:
            archivate(directory, backup_dir)
    else:
        gitlab_lib.log("No %s found for project %s [ID %s]" % (component, project['name'], project['id']))
def backup_user_metadata(username):
    """
    Backup all metadata including email addresses and SSH keys of a single user
    """

    user = gitlab_lib.get_user_metadata(username)
    backup_dir = os.path.join(OUTPUT_BASEDIR, "user_%s_%s" % (user['id'], user['username']))
    if not os.path.exists(backup_dir): os.mkdir(backup_dir)
    
    gitlab_lib.log(u"Backing up metadata of user %s [ID %s]" % (user["username"], user["id"]))
    dump(backup_dir, "user.json", user)
    dump(backup_dir, "projects.json", gitlab_lib.get_projects(username))
    dump(backup_dir, "ssh.json", gitlab_lib.fetch(gitlab_lib.USER_SSHKEYS % (gitlab_lib.API_URL, user["id"])))
    dump(backup_dir, "email.json", gitlab_lib.fetch(gitlab_lib.USER_EMAILS % (gitlab_lib.API_URL, user["id"])))
def fill_restore_queue(project, component):
    """
    Fill queue with restore data
    project is the project metadata dictionary
    component is the name of a component, matching the keys of PROJECT_COMPONENTS
    """
    restore_file = os.path.join(args.backup_dir, component + ".json")
    backup = gitlab_lib.parse_json(restore_file)

    if backup:
        for entry in backup:
            entry['component'] = component
            queue.put(entry)
    else:
        gitlab_lib.log("Nothing to do for " + component)
def backup_issues(api_url, project, token, backup_dir):
    """
    Backup all issues of a project
    Issue notes must be backed up with an additional API call
    """
    gitlab_lib.log(u"Backing up issues from project %s [ID %s]" % (project['name'], project['id']))

    issues = gitlab_lib.fetch(api_url % (gitlab_lib.API_URL, project['id']))

    dump(backup_dir, "issues.json", issues)

    for issue in issues:
        notes = gitlab_lib.fetch(gitlab_lib.NOTES_FOR_ISSUE % (gitlab_lib.API_URL, project['id'], issue['id']))

        if notes:
            dump(backup_dir, "issue_%d_notes.dump" % (issue['id'],), notes)
def backup(repository_dir, queue):
    """
    Backup everything for the given project
    For every project create a directory following the id_namespace_name pattern
    Dump project metadata and each component as separate JSON files
    """
    while not queue.empty():
        project = queue.get()
        backup_dir = os.path.join(OUTPUT_BASEDIR, "%s_%s_%s" % (project['id'], project['namespace']['name'], project['name']))
        if not os.path.exists(backup_dir): os.mkdir(backup_dir)

        dump(backup_dir, "project.json", project)

        # shall we backup local data like repository and wiki?
        if repository_dir:
            backup_local_data(repository_dir, args.upload, backup_dir, project)

        # backup metadata of each component
        for (component, api_url) in gitlab_lib.PROJECT_COMPONENTS.items():
            # issues
            if component == "issues" and \
               project.get(component + "_enabled") == True:
                backup_issues(api_url, project, args.token, backup_dir)

            # snippets
            elif component == "snippets" and \
               project.get(component + "_enabled") == True:
                backup_snippets(api_url, project, backup_dir)

            # milestones are enabled if either issues or merge_requests are enabled
            # labels cannot be disabled therefore no labels_enabled field exists
            # otherwise check if current component is enabled in project
            elif component == "milestones" and \
                 (project.get("issues_enabled") == True or project.get("merge_requests_enabled") == True) or \
                 component == "labels" or \
                 project.get(component + "_enabled") == True:
                gitlab_lib.log(u"Backing up %s from project %s [ID %s]" % (component, project['name'], project['id']))
                dump(backup_dir,
                     component + ".json",
                     gitlab_lib.fetch(api_url % (gitlab_lib.API_URL, project['id'])))

            else:
                gitlab_lib.log("Component %s disabled for project %s [ID %s]" % (component, project['name'], project['id']))
def backup_snippets(api_url, project, backup_dir):
    """
    Backup snippets and their contents
    Snippet contents must be backed up with an additional API call
    """
    gitlab_lib.log(u"Backing up snippets from project %s [ID %s]" % (project['name'], project['id']))

    snippets = gitlab_lib.fetch(api_url % (gitlab_lib.API_URL, project['id']))

    dump(backup_dir, "snippets.json", snippets)

    for snippet in snippets:
        dump(backup_dir,
             "snippet_%d_content.dump" % (snippet['id'],),
             gitlab_lib.rest_api_call(gitlab_lib.GET_SNIPPET_CONTENT % (gitlab_lib.API_URL, project['id'], snippet['id']), method="GET").text)

        notes = gitlab_lib.fetch(gitlab_lib.NOTES_FOR_SNIPPET % (gitlab_lib.API_URL, project['id'], snippet['id']))

        if notes:
            dump(backup_dir, "snippet_%d_notes.dump" % (snippet['id'],), notes)
def backup_local_data(repository_dir, upload_dir, backup_dir, project):
    """
    Backup repository upload and wiki data locally for the given project
    """
    src_dirs = { "repository": os.path.join(repository_dir, project['namespace']['name'], project['name'] + ".git"),
                 "wiki": os.path.join(repository_dir, project['namespace']['name'], project['name'] + ".wiki.git"),
                 "upload": os.path.join(upload_dir, project['namespace']['name'], project['name']) }

    for (component, directory) in src_dirs.items():
        try_again = 3

        while try_again:
            try:
                archive_directory(project, component, directory, backup_dir)
                try_again = 0
            except OSError as e:
                try_again = try_again - 1

                if not try_again:
                    gitlab_lib.log("Failed to backup %s %s: %s" % (project['name'], component, str(e)))
def fill_restore_queue(project, component):
    """
    Fill queue with restore data
    project is the project metadata dictionary
    component is the name of a component, matching the keys of PROJECT_COMPONENTS
    """
    restore_file = os.path.join(args.backup_dir, component + ".json")
    iid_counter = 1

    if os.path.isfile(restore_file):
        backup = gitlab_lib.parse_json(restore_file)

        if backup:
            for entry in backup:
                entry['component'] = component
                entry['project_id'] = project['id']

                if entry.get('iid'):
                    entry['iid'] = iid_counter
                    iid_counter = iid_counter + 1

                work_queue.put(entry)
        else:
            gitlab_lib.log("Nothing to do for " + component)
Example no. 10
def restore_entry(project, queue):
    """
    Restore a single entry of a project component
    """
    while not queue.empty():
        entry = queue.get()

        gitlab_lib.log("Restoring %s [%s]" % (entry['component'], entry.get('name') or "ID " + str(entry.get('id'))))

        # for snippets we must additionally restore the content file
        if entry['component'] == "snippets":
            restore_snippets(project, entry)
        else:
            result = gitlab_lib.rest_api_call(gitlab_lib.PROJECT_COMPONENTS[entry['component']] % (gitlab_lib.API_URL, project['id']),
                                              gitlab_lib.prepare_restore_data(project, entry))

            if entry['component'] == "issues":
                result = result.json()
                gitlab_lib.rest_api_call(gitlab_lib.ISSUE_EDIT % (gitlab_lib.API_URL, project['id'], result.get('id')),
                                         gitlab_lib.prepare_restore_data(project, update_issue_metadata(entry)),
                                         "PUT")
                restore_notes(gitlab_lib.NOTES_FOR_ISSUE % (gitlab_lib.API_URL, project['id'], str(entry.get('id'))),
                              project,
                              entry)
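# Sketch only: how the restore side might tie fill_restore_queue() and
# restore_entry() together. `work_queue` and `processes` are globals used in
# the other examples; the number of workers here is an assumption.
def example_restore_run(project, number_of_workers=4):
    from multiprocessing import Process

    # queue one work item per backed-up entry of every component
    for component in gitlab_lib.PROJECT_COMPONENTS:
        fill_restore_queue(project, component)

    workers = [Process(target=restore_entry, args=(project, work_queue))
               for _ in range(number_of_workers)]

    for worker in workers:
        worker.start()

    for worker in workers:
        worker.join()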
def clean_shutdown(signum, frame):
    for process in processes:
        process.terminate()

    sys.exit(1)


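# Assumes "from signal import signal, SIGINT" at the top of the script (not
# shown in this excerpt).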
signal(SIGINT, clean_shutdown)

#
# MAIN PART
#

# Check backup exists and looks reasonable
if not os.path.exists(args.backup_dir):
    gitlab_lib.log(args.backup_dir + " is not a readable.")
    sys.exit(1)

if not os.path.exists(os.path.join(args.backup_dir, "project.json")):
    gitlab_lib.log(
        args.backup_dir +
        " does not look like a projects backup dir. No project.json file found!"
    )
    sys.exit(1)

# Got project id? Lookup metadata of project
project_data = {}

try:
    result = gitlab_lib.get_project_metadata(int(args.project))
Example no. 12
    else:
        gitlab_lib.log("Nothing to do for " + component)


#
# MAIN PART
#

queue = Queue()
gitlab_lib.DEBUG = args.debug
gitlab_lib.TOKEN = args.token
gitlab_lib.SERVER = args.server

# Check backup exists and looks reasonable
if not os.path.exists(args.backup_dir):
    gitlab_lib.log(args.backup_dir + " is not a readable.")
    sys.exit(1)

if not os.path.exists(os.path.join(args.backup_dir, "project.json")):
    gitlab_lib.log(args.backup_dir + " does not look like a projects backup dir. No project.json file found!")
    sys.exit(1)

# Lookup metadata of destination project
project_data = gitlab_lib.get_project_metadata(args.project)

if not project_data or len(project_data) == 0:
    gitlab_lib.log("Cannot find project " + args.project)
    sys.exit(1)

if len(project_data) > 1:
    gitlab_lib.log("Found more then one project for " + args.project)