def last_check(repository_name):
    """
    :param repository_name: Repository name to be checked.
    :return: the last time when the repository was checked
    """
    git_json_filename = "{}.json".format(repository_name)
    try:
        with open(WORKING_DIR + "/git_files/" + git_json_filename, "r") as \
                commit_json:
            json_content = json.load(commit_json)
            raw_date = json_content.get("0").get("lastChecked")
            try:
                last_checked = datetime.strptime(raw_date,
                                                 "%Y-%m-%d %H:%M:%S")
            except ValueError:
                # fall back to the format that includes microseconds
                last_checked = datetime.strptime(raw_date,
                                                 "%Y-%m-%d %H:%M:%S.%f")
            LOGGER.info("Repo last checked on: %s", last_checked)

    except IOError:
        last_checked = LAST_MONTH
    return last_checked
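# A minimal sketch of the git_files JSON that last_check() reads; the file
# name, timestamp and layout below are inferred from the lookups above and
# are hypothetical:
#
#   git_files/example-repo.json
#   {
#       "0": {"lastChecked": "2019-04-01 12:30:00"},
#       "1": {...first stored commit...}
#   }
#
# last_check("example-repo")  # -> datetime(2019, 4, 1, 12, 30)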
def filter_git_commit_keyword(repository_name, repository_path):
    """
    Filters out only the data that we need from a commit
    Substitute the special characters from commit message using 'sub' function
    from 're' library
    :param repository_name: name of the given repo
    :param repository_path:
    :return: the commits into a dictionary
    TODO: please add the exception blocks since the script fails when it can't
    pull data:
    (e.g raise self.__createException(status, responseHeaders, output)
    github.GithubException.GithubException: 502 {'message': 'Server Error'}
    """
    number = 0
    last_checked = last_check(repository_name)
    new_commit_dict = {"0": {"lastChecked": str(datetime.utcnow())}}
    if limit_checker():
        new_commits = GIT.get_repo(repository_path) \
            .get_commits(since=last_checked)

    for commit in new_commits:
        files_changed_by_commit = [x.filename for x in commit.files]
        if files_changed_by_commit:
            each_commit = {}
            LOGGER.info(commit.commit.message)
            if "deploy" in commit.commit.message:
                number += 1
                each_commit.update({int(number): get_commit_details(commit)})
                new_commit_dict.update(each_commit)
    json_writer_git(repository_name, new_commit_dict)
def replace_bug_with_url(message, LOGGER):
    """
    This function generates and replaces bug numbers with bugzilla links in commit messages.
    Supports MD format.
    :param message: commit message
    :param LOGGER: send the logger object
    :return: the commit message with formatted bug link for MD files.
    """
    commit_text = message.split()
    for element in range(len(commit_text)):
        if "bug" in (commit_text[element].lower()
                     ) and element < len(commit_text) - 1:
            bug_number = re.sub("[(:,.;)]", "", commit_text[element + 1])
            try:
                bug_number = int(''.join(list(filter(str.isdigit,
                                                     bug_number))))
                generated_link = "https://bugzilla.mozilla.org/show_bug.cgi?id=" + \
                                 str(bug_number)
                commit_text[element] = '[' + 'Bug' + ' ' + str(
                    bug_number) + '](' + generated_link + ')'
                commit_text[element + 1] = ''
            except ValueError:
                if LOGGER.root.level == 30:  # 30 == logging.WARNING
                    LOGGER.warning(
                        "Invalid bug number: > {} < in message: {}".format(
                            commit_text[element + 1], message))
    commit_text = ' '.join(commit_text)
    return commit_text
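# Usage sketch for replace_bug_with_url(); the message and logger below are
# hypothetical. Note that the token following "bug" is consumed, leaving a
# double space where the bug number used to be.
if __name__ == "__main__":
    import logging
    print(replace_bug_with_url("Fix for bug 123456: crash on startup",
                               logging.getLogger(__name__)))
    # Fix for [Bug 123456](https://bugzilla.mozilla.org/show_bug.cgi?id=123456)  crash on startup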
def get_last_local_push_id(repo_name):
    """
    :param repo_name: name of the repo we are currently working on
    :return: last_stored_push_id: last push id that is currently stored locally
    """
    hg_json_filename = "{}.json".format(repo_name)
    try:
        with open(WORKING_DIR + "/hg_files/" + hg_json_filename, "r") as \
                commit_json:
            json_content = json.load(commit_json)
        last_stored_push_id = json_content.get("0").get("last_push_id")
        LOGGER.info("Last local push id is: %s", last_stored_push_id)
    except FileNotFoundError:
        last_stored_push_id = 0
        LOGGER.info("No last local push id found, starting from 0 ")
    return last_stored_push_id
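# Sketch of the hg_files JSON metadata that get_last_local_push_id() reads;
# the push id below is hypothetical:
#
#   hg_files/example-repo.json
#   {
#       "0": {"last_push_id": 35958},
#       "1": {...first stored changeset...}
#   }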
def filter_git_tag(repository_name, repository_team, repository_path):
    """
    Filters out only the data that we need from a commit
    Substitute the special characters from commit message using 'sub' function
    from 're' library
    :param repository_team: the team of the given repo
    :param repository_name: name of the given repo
    :param repository_path:
    :return: the commits into a dictionary
    TODO: please add the exception blocks since the script fails when it can't
    pull data:
    (e.g raise self.__createException(status, responseHeaders, output)
     github.GithubException.GithubException: 502 {'message': 'Server Error'}
    """
    number = 0
    latest_releases = get_version(repository_name, repository_team)
    with open("repositories.json", "r") as file:
        json_content = json.load(file)
        version_path = json_content.get("Github").get(repository_name) \
            .get("configuration").get("version-path")
    checker = compare_versions(version_path, repository_name, latest_releases)
    if checker:
        last_commit_date = get_date_from_json(repository_name)
        new_commit_dict = {
            "0": {
                "lastChecked": str(datetime.utcnow()),
                "last_releases": latest_releases
            }
        }
        for commit in GIT.get_repo(repository_path) \
                .get_commits(since=last_commit_date):
            commit_message = commit.commit.message
            bump_version = latest_releases.get("latest_release").get("version")
            # keep only the version-bump commit, whose message matches the
            # release version exactly
            if commit_message == bump_version:
                each_commit = {}
                number += 1
                each_commit.update({int(number): get_commit_details(commit)})
                new_commit_dict.update(each_commit)

        json_writer_git(repository_name, new_commit_dict)
    else:
        LOGGER.info("No new changes entered production")
def write_date_header(file_name, datetime_object):
    """
    Writes a date from a specific datetime object into a file as a date
    header.
    :param file_name: name of the file to be written to
    :param datetime_object: datetime object used for the header
    :return: None
    """
    base_table = "|            | \n" + \
                 "|:----------:| \n"
    date_header = "| Generated on: " + \
                  str(datetime.utcnow()) + \
                  " and contains modifications from: " + \
                  str(datetime_object) + \
                  " |"
    with open(file_name, "a") as file:
        file.write("\n" + base_table + date_header + "\n")
    LOGGER.info("Generated date header for file: %s with datestamp %s",
                str(file_name), str(datetime.utcnow()))
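# Calling write_date_header("changelog.md", some_datetime) appends a
# one-column MD table like the following (timestamps are illustrative):
#
#   |            |
#   |:----------:|
#   | Generated on: 2019-04-08 09:00:00.000000 and contains modifications from: 2019-04-01 09:00:00 |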
def generate_main_md_table(repositories_holder, which_repo, days_to_generate):
    """
    Looks into the repository folders (hg_files & git_files), filters the
    files to load the jsons using a pass filter and then calls the
    extraction functions.
    :param repositories_holder: repositories
    :param which_repo: tells the function for which repo to update the md
    :param days_to_generate: pass-through parameter, used in extract json
    from git/hg to generate for the specified number of days
    """
    from fic_modules.git import extract_json_from_git
    successfully_generated = "part from main markdown table was " \
                             "successfully generated."
    repo_list = []  # (order, repository name, vcs type) tuples
    for i in repositories_holder.get("Github"):
        position = repositories_holder.get("Github").get(i).get("order")
        repo_list.append((position, i, "git"))
    for i in repositories_holder.get("Mercurial"):
        position = repositories_holder.get("Mercurial").get(i).get("order")
        repo_list.append((position, i, "hg"))
    repo_list.sort()
    for element in repo_list:
        if which_repo in ("complete", "Git") and element[2] == "git":
            extract_json_from_git(element[1], days_to_generate)
            LOGGER.info("GIT %s Repository: %s", successfully_generated,
                        element[1])

        if which_repo in ("complete", "Hg") and element[2] == "hg":
            extract_json_from_hg(element[1], days_to_generate)
            LOGGER.info("HG %s Repository: %s", successfully_generated,
                        element[1])

        if which_repo not in ("complete", "Git", "Hg"):
            LOGGER.error("No {} table was generated!".format(element[2]))
def generate_hg_pushes_link(repo_name, repository_url):
    """
    :param repo_name: name of the repo we are currently working on
    :param repository_url: base repository url stored in repositories.json
    :return: generate_pushes_link: a link used for bringing data from HG
    """
    start_id = get_last_local_push_id(repo_name)
    url = repository_url + "json-pushes?version=2"
    response = requests.get(url).text
    data = json.loads(response)
    end_id = data.get("lastpushid")
    if start_id == 0:
        # no local data yet: go back a fixed window of changesets, or start
        # from the beginning on small repositories
        start_id = end_id - NUMBER_OF_CHANGESETS
        if end_id < 100:
            start_id = 1
    start_id = str(start_id)
    end_id = str(end_id)
    generate_pushes_link = repository_url + "json-pushes?version=2&" \
                                            "full=1&startID={}&endID={}" \
                                            .format(start_id, end_id)
    LOGGER.info("Generated link for %s is %s", repo_name, generate_pushes_link)
    return generate_pushes_link
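# For a hypothetical repository_url of "https://hg.example.org/repo/", with
# start_id 120 and end_id 135, the generated link looks like:
#
#   https://hg.example.org/repo/json-pushes?version=2&full=1&startID=120&endID=135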
def get_date_from_json(repo_name):
    """
    :param repo_name: name of the repo we are currently working on
    :return: date of the last commit that we have locally in our json
    """
    git_json_filename = "{}.json".format(repo_name)
    try:
        with open(WORKING_DIR + "/git_files/" + git_json_filename, "r") as \
                commit_json:
            json_content = json.load(commit_json)
        last_stored_date = json_content \
            .get("0") \
            .get("last_releases") \
            .get("latest_release") \
            .get("date")
        date_format = parse(last_stored_date)
        last_stored_date = datetime.strptime(str(date_format),
                                             "%Y-%m-%d %H:%M:%S")
    except FileNotFoundError:
        # no local json yet: fall back to a fixed default date
        last_stored_date = datetime.strptime("2019-01-01 01:00:00",
                                             "%Y-%m-%d %H:%M:%S")
        LOGGER.info("No local json found, defaulting last local date to: "
                    "{}".format(last_stored_date))
    return last_stored_date
    def handle_exception(self):
        """
        Logs the HTTP error code with its standard reason phrase and exits
        with that code.
        """
        status_messages = {
            301: "Moved Permanently",
            302: "Found",
            304: "Not Modified",
            307: "Temporary Redirect",
            400: "Bad Request",
            401: "Unauthorized",
            403: "Forbidden",
            404: "Not Found",
            422: "Unprocessable Entity",
            500: "Internal Server Error",
            501: "Not Implemented",
            503: "Service Unavailable",
        }
        if self.e in status_messages:
            LOGGER.critical("Error code %s: %s",
                            self.e, status_messages[self.e])
            exit(self.e)
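# handle_exception() above is written as a method of some error-handler
# class; a minimal sketch of such a class, assuming its constructor simply
# stores the HTTP status code (the names here are hypothetical):
#
#   class GithubRequestError:
#       def __init__(self, status_code):
#           self.e = status_code  # inspected by handle_exception()
#
#   GithubRequestError(404).handle_exception()  # logs and exits with 404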
def limit_checker():
    """
    Checks that the GitHub API request limit is not exceeded.
    Every time this function is called it returns True if the request limit
    is not exceeded; otherwise it waits for the reset time to pass.
    :return: True if requests can still be made
    """
    rate_limit = GIT.rate_limiting[0]
    unix_reset_time = GIT.rate_limiting_resettime
    reset_time = datetime.fromtimestamp(unix_reset_time)
    if rate_limit >= 5:
        LOGGER.info("Rate limit is: %s", str(rate_limit))
        return True
    else:
        try:
            LOGGER.info("You have reached the requests limit!")
            LOGGER.info("The requests limit will reset at: %s",
                        str(reset_time))
            # busy-wait until the reset time has passed
            while rate_limit < 5000 and reset_time >= datetime.now():
                unix_reset_time = GIT.rate_limiting_resettime
                reset_time = datetime.fromtimestamp(unix_reset_time)
            LOGGER.info("The requests limit has been reset!")
            return True

        except GithubException as error:
            if error.status == 403:
                LOGGER.info("The requests limit is reset to: %s",
                            str(reset_time))
            elif error.status == 404:
                LOGGER.info("Github is down!\nPlease try again later...")
def create_files_for_git(repositories_holder, onerepo):
    """
    Main GIT function. Takes every Git repo from a .json file which is
    populated with repositories and writes all the commit data of each repo in
    a json and MD file formats.
    :param: repositories_holder: Expects a .json file that contains a list of
    repositories.
    :return: The end result is a .json and a .md file for every git repository.
    Can be found inside git_files/
    """
    if onerepo:
        complete_data = {}
        repository_team = REPOSITORIES \
            .get("Github") \
            .get(repositories_holder) \
            .get("team")
        repository_type = REPOSITORIES \
            .get("Github") \
            .get(repositories_holder) \
            .get("configuration") \
            .get("type")
        LOGGER.info("Working on repo: {}".format(repositories_holder))
        folders_to_check = [
            folder
            for folder in REPOSITORIES.get("Github").get(repositories_holder).
            get("configuration").get("folders-to-check")
        ]
        filter_git_commit_data(repositories_holder, repository_team,
                               repository_type, folders_to_check)
        if repositories_holder == "build-puppet":
            create_git_md_table(repositories_holder, "git_files")
            create_md_table_for_scriptworkers(repositories_holder)
        else:
            create_git_md_table(repositories_holder, "git_files")
        work_path = WORKING_DIR + "/git_files/"
        repo_data = populate_changelog_json(work_path, repositories_holder)
        complete_data.update(repo_data)
        LOGGER.info("MD table generated successfully for {}".format(
            repositories_holder))
    else:
        complete_data = {}
        for repo in repositories_holder["Github"]:
            repository_name = repo
            repository_team = repositories_holder \
                .get("Github") \
                .get(repo) \
                .get("team")
            repository_type = repositories_holder \
                .get("Github") \
                .get(repo) \
                .get("configuration") \
                .get("type")
            LOGGER.info("Working on repo: {}".format(repository_name))
            folders_to_check = [
                folder for folder in repositories_holder.get("Github").get(
                    repo).get("configuration").get("folders-to-check")
            ]
            filter_git_commit_data(repository_name, repository_team,
                                   repository_type, folders_to_check)
            if repository_name == "build-puppet":
                create_git_md_table(repository_name, "git_files")
                create_md_table_for_scriptworkers(repository_name)
            else:
                create_git_md_table(repository_name, "git_files")
            work_path = WORKING_DIR + "/git_files/"
            repo_data = populate_changelog_json(work_path, repository_name)
            complete_data.update(repo_data)
            LOGGER.info("MD table generated successfully")
            LOGGER.info("Finished working on {}".format(repository_name))
    return complete_data
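# Sketch of a "Github" entry in repositories.json holding the keys this
# function reads ("team" and "configuration"); the names and values below
# are hypothetical:
#
#   "example-repo": {
#       "team": "example-team",
#       "order": 1,
#       "configuration": {
#           "type": "commit-keyword",
#           "folders-to-check": ["modules/", "manifests/"]
#       }
#   }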
def extract_json_from_git(repository, days_to_generate):
    """
    Extracts the json data from json files and writes the data to the main
    markdown table file.
    The function looks into json files after the last commit, extracts it and
    calls the write_main_md_table function.
    :param days_to_generate:
    :param repository:
    :return: none
    """
    from fic_modules.markdown_modules import generate_markdown_header, \
        write_main_md_table
    from fic_modules.helper_functions import generate_repository_url

    nr_days_ago = datetime.utcnow() - timedelta(days=days_to_generate)

    count_pushes = 0
    with open("./changelog.json") as json_file:
        data = json.load(json_file)

        repo = str(repository)
        repository_url = generate_repository_url("git_files", repo, "json")
        repository_json = generate_repository_url("git_files", repo, "md")

        try:
            generate_markdown_header("changelog.md", repo, repository_url,
                                     repository_json)
            if "0" in data:
                del data["0"]

            for commit_iterator in data.get("Github").get(repo):
                commit_number = str(commit_iterator)

                commit_date = data.get("Github")\
                    .get(repo)\
                    .get(commit_number)\
                    .get("commit_date")
                commit_date = datetime.strptime(commit_date,
                                                "%Y-%m-%d %H:%M:%S")

                if commit_date > nr_days_ago:
                    count_pushes = count_pushes + 1
                    commit_description = data.get("Github").get(repo) \
                        .get(commit_number) \
                        .get("commit_message")
                    commit_description = remove_chars(commit_description,
                                                      "\U0001f60b")
                    commit_url = data.get("Github")\
                        .get(repo)\
                        .get(commit_number)\
                        .get("url")
                    commit_url = "[Link](" + commit_url + ")"

                    author = data.get("Github")\
                        .get(repo)\
                        .get(commit_number)\
                        .get("commiter_name")
                    review = "N/A"
                    write_main_md_table("changelog.md", commit_url,
                                        commit_description, author, review,
                                        commit_date)
            if count_pushes == 0:
                commit_url = " "
                if days_to_generate == 1:
                    commit_description = "No push in the last day.. " \
                                         "[see the history of MD " \
                                         "commits](" + \
                                         repository_json + \
                                         ")"
                else:
                    commit_description = "No push in the last " + \
                                         str(days_to_generate) + \
                                         " days.. [see the history of MD" \
                                         " commits](" + \
                                         repository_json + \
                                         ")"
                author = "FIC - BOT"
                review = "Self Generated"
                commit_date = " - "
                write_main_md_table("changelog.md", commit_url,
                                    commit_description, author, review,
                                    commit_date)
        except KeyError:
            LOGGER.info("File " + repo + " is empty. \nPlease check:" +
                        str(repository_url) + " for more details.\n")
def create_hg_md_table(repository_name):
    """
    Uses 'repository_name' parameter to generate markdown tables for every
    json file inside path_to_files parameter.
    :param repository_name: Used to display the repo name in the title row of
    the MD table
    :return: MD tables for every json file inside the git_files dir.
    """

    try:
        json_data = open(WORKING_DIR + "/hg_files/" + "{}.json"
                         .format(repository_name)).read()
        data = json.loads(json_data)
        base_table = "| Changeset | Date | Commiter | " \
                     "Commit Message | Commit URL | \n" + \
                     "|:---:|:---:|:----:|:---------------" \
                     "-------------------:|:-----:| \n"
        tables = {}
        try:
            last_push_id = data.get('0').get("last_push_id")
            md_title = [
                "Repository name: {}\n Current push id: {}"
                .format(repository_name, last_push_id)]
        except AttributeError:
            md_title = ["Error while accessing the " +
                        str(repository_name) + "file."]

        for repo in md_title:
            tables[repo] = base_table

        for key in data:
            if key > "0":  # skip the "0" metadata entry
                # reverse the key order so newer changesets come first
                key = str(len(data) - int(key))
                changeset_id = data.get(key).get("changeset_number")
                date_of_push = data.get(key).get("date_of_push")
                try:
                    for entry in data.get(key).get("changeset_commits"):
                        try:
                            commit_author = data \
                                .get(key) \
                                .get("changeset_commits") \
                                .get(entry) \
                                .get("commiter_name")
                            commit_author = re.sub("\u0131", "i",
                                                   commit_author)
                            commit_author = filter_strings(commit_author)

                            message = data \
                                .get(key) \
                                .get("changeset_commits") \
                                .get(entry).get("commit_message")
                            message = re.sub("\n|", "", message)

                            message = filter_strings(message)
                            message = replace_bug_with_url(message, LOGGER)
                            url = data\
                                .get(key)\
                                .get("changeset_commits")\
                                .get(entry)\
                                .get("url")

                            row = "|" + changeset_id + \
                                  "|" + date_of_push + \
                                  "|" + commit_author + \
                                  "|" + message + \
                                  "|" + url + "\n"

                            for repo in tables.keys():
                                tables[repo] = tables[repo] + row
                        except TypeError:
                            pass
                except TypeError:
                    pass

        md_file_name = "{}.md".format(repository_name)
        md_file = open(WORKING_DIR + "/hg_files/" + md_file_name, "w")

        try:
            for key, value in tables.items():
                if value != base_table:
                    md_file.write("## " + key.upper() + "\n\n")
                    md_file.write(value + "\n\n")
        except KeyError:
            pass

        md_file.close()
    except FileNotFoundError:
        LOGGER.error("Json for %s is empty! Skipping!", repository_name)
def create_git_md_table(repository_name, path_to_files):
    """
    Uses 'repository_name' parameter to generate markdown tables for every
    json file inside path_to_files parameter.
    :param repository_name: Used to display the repo name in the title row of
    the MD table
    :param path_to_files: Used to store path to json files (git_files,
    hg_files)
    :return: MD tables for every json file inside the git_files dir.
    """

    try:
        with open(WORKING_DIR + "/{}/".format(path_to_files) +
                  "{}.json".format(repository_name)) as json_file:
            json_data = json_file.read()
        data = json.loads(json_data)
        base_table = "| Commit Number | Commiter | Commit " \
                     "Message | Commit Url | Date | \n" + \
                     "|:---:|:----:|:---------------------" \
                     "-------------:|:------:|:----:| \n"
        tables = {}
        try:
            version = data\
                .get('0')\
                .get("last_two_releases")\
                .get("LatestRelease")\
                .get("version")
            date = data\
                .get('0')\
                .get("last_two_releases")\
                .get("LatestRelease")\
                .get("date")
            md_title = [
                "Repository name: {}\n Current version: {} released on {}"
                .format(repository_name, version, date)]
        except AttributeError:
            md_title = ["{} commit markdown table since {}"
                        .format(repository_name, LAST_WEEK)]

        commit_number_list = [key for key in data]

        for repo in md_title:
            tables[repo] = base_table

        for key in data:
            commit_number = commit_number_list[-1]
            try:
                commit_author = data.get(key).get("commiter_name")
                commit_author = re.sub("\u0131", "i", commit_author)
                date = data.get(key).get("commit_date")
                message = data.get(key).get("commit_message")
                message = remove_chars(message, "\U0001f60b")
                message = re.sub("\|", "\|", message)
                url = data.get(key).get("url")

                commit_author = filter_strings(commit_author)
                message = filter_strings(message)
                message = replace_bug_with_url(message, LOGGER)
                row = "|" + commit_number + \
                      "|" + commit_author + \
                      "|" + message + \
                      "|" + "[URL](" + url + ")" + \
                      "|" + date + "\n"

                del commit_number_list[-1]
                for repo in tables.keys():
                    tables[repo] = tables[repo] + row
            except TypeError:
                pass

        md_file_name = "{}.md".format(repository_name)
        md_file = open(WORKING_DIR + "/{}/".format(path_to_files) +
                       md_file_name, "w")

        try:
            for key, value in tables.items():
                if value != base_table:
                    md_file.write("## " + key.upper() + "\n\n")
                    md_file.write(value + "\n\n")
        except KeyError:
            pass

        md_file.close()
    except FileNotFoundError:
        LOGGER.info("Json for %s is empty! Skipping!", repository_name)
now = datetime.now()

branch_name = 'auto-push'
try:
    # Setup environment
    subprocess.call(['git', 'checkout', 'master'])
    subprocess.call(['git', 'pull'])
    subprocess.call(['git', 'branch', '-D', branch_name])
    subprocess.call(['git', 'checkout', '-b', branch_name])
    # Run FIC
    subprocess.call(['python', 'client.py', '-c', '-l'])
    # Prepare commit
    subprocess.call(['git', 'add', 'git_files/', 'hg_files/',
                     'changelog.md', 'changelog.json'])
    commit_message = "Changelog:  " + str(datetime.utcnow()) + \
                     "\n- updated git files \n- updated hg files" \
                     "\n- updated changelog.md \n- updated changelog.json"
    subprocess.call(['git', 'commit', '-m', commit_message])
    # Push commit to origin
    subprocess.call(['git', 'push', '--set-upstream', 'origin', branch_name])
    subprocess.call(['git', 'push'])
    subprocess.call(['git', 'checkout', 'master'])
    subprocess.call(['git', 'pull'])
    print("\n\nAuto Update Process finished.")
    toaster.show_toast("FireFox Infra Changelog",
                       "Generated files have been updated")
except Exception as e:
    toaster.show_toast("FireFox Infra Changelog",
                       "Failed to update the generated files")
    LOGGER.error(e)
def filter_git_tag_bp(repository_name, repository_path):
    """
    Filters out only the data that we need from a commit
    Substitute the special characters from commit message using 'sub' function
    from 're' library
    :param repository_name: name of the given repo
    :param repository_path:
    :return: the commits into a dictionary
    TODO: please add the exception blocks since the script fails when it can't
    pull data:
    (e.g raise self.__createException(status, responseHeaders, output)
    github.GithubException.GithubException: 502 {'message': 'Server Error'}
    """
    number = 0
    commit_number_tracker = 1
    pathway = REPOSITORIES.get("Github") \
        .get(repository_name) \
        .get("configuration") \
        .get("files-to-check")
    last_checked = last_check(repository_name)
    new_commit_dict = {"0": {"lastChecked": str(datetime.utcnow())}}

    if limit_checker():
        new_commits = GIT.get_repo(repository_path) \
            .get_commits(since=last_checked)

    for commit in new_commits:
        each_commit = {}
        switch = False
        LOGGER.info("Commit number: " + str(commit_number_tracker))
        commit_number_tracker += 1
        files_changed_by_commit = [x.filename for x in commit.files]
        LOGGER.info(files_changed_by_commit)
        LOGGER.info(len(files_changed_by_commit))
        changed_file_number = 1
        for entry in files_changed_by_commit:
            LOGGER.info("changed file number: {} ".format(changed_file_number))
            LOGGER.info(entry)
            changed_file_number += 1
            for scriptworkers in pathway:
                LOGGER.info("checking repo:{} ".format(scriptworkers))
                if entry in pathway[scriptworkers]:
                    LOGGER.debug("This commit needs to be saved")
                    switch = True
                    break
        if switch:
            number += 1
            each_commit.update({int(number): get_commit_details(commit)})
            new_commit_dict.update(each_commit)
    json_writer_git(repository_name, new_commit_dict)
def filter_hg_commit_data(repository_name, folders_to_check, repository_url):
    """
    This function generates data for hg json files
    :param repository_url:
    :param folders_to_check:
    :param repository_name: name of the repository
    :return: Writes data in hg json files
    """
    LOGGER.info("Repo url: %s", repository_url)
    link = generate_hg_pushes_link(repository_name, repository_url)
    data = json.loads(requests.get(link).text)
    last_push_id = data.get("lastpushid")
    hg_repo_data = {}
    number = 0
    hg_repo_data.update({"0": {"last_push_id": last_push_id}})
    for key in data.get("pushes"):
        number += 1
        changeset_number = key
        changeset_pusher = data.get("pushes").get(key).get("user")
        date_of_push = data.get("pushes").get(key).get("date")
        hg_repo_data.update({
            number: {
                "changeset_number":
                changeset_number,
                "pusher":
                changeset_pusher,
                "date_of_push":
                time.strftime('%Y-%m-%d %H:%M:%S',
                              time.localtime(date_of_push)),
                "changeset_commits": {}
            }
        })
        # enumerate the commits inside this push, starting from 1
        changesets = data.get("pushes").get(key).get("changesets")
        for commit_counter, keys in enumerate(changesets, start=1):
            node = keys.get("node")
            url = repository_url + "pushloghtml?changeset=" + node[:12]
            author = keys.get("author")
            desc = keys.get("desc")
            files_changed = [entry for entry in keys.get("files")]
            if folders_to_check:
                if compare_files(files_changed, folders_to_check):
                    hg_repo_data[number]["changeset_commits"].update({
                        commit_counter: {
                            "url": url,
                            "commiter_name": author,
                            "commit_message": desc,
                            "files_changed": files_changed
                        }
                    })
        # drop pushes that ended up with no matching commits
        if not hg_repo_data[number].get("changeset_commits"):
            hg_repo_data.pop(number)
    json_writer_hg(repository_name, hg_repo_data)
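# Sketch of the json-pushes?version=2&full=1 response this function
# consumes; the field names match the lookups above, the values are
# hypothetical:
#
#   {
#       "lastpushid": 135,
#       "pushes": {
#           "135": {
#               "user": "jdoe@example.com",
#               "date": 1554121800,
#               "changesets": [
#                   {"node": "abc123def456...",
#                    "author": "John Doe <jdoe@example.com>",
#                    "desc": "Bug 123456 - deploy fix; r=reviewer",
#                    "files": ["manifests/site.pp"]}
#               ]
#           }
#       }
#   }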
def extract_json_from_hg(repo_name, days_to_generate):
    """
    Extracts the json data from json files and writes the data to the main
    markdown table file.
    The function looks into json files after the last commit, extracts it and
    calls the write_main_md_table function.
    :param days_to_generate:
    :param repo_name:
    :return: none
    """
    from fic_modules.markdown_modules import generate_markdown_header,\
        write_main_md_table
    from fic_modules.helper_functions import generate_repository_url
    repo_name = str(repo_name).lower()
    nr_days_ago = datetime.utcnow() - timedelta(days=days_to_generate)

    with open("./changelog.json") as json_file:
        data = json.load(json_file)
        repository_url = generate_repository_url("hg_files", repo_name, "md")
        repository_json = generate_repository_url("hg_files", repo_name,
                                                  "json")

    try:
        generate_markdown_header("changelog.md", repo_name, repository_url,
                                 repository_json)
        count_pushes = 0
        for changeset_iterator in sorted(data.get("Hg").get(repo_name),
                                         reverse=True):
            for commit_iterator in data.get("Hg").get(repo_name) \
                    .get(changeset_iterator)\
                    .get("changeset_commits"):
                commit_date = data.get("Hg").get(repo_name) \
                    .get(changeset_iterator)\
                    .get("date_of_push")
                commit_date = datetime.strptime(commit_date,
                                                "%Y-%m-%d %H:%M:%S")

                if commit_date > nr_days_ago:
                    count_pushes = count_pushes + 1
                    commit_description = data.get("Hg")\
                        .get(repo_name)\
                        .get(changeset_iterator) \
                        .get("changeset_commits") \
                        .get(commit_iterator) \
                        .get("commit_message")
                    commit_description = \
                        remove_chars(commit_description, "\n")
                    commit_description = \
                        replace_bug_with_url(commit_description, LOGGER)
                    commit_url = data.get("Hg").get(repo_name)\
                        .get(changeset_iterator) \
                        .get("changeset_commits") \
                        .get(commit_iterator) \
                        .get("url")
                    commit_url = "[Link](" + commit_url + ")"
                    author = data.get("Hg")\
                        .get(repo_name)\
                        .get(changeset_iterator)\
                        .get("pusher")
                    review = extract_reviewer(commit_description)
                    write_main_md_table("changelog.md", commit_url,
                                        commit_description, author, review,
                                        commit_date)
        if count_pushes == 0:
            commit_url = " "
            if days_to_generate == 1:
                commit_description = "No push in the last day.. " \
                                     "[see the history of MD " \
                                     "commits](" + \
                                     repository_url + \
                                     ")"
            else:
                commit_description = "No push in the last " + \
                                     str(days_to_generate) + \
                                     " days.. [see the history of MD" \
                                     " commits](" + \
                                     repository_url + \
                                     ")"
            author = "FIC - BOT"
            review = "Self Generated"
            commit_date = " - "
            write_main_md_table("changelog.md", commit_url, commit_description,
                                author, review, commit_date)
    except AttributeError:
        LOGGER.exception(
            "Attribute Error!! \n Probable issue is a "
            "malfunctioned json file.. Please check the "
            "following file: %s", repo_name)
    except KeyError:
        LOGGER.exception(
            "File %s is empty. Please check: %s for more "
            "details.", repo_name, repository_url)

    except TypeError:
        LOGGER.exception("TypeError encountered")