def main():
    """db_uninstaller main loop.

    Determines the Trafodion node list (from trafodion_config, from an
    installation config file, or from user input), confirms with the user,
    then removes the Trafodion user, group, processes and files from
    every node.
    """
    # handle parser option
    options = get_options()

    notify = lambda n: raw_input('Uninstall Trafodion on [%s] [N]: ' % n)

    format_output('Trafodion Uninstall Start')

    # Prompt for the remote SSH password only when requested; otherwise
    # rely on passwordless (key-based) SSH. (The original line here was
    # corrupted by a password-masking scrape and did not parse.)
    if options.pwd:
        pwd = getpass.getpass('Input remote host SSH Password: ')
    else:
        pwd = ''

    node_list = ''
    # parse node list from trafodion_config
    if os.path.exists(TRAF_CFG_FILE):
        with open(TRAF_CFG_FILE, 'r') as f:
            traf_cfgs = f.readlines()
        try:
            line = [l for l in traf_cfgs if 'NODE_LIST' in l][0]
            node_list = re.search(r'NODE_LIST="(.*)"', line).groups()[0]
        except Exception as e:
            err_m('Cannot find node list info from %s: %s' % (TRAF_CFG_FILE, e))
    # parse node list from installation config file
    elif options.cfgfile:
        if not os.path.exists(options.cfgfile):
            err_m('Cannot find config file \'%s\'' % options.cfgfile)
        p = ParseInI(options.cfgfile, 'dbconfigs')
        cfgs = p.load()
        node_list = cfgs['node_list']
    # user input
    else:
        node_lists = raw_input('Enter Trafodion node list to uninstall(separated by comma): ')
        if not node_lists: err_m('Empty value')
        node_list = ' '.join(expNumRe(node_lists))

    if not options.silent:
        rc = notify(node_list)
        if rc.lower() != 'y': sys.exit(1)

    nodes = node_list.split()
    # guard against an empty node_list read from a config file
    if not nodes:
        err_m('Node list is empty')

    remotes = [Remote(node, pwd=pwd) for node in nodes]
    sudo_prefix = get_sudo_prefix()

    # remove trafodion userid and group on all trafodion nodes, together with folders
    for remote in remotes:
        info('Remove Trafodion on node [%s] ...' % remote.host)
        # kill processes owned by the trafodion user: first by user name,
        # then by numeric uid in case the passwd entry is stale
        remote.execute('ps -f -u %s|awk \'{print $2}\'|xargs %s kill -9' % (TRAF_USER, sudo_prefix), chkerr=False)
        remote.execute('trafid=`getent passwd %s|awk -F: \'{print $3}\'`; if [[ -n $trafid ]]; then ps -f -u $trafid|awk \'{print $2}\'|xargs %s kill -9; fi' % (TRAF_USER, sudo_prefix), chkerr=False)
        remote.execute('%s /usr/sbin/userdel -rf %s' % (sudo_prefix, TRAF_USER), chkerr=False)
        remote.execute('%s /usr/sbin/groupdel %s' % (sudo_prefix, TRAF_USER), chkerr=False)
        remote.execute('%s rm -rf /etc/security/limits.d/%s.conf %s /tmp/hsperfdata_%s 2>/dev/null' % (sudo_prefix, TRAF_USER, TRAF_CFG_DIR, TRAF_USER), chkerr=False)

    run_cmd('rm -f %s/*.status %s' % (INSTALLER_LOC, DBCFG_FILE))
    format_output('Trafodion Uninstall Completed')
# ===== Example 2 =====
def set_project(project_name):
    """Switches the active gcloud project.

    Args:
        project_name: str. The name of the project.
    """
    gcloud_args = [GCLOUD_PATH, 'config', 'set', 'project', project_name]
    common.run_cmd(gcloud_args)
# ===== Example 3 =====
def main(personal_access_token):
    """Updates the files corresponding to LOCAL_FECONF_PATH and
    LOCAL_CONSTANTS_PATH after doing the prerequisite checks.

    Args:
        personal_access_token: str. The personal access token for the
            GitHub id of user.
    """
    # Prerequisite checks: correct cwd, a release branch, and up-to-date
    # release scripts; the terms page must also be reachable.
    common.require_cwd_to_be_oppia()
    assert common.is_current_branch_a_release_branch()
    common.ensure_release_scripts_folder_exists_and_is_up_to_date()
    try:
        python_utils.url_open(TERMS_PAGE_URL)
    except Exception:
        raise Exception('Terms mainpage does not exist on Github.')

    files_to_restore = [LOCAL_FECONF_PATH, LOCAL_CONSTANTS_PATH]
    try:
        check_updates_to_terms_of_service(personal_access_token)
        add_mailgun_api_key()
        apply_changes_based_on_config(
            LOCAL_FECONF_PATH, FECONF_CONFIG_PATH, FECONF_REGEX)
        apply_changes_based_on_config(
            LOCAL_CONSTANTS_PATH, CONSTANTS_CONFIG_PATH, CONSTANTS_REGEX)
    except Exception as e:
        # Roll back any partially-applied edits before surfacing the error.
        common.run_cmd(['git', 'checkout', '--'] + files_to_restore)
        raise Exception(e)

    common.ask_user_to_confirm(
        'Done! Please check manually to ensure all the changes are correct.')
def main(tokens=None):
    """Check if a PR is low-risk."""
    parser = argparse.ArgumentParser()
    parser.add_argument('pr_url', help='The URL of the pull request.')
    args = parser.parse_args(args=tokens)

    parsed_url = parse_pr_url(args.pr_url)
    if not parsed_url:
        raise RuntimeError('Failed to parse PR URL %s' % args.pr_url)
    owner, repo, number = parsed_url

    pr = lookup_pr(owner, repo, number)
    if not pr:
        raise RuntimeError('Failed to load PR from GitHub API')

    # Register the PR's base repo as a remote and fetch its base branch so
    # the diff can be computed locally.
    base = pr['base']
    common.run_cmd(
        ['git', 'remote', 'add', UPSTREAM_REMOTE, base['repo']['clone_url']])
    common.run_cmd(['git', 'fetch', UPSTREAM_REMOTE, base['ref']])

    diff_files, file_diffs = load_diff(base['ref'])
    if not diff_files:
        raise RuntimeError('Failed to load PR diff')

    # The PR is low-risk if any checker accepts it (returns no reason).
    for low_risk_type, low_risk_checker in LOW_RISK_CHECKERS:
        reason_not_low_risk = low_risk_checker(pr, diff_files, file_diffs)
        if not reason_not_low_risk:
            python_utils.PRINT('PR is low-risk. Skipping some CI checks.')
            return 0
        python_utils.PRINT(
            'PR is not a low-risk PR of type %s because: %s' %
            (low_risk_type, reason_not_low_risk))
    python_utils.PRINT('PR is not low-risk. Running all CI checks.')
    return 1
# ===== Example 5 =====
    def copy_files():
        """Package the local Trafodion installation and copy it, plus the
        config/ssh/jar files it needs, to /tmp on every new node in parallel.

        NOTE(review): reads `cfgs`, `pwd` and `new_nodes` from the enclosing
        scope -- confirm against the surrounding function, which is not
        visible here.
        """
        # package trafodion binary into a tar file
        if not os.path.exists(TRAF_PKG_FILE):
            info('Creating trafodion packages of %s, this will take a while ...' % cfgs['traf_home'])
            run_cmd_as_user(cfgs['traf_user'], 'cd %s; tar czf %s ./* --exclude logs/* --exclude core.* --exclude tmp/*' % (cfgs['traf_home'], TRAF_PKG_FILE))
        else:
            info('Using existing trafodion package %s' % TRAF_PKG_FILE)

        info('Copying trafodion files to new nodes, this will take a while ...')
        # stage the trafodion user's ssh keys under /tmp (world-readable)
        # so they can be shipped alongside the package
        run_cmd('%s cp -rf %s/../.ssh /tmp' % (get_sudo_prefix(), cfgs['traf_home']))
        run_cmd('%s chmod -R 755 /tmp/.ssh' % get_sudo_prefix())
        traf_ssh_folder = '/tmp/.ssh'

        hbase_trx_file = cmd_output('ls %s/hbase-trx-*' % cfgs['hbase_lib_path'])
        trafodion_utility_file = cmd_output('ls %s/trafodion-utility-*' % cfgs['hbase_lib_path'])

        files = [TRAF_CFG_FILE, TRAF_PKG_FILE, traf_ssh_folder, hbase_trx_file, trafodion_utility_file]

        # one copy thread per new node; wait for all before checking results
        remote_insts = [Remote(h, pwd=pwd) for h in new_nodes]
        threads = [Thread(target=r.copy, args=(files, '/tmp')) for r in remote_insts]
        for thread in threads: thread.start()
        for thread in threads: thread.join()

        for r in remote_insts:
            if r.rc != 0: err_m('Failed to copy files to %s' % r.host)
# ===== Example 6 =====
def main():
    """db_uninstaller main loop.

    Determines TRAF_VAR and the node list from the running Trafodion
    environment when possible, otherwise from an installation config file
    or user input, then removes the Trafodion user, group, processes and
    files from every node.
    """
    # handle parser option
    options = get_options()

    notify = lambda n: raw_input('Uninstall Trafodion on [%s] [N]: ' % n)

    format_output('Trafodion Uninstall Start')

    # Prompt for the remote SSH password only when requested; otherwise
    # rely on passwordless (key-based) SSH. (The original line here was
    # corrupted by a password-masking scrape and did not parse.)
    if options.pwd:
        pwd = getpass.getpass('Input remote host SSH Password: ')
    else:
        pwd = ''

    try:
        traf_var = run_cmd_as_user(TRAF_USER, "echo $TRAF_VAR")
        node_list = run_cmd_as_user(TRAF_USER, "trafconf -name")
    except:  # deliberately broad: fall back when the trafodion env is gone
        if options.cfgfile:
            if not os.path.exists(options.cfgfile):
                err_m('Cannot find config file \'%s\'' % options.cfgfile)
            p = ParseInI(options.cfgfile, 'dbconfigs')
            cfgs = p.load()
            traf_var = cfgs['traf_var']
            node_list = cfgs['node_list']
        # user input
        else:
            traf_var = '/var/lib/trafodion'
            node_lists = raw_input('Enter Trafodion node list to uninstall(separated by comma): ')
            if not node_lists: err_m('Empty value')
            node_list = ' '.join(expNumRe(node_lists))

    if not options.silent:
        rc = notify(node_list)
        if rc.lower() != 'y': sys.exit(1)

    nodes = node_list.split()
    # guard against an empty node_list read from a config file
    if not nodes:
        err_m('Node list is empty')

    remotes = [Remote(node, pwd=pwd) for node in nodes]
    sudo_prefix = get_sudo_prefix()

    # remove trafodion userid and group on all trafodion nodes, together with folders
    for remote in remotes:
        info('Remove Trafodion on node [%s] ...' % remote.host)
        # kill processes owned by the trafodion user: first by user name,
        # then by numeric uid in case the passwd entry is stale
        remote.execute('ps -f -u %s|awk \'{print $2}\'|xargs %s kill -9' % (TRAF_USER, sudo_prefix), chkerr=False)
        remote.execute('trafid=`getent passwd %s|awk -F: \'{print $3}\'`; if [[ -n $trafid ]]; then ps -f -u $trafid|awk \'{print $2}\'|xargs %s kill -9; fi' % (TRAF_USER, sudo_prefix), chkerr=False)
        remote.execute('%s /usr/sbin/userdel -rf %s' % (sudo_prefix, TRAF_USER), chkerr=False)
        remote.execute('%s /usr/sbin/groupdel %s' % (sudo_prefix, TRAF_USER), chkerr=False)
        remote.execute('%s rm -rf /etc/security/limits.d/%s.conf %s %s /tmp/hsperfdata_%s 2>/dev/null' % (sudo_prefix, TRAF_USER, TRAF_CFG_DIR, traf_var, TRAF_USER), chkerr=False)

    run_cmd('rm -f %s/*.status %s' % (INSTALLER_LOC, DBCFG_FILE))
    format_output('Trafodion Uninstall Completed')
# ===== Example 7 =====
def get_extra_jobs_due_to_schema_changes(
        remote_alias, previous_release_version):
    """Finds additional jobs which should be run based on
    schema changes in feconf.

    Args:
        remote_alias: str. The alias for Oppia repo.
        previous_release_version: str. The version of the previous release.

    Returns:
        list(str). The list of jobs to run based on schema changes.
    """
    schema_versions_to_jobs_mapping = {
        'CURRENT_COLLECTION_SCHEMA_VERSION': 'CollectionMigrationOneOffJob',
        'CURRENT_STATE_SCHEMA_VERSION': 'ExplorationMigrationJobManager',
        'CURRENT_SKILL_CONTENTS_SCHEMA_VERSION': 'SkillMigrationOneOffJob',
        'CURRENT_MISCONCEPTIONS_SCHEMA_VERSION': 'SkillMigrationOneOffJob',
        'CURRENT_RUBRIC_SCHEMA_VERSION': 'SkillMigrationOneOffJob',
        'CURRENT_STORY_CONTENTS_SCHEMA_VERSION': 'StoryMigrationOneOffJob',
        'CURRENT_SUBTOPIC_SCHEMA_VERSION': 'TopicMigrationOneOffJob',
        'CURRENT_STORY_REFERENCE_SCHEMA_VERSION': 'TopicMigrationOneOffJob'}

    # Diff feconf.py between develop and the previous release branch.
    diff_output = common.run_cmd([
        'git', 'diff', '%s/develop' % remote_alias,
        '%s/release-%s' % (remote_alias, previous_release_version),
        'feconf.py'])
    feconf_diff = diff_output[:-1].split('\n')

    # A '+'/'-' diff line touching a schema constant means it changed, so
    # the corresponding migration job must run.
    jobs_to_run = set()
    for version_key, job_name in schema_versions_to_jobs_mapping.items():
        prefixes = ('+%s' % version_key, '-%s' % version_key)
        if any(line.startswith(prefixes) for line in feconf_diff):
            jobs_to_run.add(job_name)

    return list(jobs_to_run)
# ===== Example 8 =====
def install_chrome(version):
    """Install Chrome from the URL in URL_TEMPLATE.

    Args:
        version: str. The version of Chrome to install. This must be one
            of the versions available from
            github.com/webnicer/chrome-downloads.
    """
    _ = common.run_cmd(['sudo', 'apt-get', 'update'])
    _ = common.run_cmd(['sudo', 'apt-get', 'install', 'libappindicator3-1'])
    _ = common.run_cmd([
        'curl', '-L', '-o', CHROME_DEB_FILE,
        URL_TEMPLATE.format(version)])
    # Install the package first: the launcher script patched below
    # (/opt/google/chrome/google-chrome) only exists after installation,
    # so running sed before dpkg would fail on a fresh machine.
    _ = common.run_cmd(['sudo', 'dpkg', '-i', CHROME_DEB_FILE])
    # Disable the setuid sandbox in the launcher (needed in containers/CI).
    _ = common.run_cmd([
        'sudo', 'sed', '-i',
        's|HERE/chrome\\"|HERE/chrome\\" --disable-setuid-sandbox|g',
        '/opt/google/chrome/google-chrome'])
    def notify_user(self):
        """Show the final configs to the user in a table and ask for
        confirmation; on anything but 'Y', clean up the config/status
        files and quit via log_err.
        """
        format_output('Final Configs')
        title = ['config type', 'value']
        pt = PrettyTable(title)
        for item in title:
            pt.align[item] = 'l'

        for key, value in sorted(cfgs.items()):
            # only notify user input value; never echo password entries
            # (dict.has_key is deprecated/removed in Python 3 -- use `in`)
            if key in self.in_data and value:
                if 'ispasswd' in self.in_data[key]: continue
                pt.add_row([key, value])
        print(pt)
        confirm = self.get_confirm()
        if confirm != 'Y':
            if os.path.exists(DBCFG_FILE): os.remove(DBCFG_FILE)
            run_cmd('rm -rf %s/*.status' % INSTALLER_LOC)
            log_err('User quit')
# ===== Example 10 =====
    def notify_user(self):
        """Show the final configs to the user in a table and ask for
        confirmation; on anything but 'Y', clean up the config/status
        files and quit via log_err.
        """
        format_output('Final Configs')
        title = ['config type', 'value']
        pt = PrettyTable(title)
        for item in title:
            pt.align[item] = 'l'

        for key, value in sorted(cfgs.items()):
            # only notify user input value; never echo password entries
            # (dict.has_key is deprecated/removed in Python 3 -- use `in`)
            if key in self.in_data and value:
                if 'ispasswd' in self.in_data[key]: continue
                pt.add_row([key, value])
        print(pt)
        confirm = self.get_confirm()
        if confirm != 'Y':
            if os.path.exists(DBCFG_FILE): os.remove(DBCFG_FILE)
            run_cmd('rm -rf %s/*.status' % INSTALLER_LOC)
            log_err('User quit')
def create_branch(repo, repo_fork, target_branch, github_username,
                  current_release_version_number):
    """Creates a new branch with updates to AUTHORS, CHANGELOG,
    CONTRIBUTORS and about-page.

    Args:
        repo: github.Repository.Repository. The PyGithub object for the
            original repo.
        repo_fork: github.Repository.Repository. The PyGithub object for the
            forked repo.
        target_branch: str. The name of the target branch.
        github_username: str. The github username of the user.
        current_release_version_number: str. The version of current release.
    """
    python_utils.PRINT(
        'Creating new branch with updates to AUTHORS, CONTRIBUTORS, '
        'CHANGELOG and about-page...')
    # Branch the fork off the tip of develop in the original repo.
    develop_branch = repo.get_branch('develop')
    repo_fork.create_git_ref(
        ref='refs/heads/%s' % target_branch, sha=develop_branch.commit.sha)

    # Push the locally-updated copies of these files onto the new branch.
    filepaths_to_update = (
        CHANGELOG_FILEPATH, AUTHORS_FILEPATH, CONTRIBUTORS_FILEPATH,
        ABOUT_PAGE_CONSTANTS_FILEPATH)
    for filepath in filepaths_to_update:
        remote_contents = repo_fork.get_contents(filepath, ref=target_branch)
        with python_utils.open_file(filepath, 'r') as f:
            local_text = f.read()
        repo_fork.update_file(
            remote_contents.path, 'Update %s' % filepath, local_text,
            remote_contents.sha, branch=target_branch)

    common.run_cmd(GIT_CMD_CHECKOUT.split(' '))
    common.open_new_tab_in_browser_if_possible(
        'https://github.com/oppia/oppia/compare/develop...%s:%s?'
        'expand=1&title=Update authors and changelog for v%s' %
        (github_username, target_branch, current_release_version_number))
    python_utils.PRINT('Pushed changes to Github. '
                       'Please create a pull request from the %s branch\n\n'
                       'Note: PR title should be exactly: '
                       '"Update authors and changelog for v%s" '
                       'otherwise deployment will fail.' %
                       (target_branch, current_release_version_number))
# ===== Example 12 =====
def run_webdriver_manager(parameters):
    """Run commands of webdriver manager.

    Args:
        parameters: list(str). A list of parameters to pass to webdriver
            manager.
    """
    # Build the full node invocation and echo its output.
    command = [common.NODE_BIN_PATH, WEBDRIVER_MANAGER_BIN_PATH] + list(parameters)
    python_utils.PRINT(common.run_cmd(command))
# ===== Example 13 =====
def remove_updates_and_delete_branch(repo_fork, target_branch):
    """Remove changes made to AUTHORS, CHANGELOG, CONTRIBUTORS
    and about-page and delete the branch created with these changes.

    Args:
        repo_fork: github.Repository.Repository. The PyGithub object for the
            forked repo.
        target_branch: str. The name of the target branch.
    """
    common.run_cmd(GIT_CMD_CHECKOUT.split(' '))

    # get_git_ref raises when the target branch does not exist; in that
    # case there is nothing to delete, so the error is swallowed. Any
    # other failure is surfaced with a hint for the operator.
    ref_name = 'heads/%s' % target_branch
    try:
        repo_fork.get_git_ref(ref_name).delete()
    except github.UnknownObjectException:
        pass
    except Exception:
        raise Exception('Please ensure that %s branch is deleted before '
                        're-running the script' % target_branch)
# ===== Example 14 =====
def initiate_backup_restoration_process():
    """Initiate the backup restoration process on backup migration server."""
    common.open_new_tab_in_browser_if_possible(LIST_OF_BUCKETS_URL)
    python_utils.PRINT('Navigate into the newest backup folder. \n'
                       'There should be a file here of the form '
                       '<date_time>.overall_export_metadata. \n'
                       'For example, "<folder-name>/20200213-090001/'
                       '20200213-090001.overall_export_metadata". '
                       'This is the file you want to import.\n'
                       'Please copy and enter the full path of this file\n')
    export_metadata_filepath = python_utils.INPUT()
    # The path must match
    # oppia-export-backups/<timestamp>/<same timestamp>.overall_export_metadata
    valid_path_pattern = (
        r'^oppia-export-backups/(\d{8}-\d{6})/\1\.overall_export_metadata$')
    if not re.match(valid_path_pattern, export_metadata_filepath):
        raise Exception('Invalid export metadata filepath: %s' %
                        (export_metadata_filepath))
    import_args = [
        GCLOUD_PATH, 'datastore', 'import',
        'gs://%s' % export_metadata_filepath, '--async']
    common.run_cmd(import_args)
# ===== Example 15 =====
def cancel_operation():
    """Cancels a datastore operation."""
    python_utils.PRINT('Cancellation of operation may corrupt the datastore. '
                       'Refer: https://cloud.google.com/datastore/docs/'
                       'export-import-entities#cancel_an_operation\n'
                       'Do you want to continue?\n')
    # Bail out unless the user explicitly confirms.
    answer = python_utils.INPUT().lower()
    if answer not in release_constants.AFFIRMATIVE_CONFIRMATIONS:
        python_utils.PRINT('Aborting Cancellation.')
        return

    python_utils.PRINT('List of operations in progress:\n')
    check_backup_restoration_status()
    python_utils.PRINT(
        'Enter the name of the operation to cancel from the above list. '
        'The name of an operation is listed in the field called "name". '
        'Check the example here: https://stackoverflow.com/a/53630367 for '
        'details.\n')
    operation_name = python_utils.INPUT()
    cancel_args = [
        GCLOUD_PATH, 'datastore', 'operations', 'cancel', operation_name]
    common.run_cmd(cancel_args)
# ===== Example 16 =====
def _git_diff_names_only(left, right='HEAD'):
    """Get names of changed files from git.

    Args:
        left: str. Lefthand timepoint.
        right: str. Righthand timepoint.

    Returns:
        list(str). List of files that are different between the two points.
    """
    cmd_string = GIT_CMD_DIFF_NAMES_ONLY_FORMAT_STRING % (left, right)
    diff_output = common.run_cmd(cmd_string.split(' '))
    return diff_output.splitlines()
# ===== Example 17 =====
def get_chrome_version():
    """Get the current version of Chrome.

    Note that this only works on Linux systems. On macOS, for example,
    the `google-chrome` command may not work.

    Returns:
        str. The version of Chrome we found.
    """
    version_output = str(common.run_cmd(['google-chrome', '--version']))
    # Keep only digits and dots from output like 'Google Chrome 88.0.4324.96'.
    return ''.join(re.findall(r'([0-9]|\.)', version_output))
# ===== Example 18 =====
def did_supported_audio_languages_change(remote_alias,
                                         previous_release_version):
    """Checks changes in constants.SUPPORTED_AUDIO_LANGUAGES between
    the current and previous release.

    Args:
        remote_alias: str. The alias for Oppia repo.
        previous_release_version: str. The version of the previous release.

    Returns:
        bool. Whether supported audio languages have changed.
    """
    try:
        from constants import constants
        supported_audio_language_ids_for_current_release = [
            lang_dict['id']
            for lang_dict in constants['SUPPORTED_AUDIO_LANGUAGES']
        ]

        # Check out the previous release's copy of constants.ts so it can
        # be compared against the current one.
        common.run_cmd([
            'git', 'checkout',
            '%s/release-%s' % (remote_alias, previous_release_version), '--',
            'assets/constants.ts'
        ])
        # NOTE(review): Python caches modules in sys.modules, so this second
        # import only sees the checked-out file if the `constants` module
        # re-reads it on import -- confirm, otherwise both lists would be
        # identical and this function would always return False.
        from constants import constants
        supported_audio_language_ids_for_previous_release = [
            lang_dict['id']
            for lang_dict in constants['SUPPORTED_AUDIO_LANGUAGES']
        ]
    finally:
        # Always restore the working-tree copy of constants.ts.
        common.run_cmd(['git', 'reset', 'assets/constants.ts'])
        common.run_cmd(['git', 'checkout', '--', 'assets/constants.ts'])

    # Compare as sorted lists so ordering differences do not count as change.
    return (sorted(supported_audio_language_ids_for_current_release) !=
            sorted(supported_audio_language_ids_for_previous_release))
def _get_changed_filenames_since_tag(release_tag_to_diff_against):
    """Get names of changed files from git since a given release.

    Args:
        release_tag_to_diff_against: str. The release tag to diff against.

    Returns:
        list(str). List of filenames for files that have been modified since
        the release against which diff is being checked.
    """
    cmd_string = GIT_CMD_DIFF_NAMES_ONLY_FORMAT_STRING % (
        release_tag_to_diff_against, 'HEAD')
    diff_output = common.run_cmd(cmd_string.split(' '))
    return diff_output.splitlines()
# ===== Example 20 =====
def main(personal_access_token):
    """Updates the files corresponding to LOCAL_FECONF_PATH and
    LOCAL_CONSTANTS_PATH after doing the prerequisite checks.

    Args:
        personal_access_token: str. The personal access token for the
            GitHub id of user.
    """
    # Prerequisite checks: correct cwd, a release branch, and up-to-date
    # release scripts; the terms page must also be reachable.
    common.require_cwd_to_be_oppia()
    assert common.is_current_branch_a_release_branch(), (
        'Current branch is not a release branch_name')
    common.ensure_release_scripts_folder_exists_and_is_up_to_date()
    try:
        python_utils.url_open(TERMS_PAGE_URL)
    except Exception:
        raise Exception('Terms mainpage does not exist on Github.')

    files_to_restore = [LOCAL_FECONF_PATH, LOCAL_CONSTANTS_PATH]
    try:
        check_updates_to_terms_of_service(personal_access_token)
        add_mailgun_api_key()
        apply_changes_based_on_config(
            LOCAL_FECONF_PATH, FECONF_CONFIG_PATH, FECONF_REGEX)
        apply_changes_based_on_config(
            LOCAL_CONSTANTS_PATH, CONSTANTS_CONFIG_PATH, CONSTANTS_REGEX)
    except Exception as e:
        # Roll back any partially-applied edits before surfacing the error.
        common.run_cmd(['git', 'checkout', '--'] + files_to_restore)
        raise Exception(e)

    common.ask_user_to_confirm(
        'Done! Please check feconf.py and assets/constants.ts to ensure that '
        'the changes made are correct. Specifically verify that the '
        'MAILGUN_API_KEY and REDISHOST are updated correctly and '
        'other config changes are corresponding to %s and %s.\n' % (
            FECONF_CONFIG_PATH, CONSTANTS_CONFIG_PATH))
# ===== Example 21 =====
def build_js_files(dev_mode_setting):
    """Build the javascript files.

    Args:
        dev_mode_setting: bool. Represents whether to run the related commands
            in dev mode.
    """
    update_dev_mode_in_constants_js(CONSTANT_FILE_PATH, dev_mode_setting)
    if dev_mode_setting:
        # The 'hashes.json' file is used by the `url-interpolation` service.
        if not os.path.isfile(HASHES_FILE_PATH):
            with python_utils.open_file(HASHES_FILE_PATH, 'w') as hash_file:
                hash_file.write('{}')
        try:
            common.run_cmd([
                common.NODE_BIN_PATH, WEBPACK_BIN_PATH, '--config',
                'webpack.dev.config.ts'
            ])
        except subprocess.CalledProcessError as error:
            # Surface webpack's output and propagate its exit code.
            python_utils.PRINT(error.output)
            sys.exit(error.returncode)
    else:
        python_utils.PRINT('  Generating files for production mode...')
    build.main(args=([] if dev_mode_setting else ['--prod_env']))
# ===== Example 22 =====
def gather_logs(start, stop='HEAD'):
    """Gathers the logs between the start and endpoint.

    Args:
        start: str. Tag, Branch or SHA1 of start point.
        stop: str. Tag, Branch or SHA1 of end point, defaults to HEAD.

    Returns:
        list(Log). List of Logs.
    """
    get_logs_cmd = GIT_CMD_GET_LOGS_FORMAT_STRING.format(
        GROUP_SEP, start, stop)
    raw_output = common.run_cmd(get_logs_cmd.split(' '))
    # The unicode conversion is required because there can be non-ascii
    # characters in the logs and it can result in breaking the flow
    # of release summary generation.
    entries = python_utils.UNICODE(raw_output, 'utf-8').split('\x00')
    if entries == ['']:
        return []
    return [Log(*entry.strip().split(GROUP_SEP)) for entry in entries]
# ===== Example 23 =====
def check_versions(current_release):
    """Checks if the versions for the exploration or collection schemas have
    changed.

    Args:
        current_release: str. The current release tag to diff against.

    Returns:
        list(str). Names of the feconf variables whose versions changed.
    """
    # Load the released feconf via `git show` and the working-tree feconf
    # from disk, then compare each tracked version constant.
    git_show_cmd = GIT_CMD_SHOW_FORMAT_STRING % current_release
    old_feconf = common.run_cmd(git_show_cmd.split(' '))
    with python_utils.open_file(FECONF_FILEPATH, 'r') as feconf_file:
        new_feconf = feconf_file.read()

    feconf_changed_version = []
    for variable in FECONF_VAR_NAMES:
        pattern = VERSION_RE_FORMAT_STRING % variable
        old_version = re.findall(pattern, old_feconf)[0]
        new_version = re.findall(pattern, new_feconf)[0]
        if old_version != new_version:
            feconf_changed_version.append(variable)
    return feconf_changed_version
# ===== Example 24 =====
def get_extra_commits_in_new_release(base_commit, repo):
    """Gets extra commits in the new release.

    Args:
        base_commit: str. The base commit common between current branch and the
            latest release.
        repo: github.Repository.Repository. The PyGithub object for the repo.

    Returns:
        list(github.Commit.Commit). List of commits from the base commit up to
        the current commit, which haven't been cherrypicked already.
    """
    get_commits_cmd = GIT_CMD_TEMPLATE_GET_NEW_COMMITS % base_commit
    out = common.run_cmd(get_commits_cmd.split(' ')).split('\n')
    commits = []
    for line in out:
        # Lines that start with a - are already cherrypicked. The commits of
        # interest are on lines that start with +. Using startswith() also
        # safely skips empty lines (e.g. from a trailing newline), where the
        # original `line[0]` indexing raised IndexError.
        if line.startswith('+'):
            line = line[2:]
            commit = repo.get_commit(line[:line.find(' ')])
            commits.append(commit)
    return commits
def get_changed_schema_version_constant_names(release_tag_to_diff_against):
    """Returns a list of schema version constant names in feconf that have
    changed since the release against which diff is being checked.

    Args:
        release_tag_to_diff_against: str. The release tag to diff against.

    Returns:
        list(str). List of version constant names in feconf that changed.
    """
    # Load the released feconf via `git show` and the working-tree feconf
    # from disk, then compare each schema version constant.
    git_show_cmd = GIT_CMD_SHOW_FORMAT_STRING % release_tag_to_diff_against
    old_feconf = common.run_cmd(git_show_cmd.split(' '))
    with python_utils.open_file(FECONF_FILEPATH, 'r') as feconf_file:
        new_feconf = feconf_file.read()

    changed_version_constants_in_feconf = []
    for version_constant in FECONF_SCHEMA_VERSION_CONSTANT_NAMES:
        pattern = VERSION_RE_FORMAT_STRING % version_constant
        old_version = re.findall(pattern, old_feconf)[0]
        new_version = re.findall(pattern, new_feconf)[0]
        if old_version != new_version:
            changed_version_constants_in_feconf.append(version_constant)
    return changed_version_constants_in_feconf
# ===== Example 26 =====
def check_backup_restoration_status():
    """Checks the status of backup restoration process."""
    status_output = common.run_cmd(
        [GCLOUD_PATH, 'datastore', 'operations', 'list'])
    python_utils.PRINT(status_output)
# ===== Example 27 =====
def execute_deployment():
    """Executes the deployment process after doing the prerequisite checks.

    Raises:
        Exception: App name is invalid.
        Exception: Custom version is used with production app.
        Exception: App name is not specified.
        Exception: The deployment script is not run from a release or test
            branch.
        Exception: The deployment script is run for prod server from a test
            branch.
        Exception: Current release version has '.' character.
        Exception: Last commit message is invalid.
        Exception: The mailgun API key is not added before deployment.
        Exception: Could not find third party directory.
        Exception: Invalid directory accessed during deployment.
    """
    parsed_args = _PARSER.parse_args()
    custom_version = None
    if parsed_args.app_name:
        app_name = parsed_args.app_name
        # Valid targets: the prod server, the test server, or any app whose
        # name contains 'migration'.
        if app_name not in [APP_NAME_OPPIASERVER, APP_NAME_OPPIATESTSERVER
                            ] and ('migration' not in app_name):
            raise Exception('Invalid app name: %s' % app_name)
        if parsed_args.version and app_name == APP_NAME_OPPIASERVER:
            raise Exception('Cannot use custom version with production app.')
        # Note that custom_version may be None.
        custom_version = parsed_args.version
    else:
        raise Exception('No app name specified.')

    current_branch_name = common.get_current_branch_name()

    # Release directory name of the form deploy-<app>-<branch>-<timestamp>,
    # with '.' and ':' in the app name replaced by '-'.
    release_dir_name = 'deploy-%s-%s-%s' % (
        '-'.join('-'.join(app_name.split('.')).split(':')),
        current_branch_name, CURRENT_DATETIME.strftime('%Y%m%d-%H%M%S'))
    release_dir_path = os.path.join(os.getcwd(), '..', release_dir_name)

    deploy_data_path = os.path.join(os.getcwd(), os.pardir, 'release-scripts',
                                    'deploy_data', app_name)

    install_third_party_libs.main()

    if not (common.is_current_branch_a_release_branch() or
            (common.is_current_branch_a_test_branch())):
        raise Exception(
            'The deployment script must be run from a release or test branch.')
    if common.is_current_branch_a_test_branch() and (app_name in [
            APP_NAME_OPPIASERVER, APP_NAME_OPPIATESTSERVER
    ]):
        raise Exception('Test branch can only be deployed to backup server.')
    if custom_version is not None:
        current_release_version = custom_version.replace(DOT_CHAR, HYPHEN_CHAR)
    else:
        # Derive the version from the release branch name, e.g.
        # release-1.2.3 -> 1-2-3.
        current_release_version = current_branch_name[
            len(common.RELEASE_BRANCH_NAME_PREFIX):].replace(
                DOT_CHAR, HYPHEN_CHAR)

    # This is required to compose the release_version_library_url
    # (defined in switch_version function) correctly.
    if '.' in current_release_version:
        raise Exception('Current release version has \'.\' character.')

    # Do prerequisite checks.
    common.require_cwd_to_be_oppia()
    common.ensure_release_scripts_folder_exists_and_is_up_to_date()
    gcloud_adapter.require_gcloud_to_be_available()
    try:
        # Extra safety checks apply only when deploying the prod server.
        if app_name == APP_NAME_OPPIASERVER:
            create_release_doc()
            release_version_number = common.get_current_release_version_number(
                current_branch_name)
            # NOTE(review): subprocess.check_output returns bytes on
            # Python 3; the startswith() below assumes str, so this script
            # appears to target Python 2 -- confirm before porting.
            last_commit_message = subprocess.check_output(
                'git log -1 --pretty=%B'.split())
            if not last_commit_message.startswith(
                    'Update authors and changelog for v%s' %
                (release_version_number)):
                raise Exception('Invalid last commit message: %s.' %
                                last_commit_message)

            check_travis_and_circleci_tests(current_branch_name)

            personal_access_token = common.get_personal_access_token()
            g = github.Github(personal_access_token)
            repo = g.get_organization('oppia').get_repo('oppia')
            common.check_blocking_bug_issue_count(repo)
            common.check_prs_for_current_release_are_released(repo)
            update_configs.main(personal_access_token)
            with python_utils.open_file(FECONF_PATH, 'r') as f:
                feconf_contents = f.read()
                if ('MAILGUN_API_KEY' not in feconf_contents
                        or 'MAILGUN_API_KEY = None' in feconf_contents):
                    raise Exception(
                        'The mailgun API key must be added before deployment.')
        if not os.path.exists(THIRD_PARTY_DIR):
            raise Exception(
                'Could not find third_party directory at %s. Please run '
                'install_third_party_libs.py prior to running this script.' %
                THIRD_PARTY_DIR)

        current_git_revision = subprocess.check_output(
            ['git', 'rev-parse', 'HEAD']).strip()

        # Create a folder in which to save the release candidate.
        python_utils.PRINT('Ensuring that the release directory parent exists')
        common.ensure_directory_exists(os.path.dirname(release_dir_path))

        # Copy files to the release directory. Omits the .git subfolder.
        python_utils.PRINT('Copying files to the release directory')
        shutil.copytree(os.getcwd(),
                        release_dir_path,
                        ignore=shutil.ignore_patterns('.git'))

        # Change the current directory to the release candidate folder.
        with common.CD(release_dir_path):
            if not os.getcwd().endswith(release_dir_name):
                raise Exception(
                    'Invalid directory accessed during deployment: %s' %
                    os.getcwd())

            python_utils.PRINT('Changing directory to %s' % os.getcwd())

            python_utils.PRINT('Preprocessing release...')
            preprocess_release(app_name, deploy_data_path)

            update_and_check_indexes(app_name)
            build_scripts()
            deploy_application_and_write_log_entry(app_name,
                                                   current_release_version,
                                                   current_git_revision)

            python_utils.PRINT('Returning to oppia/ root directory.')

        switch_version(app_name, current_release_version)
        flush_memcache(app_name)
        check_breakage(app_name, current_release_version)

        python_utils.PRINT('Done!')
    finally:
        # Always restore the feconf/constants edits made by update_configs,
        # even if deployment failed part-way through.
        common.run_cmd([
            'git', 'checkout', '--', update_configs.LOCAL_FECONF_PATH,
            update_configs.LOCAL_CONSTANTS_PATH
        ])
# ===== Example 28 =====
def main():
    """ add_nodes main loop.

    Elastically adds new nodes to a running Trafodion cluster:
    reads the existing trafodion_config, copies the Trafodion package
    and config files to the new nodes, runs the add-node wrapper
    scripts, and finally registers the nodes via sqshell (or sqgen if
    the instance is down).
    """
    cfgs = defaultdict(str)

    # handle parser option
    options = get_options()
    if not options.nodes:
        err_m('Must specify the node names using \'--nodes\' option')

    # get node list from user input
    new_nodes = expNumRe(options.nodes)
    if not new_nodes:
        err_m('Incorrect format')

    # Fix: this prompt was corrupted by secret redaction in the source.
    # Prompt for the SSH password only when --pwd was given; otherwise
    # assume passwordless SSH is configured.
    if options.pwd:
        pwd = getpass.getpass('Input remote host SSH Password: ')
    else:
        pwd = ''

    u = UserInput(options, pwd)
    # NOTE(review): prompt_mode is expected to be a module-level setting
    # defined elsewhere in this file — confirm.
    g = lambda n: u.get_input(n, cfgs[n], prompt_mode=prompt_mode)

    format_output('Trafodion Elastic Add Nodes Script')

    ### read configs from current trafodion_config and save it to cfgs
    if os.path.exists(TRAF_CFG_FILE):
        with open(TRAF_CFG_FILE, 'r') as f:
            traf_cfgs = f.readlines()
        for traf_cfg in traf_cfgs:
            if not traf_cfg.strip(): continue
            # skip malformed lines with no '=' separator
            if '=' not in traf_cfg: continue
            # split only on the first '=' so values containing '=' survive
            key, value = traf_cfg.replace('export ', '').split('=', 1)
            value = value.replace('"', '')
            value = value.replace('\n', '')
            cfgs[key.lower()] = value
    else:
        err_m(
            'Cannot find %s, be sure to run this script on one of trafodion nodes'
            % TRAF_CFG_FILE)

    ### config check
    if not cfgs['hbase_lib_path'] or not cfgs['traf_version']:
        err_m('Missing parameters in Trafodion config file')

    if not cfgs['traf_home'] or not cmd_output(
            '%s ls %s' % (get_sudo_prefix(), cfgs['traf_home'])):
        err_m('Cannot find trafodion binary folder')
    # get trafodion user from traf_home path
    cfgs['traf_user'] = cfgs['traf_home'].split('/')[-2]
    if not cfgs['traf_user']:
        err_m('Cannot detect trafodion user')

    ### parse trafodion user's password hash from /etc/shadow
    cfgs['traf_shadow'] = cmd_output(
        "%s grep %s /etc/shadow |awk -F: '{print $2}'" %
        (get_sudo_prefix(), cfgs['traf_user']))

    def copy_files():
        # package trafodion binary into a tar file (reuse an existing one)
        if not os.path.exists(TRAF_PKG_FILE):
            info(
                'Creating trafodion packages of %s, this will take a while ...'
                % cfgs['traf_home'])
            run_cmd_as_user(
                cfgs['traf_user'],
                'cd %s; tar czf %s ./* --exclude logs/* --exclude core.* --exclude tmp/*'
                % (cfgs['traf_home'], TRAF_PKG_FILE))
        else:
            info('Using existing trafodion package %s' % TRAF_PKG_FILE)

        info(
            'Copying trafodion files to new nodes, this will take a while ...')
        # stage the trafodion user's .ssh folder in /tmp so it can be
        # copied by the (possibly non-trafodion) SSH user
        run_cmd('%s cp -rf %s/../.ssh /tmp' %
                (get_sudo_prefix(), cfgs['traf_home']))
        run_cmd('%s chmod -R 755 /tmp/.ssh' % get_sudo_prefix())
        traf_ssh_folder = '/tmp/.ssh'

        hbase_trx_file = cmd_output('ls %s/hbase-trx-*' %
                                    cfgs['hbase_lib_path'])
        trafodion_utility_file = cmd_output('ls %s/trafodion-utility-*' %
                                            cfgs['hbase_lib_path'])

        files = [
            TRAF_CFG_FILE, TRAF_PKG_FILE, traf_ssh_folder, hbase_trx_file,
            trafodion_utility_file
        ]

        # copy to all new nodes in parallel, one thread per node
        remote_insts = [Remote(h, pwd=pwd) for h in new_nodes]
        threads = [
            Thread(target=r.copy, args=(files, '/tmp')) for r in remote_insts
        ]
        for thread in threads:
            thread.start()
        for thread in threads:
            thread.join()

        for r in remote_insts:
            if r.rc != 0: err_m('Failed to copy files to %s' % r.host)

    ### copy trafodion_config/trafodion-package/hbase-trx to the new nodes
    copy_files()

    ### set parameters
    if cfgs['enable_ha'].upper() == 'true':
        g('dcs_backup_nodes')
        cfgs['dcs_ha'] = 'Y'
    else:
        cfgs['dcs_ha'] = 'N'

    if cfgs['trafodion_enable_authentication'] == 'YES':
        cfgs['ldap_security'] = 'Y'
    else:
        cfgs['ldap_security'] = 'N'

    # Kerberos-secured Hadoop needs KDC credentials for the new nodes
    if cfgs['secure_hadoop'].upper() == 'Y':
        g('kdc_server')
        g('admin_principal')
        g('kdcadmin_pwd')

    #TODO: offline support
    cfgs['offline_mode'] = 'N'

    format_output('AddNode sub scripts Start')

    ### run addNode script on new nodes ###
    cfgs['node_list'] = ','.join(new_nodes)
    info('Running add node setup on new node(s) [%s] ...' % cfgs['node_list'])
    wrapper.run(cfgs, options, mode='addnodes_new', pwd=pwd)

    ### run dcs setup script on all nodes ###
    # get current trafodion node list
    current_nodes = cmd_output('%s su - %s -c "trafconf -name 2>/dev/null"' %
                               (get_sudo_prefix(), cfgs['traf_user'])).split()
    all_nodes = list(set(new_nodes + current_nodes))
    cfgs['node_list'] = ','.join(all_nodes)
    info('Running dcs setup on all node(s) [%s] ...' % cfgs['node_list'])
    wrapper.run(cfgs, options, mode='addnodes_all', pwd=pwd)

    ### do sqshell node add/up, sqregen
    # check if trafodion is running
    mon_process = cmd_output('ps -ef|grep -v grep|grep -c "monitor COLD"')
    if int(mon_process) > 0:
        info('Trafodion instance is up, adding node in sqshell ...')

        # reuse the node attributes of an existing node, e.g.
        # cores=0-1;processors=2;roles=connection,aggregation,storage
        sqconfig_ptr = cmd_output(
            '%s su - %s -c "trafconf -node|sed -n 2p|cut -d\\\";\\\" -f3-5"' %
            (get_sudo_prefix(), cfgs['traf_user']))
        for node in new_nodes:
            info('adding node [%s] in sqshell ...' % node)
            run_cmd_as_user(
                cfgs['traf_user'],
                'echo "node add {node-name %s,%s}" | sqshell -a' %
                (node, sqconfig_ptr))
            run_cmd_as_user(cfgs['traf_user'],
                            'echo "node up %s" | sqshell -a' % node)
            ok('Node [%s] added!' % node)

        info('Starting DCS on new nodes ...')
        run_cmd_as_user(cfgs['traf_user'], 'dcsstart')
    else:
        # instance is down: regenerate sqconfig.db instead of hot-adding
        info('Trafodion instance is not up, do sqgen ...')
        run_cmd_as_user(cfgs['traf_user'],
                        'rm %s/sqconfig.db' % cfgs['traf_var'])
        run_cmd_as_user(cfgs['traf_user'], 'sqgen')
        ok('Setup completed. You need to start trafodion manually')

    ### clean up staged credentials and the package tarball
    run_cmd('%s rm -rf /tmp/.ssh' % get_sudo_prefix())
    run_cmd('%s rm -rf %s' % (get_sudo_prefix(), TRAF_PKG_FILE))

    format_output('AddNode Complete')
    info(
        'NOTICE: You need to manually restart RegionServer on newly added nodes to take effect'
    )
示例#29
0
def load_diff(base_branch):
    """Load the diff between the head and base.

    Only determine the diffs for files in FILES_THAT_NEED_DIFFS. Other
    files will be listed as having changed, but their diff lines will
    not be returned.

    Args:
        base_branch: str. Base branch of PR.

    Returns:
        tuple(list(tuple(str, str)), dict(str, list(str)). Tuple of a
        list of changed files (each a tuple of before, after) and a
        dictionary mapping from file names to list of diff lines for
        each file. In the event of a parsing error, a tuple of an empty
        list and empty dictionary is returned.
    """
    base_ref = '{}/{}'.format(UPSTREAM_REMOTE, base_branch)
    name_status_output = common.run_cmd(
        ['git', 'diff', '--name-status', base_ref])

    # Each line is "<status>\t<file>" or "<status>\t<old>\t<new>" (renames).
    changed_files = []
    for raw_line in name_status_output.split('\n'):
        if not raw_line:
            continue
        fields = raw_line.split()
        if len(fields) == 2:
            changed_files.append((fields[1], fields[1]))
        elif len(fields) == 3:
            changed_files.append((fields[1], fields[2]))
        else:
            python_utils.PRINT('Failed to parse diff --name-status line "%s"' %
                               raw_line)
            return [], {}

    diffs_by_name = {}
    for before_name, after_name in changed_files:
        for name in (before_name, after_name):
            # Don't re-generate a diff we already have, and skip files
            # whose diff lines are not needed.
            if name in diffs_by_name or name not in FILES_THAT_NEED_DIFFS:
                continue
            raw_diff = common.run_cmd([
                'git',
                'diff',
                '-U0',
                base_ref,
                '--',
                name,
            ])
            diff_lines = raw_diff.rstrip().split('\n')
            # Skip past the diff header (ends at the first "@@" hunk
            # marker). See https://git-scm.com/docs/diff-format for
            # details on the git diff format.
            header_end = 0
            for diff_line in diff_lines:
                header_end += 1
                if diff_line.startswith('@@'):
                    break
            if header_end == len(diff_lines):
                # We reached the end of the diff without finding the
                # header, or the header consumes the entire diff.
                python_utils.PRINT(
                    'Failed to find end of header in "%s" diff' % name)
                return [], {}
            diffs_by_name[name] = diff_lines[header_end:]
    return changed_files, diffs_by_name
示例#30
0
def main():
    """ add_nodes main loop.

    Elastically adds new nodes to a running Trafodion cluster:
    reads the existing trafodion_config, copies the Trafodion package
    and config files to the new nodes, runs the add-node wrapper
    scripts, and finally registers the nodes via sqshell (or sqgen if
    the instance is down).
    """
    cfgs = defaultdict(str)

    # handle parser option
    options = get_options()
    if not options.nodes:
        err_m('Must specify the node names using \'--nodes\' option')

    # get node list from user input
    new_nodes = expNumRe(options.nodes)
    if not new_nodes:
        err_m('Incorrect format')

    # Fix: this prompt was corrupted by secret redaction in the source.
    # Prompt for the SSH password only when --pwd was given; otherwise
    # assume passwordless SSH is configured.
    if options.pwd:
        pwd = getpass.getpass('Input remote host SSH Password: ')
    else:
        pwd = ''

    u = UserInput(options, pwd)
    # NOTE(review): prompt_mode is expected to be a module-level setting
    # defined elsewhere in this file — confirm.
    g = lambda n: u.get_input(n, cfgs[n], prompt_mode=prompt_mode)

    format_output('Trafodion Elastic Add Nodes Script')

    ### read configs from current trafodion_config and save it to cfgs
    if os.path.exists(TRAF_CFG_FILE):
        with open(TRAF_CFG_FILE, 'r') as f:
            traf_cfgs = f.readlines()
        for traf_cfg in traf_cfgs:
            if not traf_cfg.strip(): continue
            # skip malformed lines with no '=' separator
            if '=' not in traf_cfg: continue
            # split only on the first '=' so values containing '=' survive
            key, value = traf_cfg.replace('export ', '').split('=', 1)
            value = value.replace('"','')
            value = value.replace('\n','')
            cfgs[key.lower()] = value
    else:
        err_m('Cannot find %s, be sure to run this script on one of trafodion nodes' % TRAF_CFG_FILE)

    ### config check
    if not cfgs['hbase_lib_path'] or not cfgs['traf_version']:
        err_m('Missing parameters in Trafodion config file')

    if not cfgs['traf_home'] or not cmd_output('%s ls %s' % (get_sudo_prefix(), cfgs['traf_home'])):
        err_m('Cannot find trafodion binary folder')
    # get trafodion user from traf_home path
    cfgs['traf_user'] = cfgs['traf_home'].split('/')[-2]
    if not cfgs['traf_user']:
        err_m('Cannot detect trafodion user')

    ### parse trafodion user's password hash from /etc/shadow
    cfgs['traf_shadow'] = cmd_output("%s grep %s /etc/shadow |awk -F: '{print $2}'" % (get_sudo_prefix(), cfgs['traf_user']))

    def copy_files():
        # package trafodion binary into a tar file (reuse an existing one)
        if not os.path.exists(TRAF_PKG_FILE):
            info('Creating trafodion packages of %s, this will take a while ...' % cfgs['traf_home'])
            run_cmd_as_user(cfgs['traf_user'], 'cd %s; tar czf %s ./* --exclude logs/* --exclude core.* --exclude tmp/*' % (cfgs['traf_home'], TRAF_PKG_FILE))
        else:
            info('Using existing trafodion package %s' % TRAF_PKG_FILE)

        info('Copying trafodion files to new nodes, this will take a while ...')
        # stage the trafodion user's .ssh folder in /tmp so it can be
        # copied by the (possibly non-trafodion) SSH user
        run_cmd('%s cp -rf %s/../.ssh /tmp' % (get_sudo_prefix(), cfgs['traf_home']))
        run_cmd('%s chmod -R 755 /tmp/.ssh' % get_sudo_prefix())
        traf_ssh_folder = '/tmp/.ssh'

        hbase_trx_file = cmd_output('ls %s/hbase-trx-*' % cfgs['hbase_lib_path'])
        trafodion_utility_file = cmd_output('ls %s/trafodion-utility-*' % cfgs['hbase_lib_path'])

        files = [TRAF_CFG_FILE, TRAF_PKG_FILE, traf_ssh_folder, hbase_trx_file, trafodion_utility_file]

        # copy to all new nodes in parallel, one thread per node
        remote_insts = [Remote(h, pwd=pwd) for h in new_nodes]
        threads = [Thread(target=r.copy, args=(files, '/tmp')) for r in remote_insts]
        for thread in threads: thread.start()
        for thread in threads: thread.join()

        for r in remote_insts:
            if r.rc != 0: err_m('Failed to copy files to %s' % r.host)

    ### copy trafodion_config/trafodion-package/hbase-trx to the new nodes
    copy_files()

    ### set parameters
    if cfgs['enable_ha'].upper() == 'true':
        g('dcs_backup_nodes')
        cfgs['dcs_ha'] = 'Y'
    else:
        cfgs['dcs_ha'] = 'N'

    if cfgs['trafodion_enable_authentication'] == 'YES':
        cfgs['ldap_security'] = 'Y'
    else:
        cfgs['ldap_security'] = 'N'

    # Kerberos-secured Hadoop needs KDC credentials for the new nodes
    if cfgs['secure_hadoop'].upper() == 'Y':
        g('kdc_server')
        g('admin_principal')
        g('kdcadmin_pwd')

    #TODO: offline support
    cfgs['offline_mode'] = 'N'


    format_output('AddNode sub scripts Start')

    ### run addNode script on new nodes ###
    cfgs['node_list'] = ','.join(new_nodes)
    info('Running add node setup on new node(s) [%s] ...' % cfgs['node_list'])
    wrapper.run(cfgs, options, mode='addnodes_new', pwd=pwd)

    ### run dcs setup script on all nodes ###
    # get current trafodion node list
    current_nodes = cmd_output('%s su - %s -c "trafconf -name 2>/dev/null"' % (get_sudo_prefix(), cfgs['traf_user'])).split()
    all_nodes = list(set(new_nodes + current_nodes))
    cfgs['node_list'] = ','.join(all_nodes)
    info('Running dcs setup on all node(s) [%s] ...' % cfgs['node_list'])
    wrapper.run(cfgs, options, mode='addnodes_all', pwd=pwd)

    ### do sqshell node add/up, sqregen
    # check if trafodion is running
    mon_process = cmd_output('ps -ef|grep -v grep|grep -c "monitor COLD"')
    if int(mon_process) > 0:
        info('Trafodion instance is up, adding node in sqshell ...')

        # reuse the node attributes of an existing node, e.g.
        # cores=0-1;processors=2;roles=connection,aggregation,storage
        sqconfig_ptr = cmd_output('%s su - %s -c "trafconf -node|sed -n 2p|cut -d\\\";\\\" -f3-5"' % (get_sudo_prefix(), cfgs['traf_user']))
        for node in new_nodes:
            info('adding node [%s] in sqshell ...' % node)
            run_cmd_as_user(cfgs['traf_user'], 'echo "node add {node-name %s,%s}" | sqshell -a' % (node, sqconfig_ptr))
            run_cmd_as_user(cfgs['traf_user'], 'echo "node up %s" | sqshell -a' % node)
            ok('Node [%s] added!' % node)

        info('Starting DCS on new nodes ...')
        run_cmd_as_user(cfgs['traf_user'], 'dcsstart')
    else:
        # instance is down: regenerate sqconfig.db instead of hot-adding
        info('Trafodion instance is not up, do sqgen ...')
        run_cmd_as_user(cfgs['traf_user'], 'rm %s/sql/scripts/sqconfig.db' % cfgs['traf_home'])
        run_cmd_as_user(cfgs['traf_user'], 'sqgen')
        ok('Setup completed. You need to start trafodion manually')

    ### clean up staged credentials and the package tarball
    run_cmd('%s rm -rf /tmp/.ssh' % get_sudo_prefix())
    run_cmd('%s rm -rf %s' % (get_sudo_prefix(), TRAF_PKG_FILE))

    format_output('AddNode Complete')
    info('NOTICE: You need to manually restart RegionServer on newly added nodes to take effect')