Example #1
class HaloFeedback(Feedback):
    def __init__(self, message=None):
        self.spinner = Halo(text=message or '', spinner='dots')
        if message and not default_config.debug:
            self.spinner.start()

    def update_message(self, message):
        super().update_message(message)
        if not self.spinner._spinner_id and not default_config.debug:
            self.spinner.start()
        self.spinner.text = (message + ' ...') if self.message else ''

    def succeeded(self):
        self.spinner.text = self.message
        self.spinner.succeed()

    def errored(self, error):
        # self.spinner.text = str(error) if error else self.message
        self.spinner.text = f'{self.message} ... {colored(str(error), "red")}'
        self.spinner.fail()
        sys.exit(1)

    def warning(self, warning):
        self.spinner.text = f'{self.message} ... {colored(str(warning), "yellow")}'
        self.spinner.warn()

    def info(self, message):
        self.spinner.info(message)

    def exists(self, error):
        self.warning('exists')
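
Example #1 subclasses a Feedback base class and reads a default_config object, neither of which is shown above. A minimal sketch of what those pieces might look like (the names mirror the snippet, but the implementation is an assumption, not the original project's code):

import sys

from halo import Halo
from termcolor import colored


class Feedback:
    """Assumed minimal base class: it only keeps track of the current message."""

    def __init__(self, message=None):
        self.message = message

    def update_message(self, message):
        self.message = message


class _Config:
    debug = False  # assumed flag; the real default_config may carry more settings


default_config = _Config()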
Example #2
    def _check_for_updates_with_appimageupdatetool(self,
                                                   path_appimageupdate,
                                                   show_spinner=True):
        path_to_old_appimage = self.appdata().get('path')

        if show_spinner:
            spinner = Halo('Checking for updates', spinner='dots')
            spinner.start()

        _check_update_command = shlex.split(
            "{au} --check-for-update {app}".format(
                au=path_appimageupdate,
                app=path_to_old_appimage,
            ))
        _check_update_proc = subprocess.Popen(_check_update_command,
                                              stdout=subprocess.PIPE,
                                              stderr=subprocess.PIPE)
        e_code = _check_update_proc.wait(600)
        if e_code == 0:
            if show_spinner:
                spinner.succeed("Already up-to-date!")
            return
        elif e_code == 1:
            if show_spinner:
                spinner.info("Updates found")
        else:
            if show_spinner:
                spinner.fail("Update information is not embedded within the "
                             "AppImage. ")
                spinner.fail("Consider informing the AppImage author to add a "
                             ".zsync file")
                spinner.fail(
                    "Alternatively, pass the --no-appimageupdate option")
                spinner.stop()
Example #3
def populate_locations(countries):
    spinner = Halo(spinner='dots')
    spinner.info(f'Posting {len(countries.index)} locations from ISO-3166...')

    for index, country in countries.iterrows():
        newLocation = Location(country['countryCode'], country['countryName'])

        _ = prepare_resource('locations', newLocation.countryCode,
                             'findByCountryCode', newLocation.serialize())
Example #4
    def test_info(self):
        """Test info method
        """
        spinner = Halo(stream=self._stream)
        spinner.start('foo')
        spinner.info()

        output = self._get_test_output()['text']
        pattern = re.compile(r'(ℹ|¡) foo', re.UNICODE)

        self.assertRegexpMatches(output[-1], pattern)
        spinner.stop()
Example #5
def mint_used_ids(mirids_file_name, mirid_controller_url):
    spinner = Halo(spinner='dots')
    spinner.info('Minting used mirids from old database in the new one...')

    with open(mirids_file_name) as mirids_file:
        lines = mirids_file.read().splitlines()

    miriam_array = []

    for mirid in lines:
        if len(mirid) != 12:
            spinner.fail(f'Invalid miriam: [{mirid}]')

        else:
            miriam_array.append(int(mirid.split(':')[1]))

    first_miriam = miriam_array[0]
    last_miriam = miriam_array[-1]

    spinner.info(f'Found [{len(miriam_array)}] MIR ids, spanning from [{first_miriam}] to [{last_miriam}]...')

    spinner.start(f'Launching {last_miriam} async load requests...')

    loop = asyncio.get_event_loop()
    future = asyncio.ensure_future(async_mint(first_miriam, last_miriam, f'{mirid_controller_url}loadId/'))
    loop.run_until_complete(future)

    spinner.succeed(f'Done.')


    spinner = Halo(spinner='dots')
    spinner.info('Returning mirids in gaps between used ones...')

    unused_miriams = list(find_empty_ranges(miriam_array))

    spinner.info(f'Found [{len(unused_miriams)}] empty MIR id blocks: {unused_miriams}')

    for empty_block in unused_miriams:
        spinner.info(f'Returning MIR ids in range [{empty_block}]')

        return_miriam_array = []

        for mir_id in range(empty_block[0], empty_block[1]):
            return_miriam_array.append(mir_id)

        spinner.start(f'Launching {len(return_miriam_array)} async return requests...')

        future = asyncio.ensure_future(async_mint(return_miriam_array[0], return_miriam_array[-1], f'{mirid_controller_url}returnId/'))
        loop.run_until_complete(future)

        spinner.succeed(f'Done!')
Example #6
def submit_job(code_url, cpus, memory_mb, max_runtime_secs):
    job_spec = {
        "code_url": code_url,

        # TODO: Later, when we add support for resource_requirements, test some real values for this
        "resource_requirements": {
            "cpus": cpus,
            "memory_mb": memory_mb,
            "max_runtime_secs": max_runtime_secs
        }
    }

    print("Submitting job...")
    res = requests.post(submit_job_url, json=job_spec)
    if res.status_code != 200:
        print("Could not submit job!")
        sys.exit(1)

    resp = res.json()
    if not resp.get("success", False):
        print("Could not submit job!")
        sys.exit(1)

    job_id = resp['job_id']

    job_status_code = None

    spinner = Halo(text="Waiting for job updates", spinner='dots')
    spinner.start()

    while True:
        try:
            new_job_status_code = get_job_status(job_id)
            if new_job_status_code != job_status_code:
                if new_job_status_code in ("UNASSIGNED", "ASSIGNED"):
                    spinner.info(STATUS_CODE_MESSAGES[new_job_status_code])
                    spinner.start("Waiting for job updates")
                elif new_job_status_code == "FAILED":
                    spinner.fail(STATUS_CODE_MESSAGES[new_job_status_code])
                    spinner.start("Waiting for job updates")
                else:
                    # the job has succeeded
                    spinner.stop_and_persist(
                        symbol='🦄'.encode('utf-8'),
                        text=STATUS_CODE_MESSAGES[new_job_status_code])
                    break
                job_status_code = new_job_status_code
            time.sleep(JOB_STATUS_POLL_INTERVAL_SECS)
        except (KeyboardInterrupt, SystemExit):
            spinner.stop()
            break
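
The submit_job loop above (Example #6) relies on several module-level names (submit_job_url, STATUS_CODE_MESSAGES, JOB_STATUS_POLL_INTERVAL_SECS, get_job_status) that are defined elsewhere in that project. A hedged sketch of plausible stand-ins, with assumed endpoint paths and values:

import requests

API_BASE = "http://localhost:8000"        # assumed service address
submit_job_url = API_BASE + "/jobs"       # assumed submission endpoint
JOB_STATUS_POLL_INTERVAL_SECS = 2         # assumed polling interval

STATUS_CODE_MESSAGES = {
    "UNASSIGNED": "Job queued, waiting for a worker",
    "ASSIGNED": "Job assigned to a worker",
    "FAILED": "Job failed",
    "SUCCEEDED": "Job finished successfully",
}


def get_job_status(job_id):
    """Assumed helper: ask the service for the job's current status code."""
    res = requests.get("{}/jobs/{}/status".format(API_BASE, job_id))
    res.raise_for_status()
    return res.json()["status"]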
Example #7
def getDvScore(path_to_file):
    spinner = Halo(text="Checking dv score  of the cleaned data",
                   spinner='dots',
                   text_color="cyan")
    spinner.start()

    file = path_to_file
    url = 'https://dv3.datavalidation.com/api/v2/user/me/list/create_upload_url/'
    params = '?name=' + file + '&email_column_index=0&has_header=0&start_validation=false'
    headers = {'Authorization': 'Bearer ' + DV_API_KEY}
    s = requests.Session()
    a = requests.adapters.HTTPAdapter(max_retries=3)
    s.mount("https://", a)
    res = s.get(url + params, headers=headers)
    upload_csv_url = res.json()
    files = {
        'file': open(file, 'rb')  #open(OUTPUT_DIR_NAME + "/" + file, 'rb')
    }

    list_id = s.post(upload_csv_url, headers=headers, files=files)
    time.sleep(10)
    dv_result_url = 'https://dv3.datavalidation.com/api/v2/user/me/list/' + list_id.json(
    )
    dv_result = s.get(dv_result_url, headers=headers).json()
    while dv_result['status_value'] == 'PRE_VALIDATING':
        dv_result = requests.get(dv_result_url, headers=headers).json()
        spinner.info("Status percent complete: " +
                     str(dv_result['status_percent_complete']))
        time.sleep(5)  # sleep 5 seconds
    try:
        dv_result = requests.get(dv_result_url, headers=headers).json()
        percent = lambda count: round(
            (count / dv_result['subscriber_count']), 2) * 100

        spinner.succeed("Done checking dv score")
        print("The grade summary is: ")
        for score_name, score_value in dv_result['grade_summary'].items():
            print('%-3s : ' % (score_name) + str(percent(score_value)))
    except:
        if (dv_result['subscriber_count'] == 0):
            print("Empty list of emails were sent for dv validation!")
            print("Perhaps no new email to check dv?")
            print("Program terminated")
            return 0
        else:
            print("Something goes wrong")

    spinner.stop()
Example #8
def add_aks(name,force):
    

    '''
    \b
    Example:
    \b
    # Add all AKS clusters from Azure Subscription.
    kubeasy aks add -n all

    \b
    # Add specific AKS clusters from Azure Subscription.
    kubeasy aks add -n <aksCluster>

    '''
   

    spinner = Halo(text=colorama.Fore.GREEN + 'Logging into Azure using Azure CLI..', spinner='dots',color='yellow')
    spinner.start()

    if not login('azure',spinner):
        spinner.fail(colorama.Fore.RED + 'Azure login failed')
        sys.exit(1)

    spinner.stop()

    spinner = Halo(text=colorama.Fore.GREEN + 'Getting Kubernetes Configuration for {}'.format(name), spinner='dots',color='yellow')
    spinner.start()

    if name == 'all':
        for key in get_K8SList('azure'):
            if not _isExist(key) or (_isExist(key) and force):
                addConfig(spinner, 'azure', key)
            else:
                spinner.info(colorama.Fore.GREEN + '"{}" is already configured for the Kubeasy, Cheers ! '.format(key))

    elif not _isExist(name) or (_isExist(name) and force):
        addConfig(spinner, 'azure', name)

    else:
        spinner.info(colorama.Fore.GREEN + '"{}" is already configured for the Kubeasy, Cheers !'.format(name))

    spinner.stop()
Example #9
def wait_then_open(url):
    """
    Waits for a bit then opens a URL.  Useful for waiting for a proxy to come up, and then open the URL.
    """
    spinner = Halo(text=colorama.Fore.GREEN +
                   'Opening Kubernetes Dashboard for {} --> {}'.format(
                       get_current_context(), url),
                   spinner='dots',
                   color='yellow')
    spinner.start()
    time.sleep(3)
    webbrowser.open_new_tab(url)
    spinner.info(
        colorama.Fore.GREEN +
        'Running dashboard for {}, Press CTRL+C to stop the port forwarding..'.
        format(get_current_context()))
Example #10
    def _run_test(cls, test_dict: dict):
        """
        A generic method handling the run of both disruptive and non
        disruptive tests.
        """

        spinner = Halo(spinner='dots', text_color='yellow')
        tc_class = test_dict["testClass"]
        volume_type = test_dict["volType"]
        mname = test_dict["moduleName"][:-3]

        tc_log_path = (f"{cls.base_log_path+test_dict['modulePath'][5:-3]}/"
                       f"{volume_type}/{mname}.log")

        # to calculate time spent to execute the test
        start = time.time()

        spinner.succeed(text=f"Running test case : {mname}-{volume_type}")
        runner_thread_obj = RunnerThread(tc_class, cls.param_obj, volume_type,
                                         mname, cls.logger, cls.env_obj,
                                         tc_log_path, cls.log_level)

        test_stats = runner_thread_obj.run_thread()

        test_stats['timeTaken'] = time.time() - start
        test_stats['tcNature'] = test_dict['tcNature']
        spinner.clear()
        result_text = f"{test_dict['moduleName'][:-3]}-{test_dict['volType']}"
        if test_stats['testResult'][0] is True:
            test_stats['testResult'] = "PASS"
            result_text += " PASS"
            spinner = Halo(spinner='dots', text_color='green')
            spinner.succeed(text=f"{mname}-{volume_type} Succeeded")
        elif test_stats['testResult'][0] is False:
            result_text += " FAIL"
            test_stats['testResult'] = "FAIL"
            spinner = Halo(spinner='dots', text_color='red')
            spinner.fail(f"{mname}-{volume_type} Failed")
        else:
            result_text += " SKIP"
            test_stats['testResult'] = "SKIP"
            spinner = Halo(spinner='dots', text_color='cyan')
            spinner.info(f"{mname}-{volume_type} SKIP")
        test_stats['component'] = tc_log_path.split('/')[-4]

        result_value = {test_dict["moduleName"][:-3]: test_stats}
        cls.job_result_queue.put(result_value)
Example #11
def add_gke(name,force):
    
    '''
    \b
    Example:
    \b
    # Add all GKE clusters .
    kubeasy gke add -n all

    \b
    # Add specific GKE clusters.
    kubeasy gke add -n <gkeCluster>
    '''

    spinner = Halo(text=colorama.Fore.GREEN + 'Logging into Google Cloud using gcloud ..', spinner='dots',color='yellow')
    spinner.start()

    if not login('google',spinner):
        spinner.fail(colorama.Fore.RED + 'Google Cloud login failed')
        sys.exit(1)
    
    spinner.stop()

    spinner = Halo(text=colorama.Fore.GREEN + 'Getting Kubernetes Configuration for {}'.format(name), spinner='dots',color='yellow')
    
    spinner.start()

    if name == 'all':
        for key in get_K8SList('google'):
            if not _isExist(key) or (_isExist(key) and force):
                addConfig(spinner, 'google', key)
            else:
                spinner.info(colorama.Fore.GREEN + '"{}" is already configured for the Kubeasy, Cheers ! '.format(key))

    elif not _isExist(name) or (_isExist(name) and force):
        addConfig(spinner, 'google', name)

    else:
        spinner.info(colorama.Fore.GREEN + '"{}" is already configured for the Kubeasy, Cheers !'.format(name))

    spinner.stop()
Example #12
def curl(src, dest):
    """ Installs `src` to path `dest` """
    spinner = Halo(text="curl {}".format(dest),
                   spinner="dots",
                   placement="right")
    spinner.start()
    if os.path.exists(dest):
        spinner.info("{} already exists".format(dest))
        return

    try:
        sh.curl("-fLo", dest, src)
        spinner.succeed()
    except sh.ErrorReturnCode as err:
        err_message = "\n\t" + err.stderr.replace("\n", "\n\t")
        logging.error("Error downloading file `%s`: %s", src, err_message)
        spinner.fail()
Example #13
def init_config(config_file_name):
  spinner = Halo(text='Initializing config', spinner='dots')
  spinner.start()

  # Decides where are we working on.
  env = os.getenv('ENV') or 'DEV'

  spinner.info(f'Using {env} environment.')

  # Fetches config.
  config_file_contents = configparser.ConfigParser()
  config_file_contents.read_file(open(config_file_name))

  config['data_origin_url'] = config_file_contents.get('DEFAULT', 'DataOriginURL')
  config['destination_url'] = config_file_contents.get(env, 'DestinationURL')
  config['keycloak_url'] = config_file_contents.get(env, 'KeycloakURL')
  config['mirid_controller_url'] = config_file_contents.get(env, 'mirIDControllerURL')
  config['EMPTY_FIELD_LITERAL'] = config_file_contents.get(env, 'EmptyFieldLiteral')
Example #14
def main(config):
    print("\n[Time] %s" %
          datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
    print("开始创建登记任务")
    spinner = Halo(text='Loading', spinner='dots')
    spinner.start('正在创建登记实例...')  # "Creating the check-in instance..."
    ci = clockin(config)
    spinner.succeed('创建登记实例成功')  # "Check-in instance created"

    spinner.start(text='登录...')  # "Logging in..."

    try:
        log = ci.login()
        if log == 'un_uid_and_continue':
            spinner.info('学号不存在,测试登录')  # "Student ID not found, trying test login"
        elif log == 'un_uid_and_exit':
            spinner.fail('学号不存在,请修改配置')  # "Student ID not found, please fix the config"
            return
        spinner.succeed('登陆成功')  # "Logged in successfully"
    except Exception as err:
        spinner.fail(str(err))
        return

    if log == 'is_writted':
        spinner.info('今日已登记,无需再次登记')  # "Already checked in today, nothing to do"
        return

    # log = ci.for_iter()
    # if log == 'is_writted':
    #     spinner.info('今日已登记,无需再次登记')
    #     return

    spinner.start(text='打卡中...')  # "Checking in..."

    try:
        ci.fillin()
        spinner.succeed('打卡成功')  # "Check-in succeeded"
        time.sleep(5)
        ci.driver.close()
    except Exception as err:
        spinner.fail(str(err))
        spinner.fail('打卡失败')  # "Check-in failed"
        return
Example #15
def curl(src, dest):
    """ Installs `src` to path `dest` """
    spinner = Halo(
        text="curl {}".format(dest),
        spinner="dots",
        placement="right"
    )
    spinner.start()
    if os.path.exists(dest):
        spinner.info("{} already exists".format(dest))
        return

    try:
        sh.curl("-fLo", dest, src)
        spinner.succeed()
    except sh.ErrorReturnCode as err:
        err_message = "\n\t" + err.stderr.replace("\n", "\n\t")
        logging.error(
            "Error downloading file `%s`: %s", src, err_message
        )
        spinner.fail()
Example #16
def add_students(
    github_students: List[str] = typer.Argument(
        metavar="student_handles",
        default=...,
        help="list of student handles separated by white space",
    ),
    github_team: Optional[str] = typer.Option(
        None, "--team", help="invite to the specfic team under organization",
    ),
    yes: bool = opt_all_yes,
    dry: bool = opt_dry,
    github_token: Optional[str] = opt_github_token,
    github_organization: Optional[str] = opt_gh_org,
):
    """
    Invite students to join our Github organization
    """
    ensure_config_exists()

    def fallback(val, fallback_value):
        return val if val else fallback_value

    # Handle default value manually because we'll change our config after app starts up
    github_token: str = fallback(
        github_token, app_context.config.github.personal_access_token
    )
    github_organization: str = fallback(
        github_organization, app_context.config.github.organization
    )
    github_team: str = fallback(
        github_team, app_context.config.add_students.default_team_slug
    )

    safety = SafetyActor(dry=dry)
    safety.ensure_gh_token(github_token)

    # TODO: use logging lib to log messages
    spinner = Halo(stream=sys.stderr)
    if dry:
        spinner.info("Dry run")

    if not (
        yes or typer.confirm(f"Add students to {github_organization}/{github_team}?")
    ):
        raise typer.Abort()

    spinner.info("fetch existing team members from GitHub")
    team = Team(
        dry=dry,
        org=github_organization,
        team_slug=github_team,
        github_token=github_token,
    )
    num_member = len(team.members.keys())

    spinner.succeed(f" target team: {github_team} ({num_member} members) ")

    existed_members = set(team.members.keys())
    outside_users = list(set(github_students) - existed_members)
    spinner.info("Check valid Github users")
    invalid_handles = invalid_user_handles(
        outside_users, github_token=github_token, safety=safety, spinner=spinner
    )

    if len(invalid_handles) != 0:
        print("non-existed github user handles:")
        # control strings take space
        print_table(invalid_handles)
    non_member_valid_users = list(set(outside_users) - set(invalid_handles))

    print(f"Users to add (total:{len(non_member_valid_users)})")
    print_table(non_member_valid_users)
    print("-" * 30)

    spinner.info("start to invite users")
    success_user, failed_users = invite_user_to_team(
        team=team, users=non_member_valid_users, spinner=spinner
    )
    if len(failed_users) > 0:
        print("Users failed to add")
        print_table(failed_users)

    spinner.succeed("Add students successfully")
Example #17
def main(override_args=None):
    """Method to start the script"""
    logger.debug(f'START: get_threats_from_query_hash.py')

    # Load initial args
    parser = BaseScripts.start(
        'Retrieve a list of response from a given query hash.')
    parser.add_argument(
        '--query_fields',
        help=
        'fields to be retrieved from the threat (default: only the hashkey)\n'
        'If an atom detail isn\'t present in a particular atom, empty string is returned.',
        nargs='+',
        default=['threat_hashkey'],
    )
    parser.add_argument(
        '--list',
        help=
        'Turn the output in a list (require query_fields to be a single element)',
        action='store_true',
    )
    required_named = parser.add_argument_group('required arguments')
    required_named.add_argument(
        'query_hash',
        help=
        'the query hash from which to retrieve the response hashkeys or a path to the query body json file',
    )
    if override_args:
        args = parser.parse_args(override_args)
    else:
        args = parser.parse_args()
    configure_logging(args.loglevel)

    if len(args.query_fields) > 1 and args.list:
        parser.error(
            "List output format is only available if a single element is queried (via query_fields)"
        )

    query_body = {}
    query_hash = args.query_hash
    if len(query_hash) != 32 or os.path.exists(query_hash):
        try:
            with open(query_hash, 'r') as query_body_file:
                query_body = json.load(query_body_file)
        except FileNotFoundError:
            logger.error(
                f"Couldn't understand the given value as a query hash or path to query body: {query_hash}"
            )
            exit(1)

    # Load api_endpoints and tokens
    dtl = Datalake(env=args.env, log_level=args.loglevel)
    logger.debug(
        f'Start to search for threat from the query hash:{query_hash}')
    spinner = None
    if logger.isEnabledFor(logging.INFO):
        spinner = Halo(text=f'Creating bulk task', spinner='dots')
        spinner.start()

    task = dtl.BulkSearch.create_task(query_body=query_body,
                                      query_hash=query_hash,
                                      query_fields=args.query_fields)
    if spinner:
        spinner.text = f'Waiting for bulk task {task.uuid} response'
    response = task.download_sync()
    original_count = response.get('count', 0)
    if spinner:
        spinner.succeed()
        spinner.info(
            f'Number of threats that have been retrieved: {original_count}')

    formatted_output = format_output(response, args.list)
    if args.output:
        with open(args.output, 'w') as output:
            output.write(formatted_output)
    else:
        logger.info(formatted_output)

    if args.output:
        logger.info(f'Threats saved in {args.output}')
    else:
        logger.info('Done')
Example #18
def filterScrape(need, category, page):

    spinner = Halo(text='Scraping content', spinner='dots', animation='bounce')
    headers = {
        'User-Agent':
        'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'
    }
    output_dic = OrderedDict()
    found = 0

    try:
        while (found < need):
            spinner.start()
            url = "https://ctftime.org/writeups?page={}&hidden-tags={}".format(
                page, category)
            spinner.text = "Scraping Page: {}".format(page)
            response = requests.get(url, headers=headers)
            soup = BeautifulSoup(response.content, 'html.parser')
            count_per_page = 0
            for tr in soup.find_all('tr')[1:]:
                tds = tr.find_all('td')
                w_no = tds[4].a["href"]
                task_name = tds[1].text
                writeup_url = "https://ctftime.org/" + w_no
                r = requests.get(writeup_url, headers=headers)
                spinner.text = "Parsing {} ({})".format(
                    w_no,
                    task_name.encode('ascii', 'ignore').decode('ascii'))
                spinner.color = "red"

                if (len(task_name) > 30):
                    task_name = task_name[:27] + '...'

                flag = 0
                original_url = ""
                new_soup = BeautifulSoup(r.content, 'lxml')
                a = new_soup.find_all('a')

                for link in a:
                    if link.text == "Original writeup":
                        original_url = link['href']
                        if (len(original_url) <= 125):
                            flag = 1
                            break
                if flag == 1:
                    if (task_name in output_dic):
                        output_dic[task_name] += '\n' + original_url
                    else:
                        output_dic[task_name] = original_url
                        count_per_page += 1
                        found += 1
                else:
                    if task_name not in output_dic:
                        count_per_page += 1
                        found += 1
                    output_dic[task_name] = writeup_url

                if (found == need):
                    break
                else:
                    continue

            if (count_per_page == 0):
                spinner.fail("Page {} doesn't exist.".format(page))
                spinner.info("Try decreasing the Page Seed or limit")
                spinner.info("Try changing the category")
                print(
                    "Such as : Change 'rev' -> 'reverse engineering' to get more results"
                )
                break
            else:
                spinner.succeed(
                    "Gathered writeups for {} tasks from page {}".format(
                        count_per_page, page))
                spinner.color = "cyan"
                page += 1

        return output_dic

    except (KeyboardInterrupt, SystemExit):
        spinner.warn('Program exited unexpectedly')
        exit()
Example #19
def announce_grade(
    homework_prefix: str = typer.Argument(
        ..., help="prefix of the target homework"),
    feedback_source_repo: Optional[str] = typer.Option(
        None, show_default=True, help="Repo contains students' feedbacks"),
    only_id: Optional[str] = typer.Option(default=None,
                                          help="only id to announce"),
    token: Optional[str] = opt_github_token,
    org: str = opt_gh_org,
    dry: bool = typer.Option(
        False, "--dry", help="dry run, do not publish result to the remote"),
    yes: bool = opt_all_yes,
):
    """Announce student grades to each hw repo"""
    ensure_config_exists()

    def fallback(val, fallback_value):
        return val if val else fallback_value

    # Handle default value manually because we'll change our config after app starts up
    token: str = fallback(token,
                          app_context.config.github.personal_access_token)
    org: str = fallback(org, app_context.config.github.organization)
    feedback_source_repo: str = fallback(
        feedback_source_repo,
        app_context.config.announce_grade.feedback_source_repo)

    ensure_gh_token(token)
    if not (yes or
            typer.confirm(f"Add annouce_grade to {org}/{homework_prefix}?")):
        raise typer.Abort()

    # TODO: use logging lib to log messages
    spinner = Halo(stream=sys.stderr)

    student_feedback_title = f"Grade for {homework_prefix}"

    gstudents = Gstudents()
    feedback_vars = gstudents.left_join(homework_prefix)

    # Clone feedback repo & set needed variables
    cur = Path(".")

    for d in cur.glob("feedback-tmp-*"):
        shutil.rmtree(d)
    spinner.info("delete dated folder")

    root_folder = Path(
        tempfile.mkdtemp(
            prefix="feedback-tmp-{}-".format(
                datetime.now().strftime("%b%d%H%M%S")),
            dir=".",
        ))
    spinner.succeed(f"Create tmp folder {root_folder}")

    feedback_repo_path = root_folder / "feedbacks"

    spinner.info(f"cloning feeback source repo : {feedback_source_repo}")
    _, t = measure_time(sp.run)(
        [
            "git",
            "clone",
            f"https://github.com/{org}/{feedback_source_repo}.git",
            feedback_repo_path.name,
        ],
        cwd=root_folder,
    )
    spinner.succeed(
        f"cloning feeback source repo : {feedback_source_repo} ... {t:4.2f} sec"
    )
    client = httpx.AsyncClient(headers=httpx.Headers(
        {
            "User-Agent": "GitHubClassroomUtils/1.0",
            "Authorization": "token " + token,
            # needed for the check-suites request
            "Accept": "application/vnd.github.antiope-preview+json",
        }))

    hw_path = feedback_repo_path / homework_prefix / "reports"

    # generate feedbacks
    fbs, t = measure_time(gen_feedbacks)(homework_prefix, hw_path,
                                         feedback_vars)
    spinner.succeed(f"Generate content for feedbacks ... {t:5.3f} sec")

    # handle only_id
    if only_id:
        try:
            # detect possible buggy condition
            info = gstudents.get_student(only_id)
        except RuntimeError as e:
            print(" *=" * 30)
            print("Warning!")
            print(e)
            return
        only_repo_name = get_hw_repo_name(homework_prefix,
                                          info["github_handle"])
        fbs = list(filter(lambda fb: fb["repo_name"] == only_repo_name, fbs))

    async def push_to_remote(feedback_title, feedbacks):
        # push to remote
        async def push_feedback(fb):
            request_body = {"title": feedback_title, "body": fb["value"]}
            try:
                issue_num = await find_existing_issue(client, org,
                                                      fb["repo_name"],
                                                      feedback_title)
            except BaseException:
                print(f'error on {fb["repo_name"]}')
                return
            if issue_num:
                request_body["state"] = "open"  # reopen issue
                url = f"https://api.github.com/repos/{org}/{fb['repo_name']}/issues/{issue_num}"
                await edit_issue_async(client, url, issue_num, request_body)
            else:
                url = f"https://api.github.com/repos/{org}/{fb['repo_name']}/issues"
                await create_issue_async(client, url, request_body)
            print(f'success {fb["repo_name"]}')

        async with trio.open_nursery() as nursery:
            for fb in feedbacks:
                nursery.start_soon(push_feedback, fb)

    # print out target repos
    print("repo to announce grade:")
    pprint([fb["repo_name"] for fb in fbs])

    if dry:
        spinner.succeed("DRYRUN: skip push to remote")
    else:
        if typer.confirm("Do you want to continue?", default=False):
            _, t = measure_time(trio.run)(push_to_remote,
                                          student_feedback_title, fbs)
            spinner.succeed(f"Push feedbacks to remote ... {t:5.2f} sec")
        else:
            spinner.warn("You refused to publish to remote")

    spinner.succeed("finished announce grade")
    return
Example #20
def announce_grade(homework_prefix, token, dry, org, only_id,
                   feedback_source_repo):
    """announce student grades to each hw repo"""

    ensure_gh_token(token)
    # TODO: use logging lib to log messages
    spinner = Halo(stream=sys.stderr)

    student_feedback_title = f"Grade for {homework_prefix}"

    gstudents = Gstudents()
    feedback_vars = gstudents.left_join(homework_prefix)

    # Clone feedback repo & set needed variables
    cur = Path(".")

    for d in cur.glob("feedback-tmp-*"):
        shutil.rmtree(d)
    spinner.info("delete dated folder")

    root_folder = Path(
        tempfile.mkdtemp(
            prefix="feedback-tmp-{}-".format(
                datetime.now().strftime("%b%d%H%M%S")),
            dir=".",
        ))
    spinner.succeed(normal.txt("Create tmp folder ").kw(root_folder).to_str())

    feedback_repo_path = root_folder / "feedbacks"

    spinner.info(f"cloning feeback source repo : {feedback_source_repo}")
    _, t = measure_time(sp.run)(
        [
            "git",
            "clone",
            f"https://github.com/{org}/{feedback_source_repo}.git",
            feedback_repo_path.name,
        ],
        cwd=root_folder,
    )
    spinner.succeed(
        f"cloning feeback source repo : {feedback_source_repo} ... {t:4.2f} sec"
    )
    client = httpx.AsyncClient(headers=httpx.Headers(
        {
            "User-Agent": "GitHubClassroomUtils/1.0",
            "Authorization": "token " + token,
            # needed for the check-suites request
            "Accept": "application/vnd.github.antiope-preview+json",
        }))

    hw_path = feedback_repo_path / homework_prefix / "reports"

    # generate feedbacks
    fbs, t = measure_time(gen_feedbacks)(homework_prefix, hw_path,
                                         feedback_vars)
    spinner.succeed(f"Generate content for feedbacks ... {t:5.3f} sec")

    # handle only_id
    if only_id:
        try:
            # detect possible buggy condition
            info = gstudents.get_student(only_id)
        except RuntimeError as e:
            print(" *=" * 30)
            print("Warning!")
            print(e)
            return
        only_repo_name = get_hw_repo_name(homework_prefix,
                                          info["github_handle"])
        fbs = list(filter(lambda fb: fb["repo_name"] == only_repo_name, fbs))

    async def push_to_remote(feedback_title, feedbacks):
        # push to remote
        async def push_feedback(fb):
            request_body = {"title": feedback_title, "body": fb["value"]}
            try:
                issue_num = await find_existing_issue(client, org,
                                                      fb["repo_name"],
                                                      feedback_title)
            except BaseException:
                print(f'error on {fb["repo_name"]}')
                return
            if issue_num:
                request_body["state"] = "open"  # reopen issue
                url = f"https://api.github.com/repos/{org}/{fb['repo_name']}/issues/{issue_num}"
                await edit_issue_async(client, url, issue_num, request_body)
            else:
                url = f"https://api.github.com/repos/{org}/{fb['repo_name']}/issues"
                await create_issue_async(client, url, request_body)
            print(f'success {fb["repo_name"]}')

        async with trio.open_nursery() as nursery:
            for fb in feedbacks:
                nursery.start_soon(push_feedback, fb)

    # print out target repos
    print("repo to announce grade:")
    pprint([fb["repo_name"] for fb in fbs])

    if dry:
        spinner.succeed("DRYRUN: skip push to remote")
    else:
        if click.confirm("Do you want to continue?", default=False):
            _, t = measure_time(trio.run)(push_to_remote,
                                          student_feedback_title, fbs)
            spinner.succeed(f"Push feedbacks to remote ... {t:5.2f} sec")
        else:
            spinner.warn("You refused to publish to remote")

    spinner.succeed("finished announce grade")
    return
        print("Could not submit job!")
        sys.exit(1)

    job_id = resp['job_id']

    job_status_code = None

    spinner = Halo(text="Waiting for job updates", spinner='dots')
    spinner.start()

    while True:
        try:
            new_job_status_code = get_job_status(job_id)
            if new_job_status_code != job_status_code:
                if new_job_status_code in ("UNASSIGNED", "ASSIGNED"):
                    spinner.info(STATUS_CODE_MESSAGES[new_job_status_code])
                    spinner.start("Waiting for job updates")
                elif new_job_status_code == "FAILED":
                    spinner.fail(STATUS_CODE_MESSAGES[new_job_status_code])
                    spinner.start("Waiting for job updates")
                else:
                    # the job has succeeded
                    spinner.stop_and_persist(
                        symbol='🦄'.encode('utf-8'),
                        text=STATUS_CODE_MESSAGES[new_job_status_code])
                    break
                job_status_code = new_job_status_code
            time.sleep(JOB_STATUS_POLL_INTERVAL_SECS)
        except (KeyboardInterrupt, SystemExit):
            spinner.stop()
            break
Example #22
def file(path, template_file, load_vars=lambda: {}):
    """ Installs a template file using symlinks.

    If a file already exists at the specified path and it is not a symbolic
    link, then this function will print an error and return. If the file is
    a symlink to the `build` directory of your dotfiles repo, then this will
    check to see if the template has been modified since the file was last
    built.

    Args:
        path (str): Filesystem path where we should install the filled out
            template file.
        template_file (str): The filename of the template to install. The
            file should be located in the $ROOT/templates directory of this
            repository.
        load_vars (func): A function that will be run when the file is built to
            fill in template information. This is passed in as a function so
            that user input is only asked for when the file is built.
    """
    spinner = Halo(text=path, spinner="dots", placement="right")
    spinner.start()
    if os.path.exists(path) and not os.path.islink(path):
        print("Error: {} exists and is not a soft link".format(path))
        spinner.fail()
        return

    try:
        # Load template as a Jinja2 Template
        template_path = os.path.join(
            ROOT, os.path.join("templates", template_file)
        )
        template_mtime = os.path.getmtime(template_path)
        with open(template_path, "r") as template_file:
            template = Template(template_file.read())

        build_path = os.path.join(
            ROOT, os.path.join("build", os.path.basename(path))
        )
        if not os.path.exists(build_path):
            build_mtime = 0
        else:
            build_mtime = os.path.getmtime(build_path)

        # Build the template if the template has been modified since last build
        if template_mtime > build_mtime:
            # TODO (plemons): I should only do this if I actually need user
            # input. Theoretically, the load_vars function could just read
            # from a config file making this unnecessary
            spinner.info("Asking for user input for {}".format(path))
            if not os.path.exists(os.path.dirname(build_path)):
                os.makedirs(os.path.dirname(build_path))
            with open(build_path, 'w') as outfile:
                outfile.write(template.render(**load_vars()))

        path = os.path.expanduser(path)
        dirpath = os.path.dirname(path)
        if not os.path.exists(dirpath):
            os.makedirs(dirpath)
        if os.path.islink(path):
            os.unlink(path)
        os.symlink(build_path, path)
        spinner.succeed()
    except OSError as err:
        print(err)
        spinner.fail()
Example #23
class ResourceReporter:
    def __init__(self):
        self.spinner = Halo(text='', spinner='dots')

    def progress(self, resource):
        self.spinner.start(text='[ {0: <10}] {1}'.format(
            resource.status, resource.description))

    def succeed(self, resource):
        self.spinner.succeed(text='[ {0: <10}] {1} [{2}].'.format(
            resource.status, resource.description, resource.resource_id))

    def fail(self, resource):
        self.spinner.fail(text='[ {0: <10}] {1}'.format(
            resource.status, resource.description))

    def warn(self, text):
        self.spinner.warn(text=text)

    def info(self, resource):
        self.spinner.info(text='[ {} ] {} [{}].'.format(
            resource.status, resource.description, resource.resource_id))

    def report_stack_creation(self, name, resources, stack_id):
        cf = boto3.session.Session().resource('cloudformation')
        stack = cf.Stack(stack_id)
        rmap = {r.name: r for r in resources}
        completed = set()
        for r in resources:
            if r.status == Status.provided or r.status == Status.created:
                self.succeed(r)
                completed.add(r.name)

        current = None
        while stack.stack_status in [
                'CREATE_IN_PROGRESS', 'ROLLBACK_IN_PROGRESS'
        ]:
            states = {
                rs.logical_resource_id: rs
                for rs in stack.resource_summaries.all()
            }
            if current:
                rs = states.get(current)
                r = rmap.get(current)
                if rs.resource_status in ['CREATE_IN_PROGRESS']:
                    time.sleep(2)
                    continue
                else:
                    self.report_completed_resource(completed, r, rs,
                                                   ['CREATE_COMPLETE'],
                                                   Status.created)
                    current = None

            for name, rs in iteritems(states):
                r = rmap.get(name)
                if name not in completed and r:
                    if rs.resource_status in ['CREATE_IN_PROGRESS']:
                        current = name
                        r.status = Status.creating
                        self.progress(r)
                        break

                    self.report_completed_resource(completed, r, rs,
                                                   ['CREATE_COMPLETE'],
                                                   Status.created)
            time.sleep(2)
            stack = cf.Stack(stack_id)

        states = {
            rs.logical_resource_id: rs
            for rs in stack.resource_summaries.all()
        }
        if current:
            r = rmap.get(current)
            rs = states.get(current)
            self.report_completed_resource(completed, r, rs,
                                           ['CREATE_COMPLETE'], Status.created)

        for name, rs in iteritems(states):
            r = rmap.get(name)
            if name not in completed and r:
                self.report_completed_resource(completed, r, rs,
                                               ['CREATE_COMPLETE'],
                                               Status.created)

        if stack.stack_status in [
                'CREATE_FAILED', 'ROLLBACK_IN_PROGRESS', 'ROLLBACK_FAILED',
                'ROLLBACK_COMPLETE'
        ]:
            raise EKSCliException('Failed to create EKS cluster {}: {}'.format(
                name, stack.stack_status))

        return stack

    def report_completed_resource(self, completed, resource, resource_summary,
                                  success_states, success):
        if resource_summary.resource_status in success_states:
            resource.status = success
            resource.resource_id = resource_summary.physical_resource_id
            self.succeed(resource)
        else:
            resource.status = Status.failed
            self.fail(resource)
        completed.add(resource.name)

    def report_stack_deletion(self, name, resources, stack_id):
        cf = boto3.session.Session().resource('cloudformation')
        stack = cf.Stack(stack_id)
        rmap = {r.name: r for r in resources}
        completed = set()
        for r in resources:
            if r.status == Status.not_exist or r.status == Status.deleted or r.status == Status.provided:
                self.succeed(r)
                completed.add(r.name)

        current = None
        while stack.stack_status not in ['DELETE_COMPLETE', 'DELETE_FAILED']:
            states = {
                rs.logical_resource_id: rs
                for rs in stack.resource_summaries.all()
            }
            if current:
                rs = states.get(current)
                r = rmap.get(current)
                if rs.resource_status in [
                        'DELETE_IN_PROGRESS', 'CREATE_COMPLETE',
                        'UPDATE_COMPLETE'
                ]:
                    time.sleep(2)
                    continue
                else:
                    self.report_completed_resource(completed, r, rs,
                                                   ['DELETE_COMPLETE'],
                                                   Status.deleted)
                    current = None

            for name, rs in iteritems(states):
                r = rmap.get(name)
                if name not in completed and r:
                    if rs.resource_status in [
                            'DELETE_IN_PROGRESS', 'CREATE_COMPLETE',
                            'UPDATE_COMPLETE'
                    ]:
                        current = name
                        r.status = Status.deleting
                        self.progress(r)
                        break

                    self.report_completed_resource(completed, r, rs,
                                                   ['DELETE_COMPLETE'],
                                                   Status.deleted)
            time.sleep(2)
            stack = cf.Stack(stack_id)

        states = {
            rs.logical_resource_id: rs
            for rs in stack.resource_summaries.all()
        }
        if current:
            r = rmap.get(current)
            rs = states.get(current)
            self.report_completed_resource(completed, r, rs,
                                           ['DELETE_COMPLETE'], Status.deleted)

        for name, rs in iteritems(states):
            r = rmap.get(name)
            if name not in completed and r:
                self.report_completed_resource(completed, r, rs,
                                               ['DELETE_COMPLETE'],
                                               Status.deleted)

        if stack.stack_status in ['DELETE_FAILED']:
            raise EKSCliException('Failed to delete EKS cluster {}: {}'.format(
                name, stack.stack_status))

        return
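
The ResourceReporter above also depends on iteritems (from six), a Status enum and an EKSCliException type that come from its own package. Assumed minimal stand-ins, useful only for reading the example in isolation:

from enum import Enum

from six import iteritems  # dict iteration helper used in the report_* methods


class Status(Enum):
    # assumed states; the real CLI may define more
    provided = "provided"
    creating = "creating"
    created = "created"
    deleting = "deleting"
    deleted = "deleted"
    not_exist = "not_exist"
    failed = "failed"


class EKSCliException(Exception):
    """Assumed error type raised when a CloudFormation stack operation fails."""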
Example #24
from model import *
from uuid import uuid4
from halo import Halo
import json
spinner = Halo(text_color="blue")
spinner.start()
try:
    spinner.info(text="initializing test")
    #User test
    SessionID = str(uuid4())
    spinner.info(text="Creating User => ")
    user = User(name="usertest",
                email="*****@*****.**",
                password="******",
                SessionID=SessionID)
    user.add_user()
    spinner.text_color = "green"
    spinner.succeed(text="passed")
    print('+++++++++++++++++++++++++')
    spinner.text_color = "blue"
    spinner.info(text="Recovering User =>")
    document = user.find_user_by_SessionID(SessionID)
    document['_id'] = str(document['_id'])
    print(json.dumps(document, indent=4, sort_keys=True))
    spinner.text_color = "green"
    spinner.succeed(text="passed")
    print('+++++++++++++++++++++++++')
    spinner.text_color = "blue"
    spinner.info(text="Loging User =>")
    print(user.login(SessionID))
    spinner.text_color = "green"
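
The test script above is cut off inside its try block (the original continues with further checks), so as shown it would not parse. A minimal, assumed way to close the block could be:

    # ... (remaining checks from the original script) ...
    spinner.succeed(text="passed")
except Exception as err:
    # assumed error handling, not part of the original snippet
    spinner.text_color = "red"
    spinner.fail(text="test failed: {}".format(err))
finally:
    spinner.stop()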
Example #25
while True:
    try:
        spinner.start()
        subreddit = reddit.subreddit('random')
        domains = ['i.redd.it', 'i.imgur.com']
        limit = None
        print('Random Subreddit Is: ', subreddit)

        submissions = list(subreddit.top('all', limit=limit))
        submission = random.choice(submissions)
        if submission.domain in domains:
            im = pyimgur.Imgur(imgur_id)
            uploaded_image = im.upload_image(url=submission.url)
            with open('links.txt', "a") as f:
                f.write(uploaded_image.link + "\n")
            reddit.validate_on_submit = True
            subreddit.submit(submission.title, url=uploaded_image.link)
            spinner.succeed('success')

        elif submission.domain not in domains:
            spinner.info('domain is not in domains :(')

    except Exception as e:
        exc = str(str(e))
        spinner.fail(text=exc)
        time.sleep(60)

    except KeyboardInterrupt:
        spinner.warn(text='shutting down :(')
        quit()
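
The loop in Example #25 assumes that spinner, reddit and imgur_id were created earlier in the script. A hedged sketch of that setup, with placeholder credentials and an assumed client construction:

import random
import time

import praw      # Reddit client providing reddit.subreddit(...)
import pyimgur
from halo import Halo

reddit = praw.Reddit(
    client_id="YOUR_REDDIT_CLIENT_ID",
    client_secret="YOUR_REDDIT_CLIENT_SECRET",
    username="YOUR_REDDIT_USERNAME",
    password="YOUR_REDDIT_PASSWORD",
    user_agent="halo-example-bot",
)
imgur_id = "YOUR_IMGUR_CLIENT_ID"
spinner = Halo(text="reposting", spinner="dots")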
Example #26
    def _update_with_appimageupdatetool(self,
                                        path_appimageupdate,
                                        path,
                                        update_old_data=True,
                                        show_spinner=True):
        path_to_old_appimage = path
        spinner = Halo('Checking for updates', spinner='dots')
        if show_spinner:
            spinner.start()
        _check_update_command = shlex.split(
            "{au} --check-for-update {app}".format(
                au=path_appimageupdate,
                app=path_to_old_appimage,
            ))

        _check_update_proc = subprocess.Popen(_check_update_command,
                                              stdout=subprocess.PIPE,
                                              stderr=subprocess.PIPE)
        e_code = _check_update_proc.wait(600)
        if e_code == 0:
            if show_spinner:
                spinner.succeed("Already up-to-date!")
            return
        elif e_code == 1:
            if show_spinner:
                spinner.info("Updates found")
                spinner.start("Updating {}".format(self.app))
            _update_proc = subprocess.Popen(shlex.split(
                "{au} --remove-old {app}".format(
                    au=path_appimageupdate,
                    app=path_to_old_appimage,
                )),
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE)
            _update_proc_e_code = _update_proc.wait(5000)
            _update_proc_out, _update_proc_err = \
                (x.decode() for x in _update_proc.communicate())
            if _update_proc_e_code == 0:
                # update completed successfully
                if show_spinner:
                    spinner.succeed("Update Successful!")
                    spinner.start("Setting up new AppImage")
                _file = re.findall(r"Target file: (.*)", _update_proc_out)

                if len(_file) == 1:
                    output_file = _file[0]
                    if show_spinner:
                        spinner.info("New file name is {}".format(output_file))
                    if update_old_data:
                        _cb_data = self.appdata()
                        _cb_data['path'] = output_file
                        command_wrapper_file_path = \
                            os.path.join(self.cfgmgr.bin, self.app)

                        with open(self.app_data_path, 'w') as w:
                            json.dump(_cb_data, w)

                        with open(command_wrapper_file_path, 'w') as fp:
                            fp.write(
                                COMMAND_WRAPPER.format(
                                    path_to_appimage=output_file))
                        if show_spinner:
                            spinner.start("Configuring desktop files...")
                        try:
                            libappimage = LibAppImage()
                            if libappimage.is_registered_in_system(
                                    path_to_old_appimage):
                                libappimage.unregister_in_system(
                                    path_to_old_appimage)
                            libappimage.register_in_system(output_file)
                        except LibAppImageRuntimeError:
                            pass  # TODO: add some more stuff here
                        except LibAppImageNotFoundError:
                            pass  # TODO: add some more stuff here
                    if show_spinner:
                        spinner.succeed("Done!")
                else:
                    if show_spinner:
                        spinner.stop()
                    print(_file)
                    raise RuntimeError("More than one link found")
            else:
                # Was unsuccessful
                if show_spinner:
                    spinner.fail("Update failed! :'(")
                    spinner.start("Cleaning up")
                print(_update_proc_out, _update_proc_err)
        elif show_spinner:
            spinner.fail("Update information is not embedded within the "
                         "AppImage. ")
            spinner.fail("Consider informing the AppImage author to add a "
                         ".zsync file")
            spinner.fail("Alternatively, pass the --no-appimageupdate option")

        out, err = (x.decode() for x in _check_update_proc.communicate())
        print(out, err)
        if show_spinner:
            spinner.stop()
Example #27
def add_students(student_handles, dry, token, org, team):
    """
    student_handles: github user to add (usernames)
    """
    if len(student_handles) == 0:
        print("required handles")
        return 1

    github_students = student_handles
    github_organization = org
    github_team = team
    github_token = token

    ensure_gh_token(github_token)

    # TODO: use logging lib to log messages
    spinner = Halo(stream=sys.stderr)
    if dry:
        spinner.info("Dry run")

    spinner.info("fetch existing team members from GitHub")
    team = Team(github_organization,
                team_slug=github_team,
                github_token=github_token)
    num_member = len(team.members.keys())
    words = (normal.txt("target team: ").kw(f"{github_team}").txt("( ").kw2(
        num_member).txt(" members) "))
    spinner.succeed(words.to_str())

    if dry:
        existed_members = set()
    else:
        existed_members = set(team.members.keys())
    outside_users = list(set(github_students) - existed_members)

    # print("Users to invite:")
    # print_table(outside_users, cols=5, wide=15)

    spinner.info("Check valid Github users")
    invalid_id = []
    spinner.start()
    total = len(outside_users)
    for idx, u in enumerate(outside_users, start=1):
        text = "" if not dry else "[skip]: "
        text += f"{idx}/{total} Check valid GitHub username : {u}"
        if dry:
            spinner.succeed(text)
        else:
            if check_is_github_user(u, github_token):
                spinner.succeed(text)
            else:
                spinner.fail(text)
                invalid_id.append(u)

    if len(invalid_id) != 0:
        print("Find non-existed github user names:")
        # control strings take space
        print_table([warn.txt(i).to_str() for i in invalid_id],
                    cols=5,
                    wide=25)

    non_member_valid_users = list(set(outside_users) - set(invalid_id))

    # membership info
    membership_infos = {key: "unknown" for key in non_member_valid_users}
    total = len(non_member_valid_users)
    spinner.info("Check Membership information")
    for idx, username in enumerate(non_member_valid_users, start=1):
        skip = "" if not dry else "[skip]: "
        spinner.start(f"{skip}{idx}/{total}: {username}")
        if not dry:
            res = team.get_memberships(username)
            if res.status_code == 200:
                membership_infos[username] = res.json()["state"]
        spinner.succeed()

    pending_users = [
        u for u in membership_infos.keys() if membership_infos[u] == "pending"
    ]
    no_memship_users = [
        u for u in membership_infos.keys() if membership_infos[u] == "unknown"
    ]

    print(f"Users already in pending state (total:{len(pending_users)}):")
    print_table(pending_users)

    print(f"Users to add (total:{len(no_memship_users)})")
    print_table(no_memship_users)
    print("-" * 30)

    failed_users = []
    spinner.info("start to invite users")
    for user_name in no_memship_users:
        if dry:
            spinner.info(f"[Skip] add user: {user_name}")
        else:
            if True == add_user(team, user_name=user_name):
                spinner.succeed(f"add user: {user_name}")
            else:
                failed_users.append(user_name)
                spinner.fail(f"failed to add user: {user_name}")
    failed_users = list(set(failed_users))

    if len(failed_users) != 0:
        print("Users failed to add")
        print_table(failed_users)

    spinner.succeed("Adding students successfully")
Example #28
def patch_project(hw_prefix, patch_branch, source_repo, token, org, only_repo,
                  dry):
    """Patch to student homeworks"""
    ensure_git_cached()
    ensure_gh_token(token)
    # init
    spinner = Halo(stream=sys.stderr)

    if source_repo == "":
        source_repo = f"tmpl-{hw_prefix}-revise"

    # Check if repo already contains the patched branch. Skip if so.
    #  api : https://developer.github.com/v3/git/refs/#get-a-reference
    res = requests.get(
        f"https://api.github.com/repos/{org}/{source_repo}/git/refs/heads/{patch_branch}",
        headers=github_headers(token),
    )
    if res.status_code != 200:  # this branch does not exist on the remote
        spinner.fail(
            f"branch : `{patch_branch}` doesn't exist on repo:{org}/{source_repo} "
        )
        return

    cur = Path(".")
    for d in cur.glob("patch-*"):
        shutil.rmtree(d)
    spinner.info("delete dated folder")

    spinner.start(
        normal.txt("Fetch issue template").kw(patch_branch).txt(" from ").kw(
            source_repo).to_str())
    # Fetch patch template on the source repo
    issues = get_github_endpoint_paged_list(
        endpoint=f"repos/{org}/{source_repo}/issues",
        github_token=token,
        verbose=False)

    def find_target_issue() -> Optional[Dict]:
        for issue in issues:
            if issue["title"].strip() == patch_branch.strip():
                return issue
        return None

    target_issue = find_target_issue()
    if not target_issue:
        raise Exception(
            f"cannot found issue tmpl `{patch_branch}` on `{source_repo}`")
    issue_tmpl_body = target_issue["body"]
    spinner.succeed()

    root_folder = Path(
        tempfile.mkdtemp(
            prefix="patch-{}-{}-".format(
                patch_branch,
                datetime.now().strftime("%b%d%H%M%S")),
            dir=".",
        ))

    spinner.succeed(normal.txt("Create tmp folder ").kw(root_folder).to_str())
    spinner.info(
        normal.txt("Fetch source repo ").kw(source_repo).txt(
            " from GitHub").to_str())
    src_repo_path = root_folder / "source_repo"
    sp.run(
        [
            "git",
            "clone",
            f"https://github.com/{org}/{source_repo}.git",
            src_repo_path.name,
        ],
        cwd=root_folder,
    )

    src_repo = Repo(src_repo_path)
    sp.run(
        ["git", "checkout", "--track", f"origin/{patch_branch}"],
        cwd=src_repo_path,
        stdout=sp.DEVNULL,
        stderr=sp.DEVNULL,
    )
    spinner.succeed()

    # Pasting changed files into students repo
    src_repo_git = src_repo.git
    src_repo_git.checkout(patch_branch)
    changed_files, renamed_files = get_changed_files(
        master_commit=src_repo.heads["master"].commit,
        patch_commit=src_repo.heads[patch_branch].commit,
    )

    spinner.start("Fetch information for homework repo")
    spinner.succeed()
    if only_repo is not None:
        repos = [
            re for re in query_matching_repos(org,
                                              github_repo_prefix=only_repo,
                                              github_token=token,
                                              verbose=False)
            if re["name"] == only_repo
        ]
        repo = next(iter(repos), None)
        if repo:
            spinner.info(
                normal.txt("Only patch to repo : ").kw(repo["name"]).to_str())
        repos = [repo]
    else:
        repos = query_matching_repos(org,
                                     github_repo_prefix=hw_prefix,
                                     github_token=token,
                                     verbose=False)
    spinner.succeed()

    # Patch to student repos
    student_path = root_folder / "student_repos"
    student_path.mkdir()
    for repo_idx, r in enumerate(repos, start=1):
        pre_prompt_str = (normal.txt(f"({repo_idx}/{len(repos)})").kw(
            f" {r['name']} ").to_str())
        spinner.start()

        # Check if repo already contains the patched branch. Skip if so.
        #  api : https://developer.github.com/v3/git/refs/#get-a-reference
        res = requests.get(
            f"https://api.github.com/repos/{org}/{r['name']}/git/refs/heads/{patch_branch}",
            headers=github_headers(token),
        )
        if res.status_code == 200:  # this branch exists in the remote
            spinner.text = (
                pre_prompt_str +
                normal.kw("  Skip  ").txt("already patched ").to_str())
            spinner.succeed()
            continue

        spinner.text = pre_prompt_str + normal.txt(" cloning repo...").to_str()
        sp.run(
            ["git", "clone", "--depth=1", r["html_url"]],
            cwd=student_path,
            stdout=sp.DEVNULL,
            stderr=sp.DEVNULL,
        )

        hw_repo_name = r["html_url"].rsplit("/")[-1]

        # open a new branch & checkout to that branch
        sp.run(
            ["git", "checkout", "-b", patch_branch],
            cwd=student_path / hw_repo_name,
            stdout=sp.DEVNULL,
            stderr=sp.DEVNULL,
        )

        # copy file to student repo
        for f in changed_files.keys():
            (student_path / hw_repo_name / f).parent.mkdir(parents=True,
                                                           exist_ok=True)
            shutil.copyfile(src=src_repo_path / f,
                            dst=student_path / hw_repo_name / f)
        for f in renamed_files.keys():
            os.remove(student_path / hw_repo_name / f)

        # changed_files = get_changed_files(
        #     master_commit = src_repo.heads['master'].commit,
        #     patch_commit = src_repo.heads[patch_branch].commit
        # )
        # push (publish) that branch to student repo
        sp.run(
            ["git", "add", "."],
            cwd=student_path / hw_repo_name,
            stdout=sp.DEVNULL,
            stderr=sp.DEVNULL,
        )

        # Skip if nothing changed
        student_repo = Repo(student_path / hw_repo_name)
        if len(student_repo.index.diff("HEAD")) == 0:
            spinner.text = (
                pre_prompt_str +
                normal.kw2("  Passed  ").txt("Repo no change").to_str())
            spinner.succeed()
            continue

        sp.run(
            [
                "git", "commit", "-m",
                f":construction_worker: Patch: {patch_branch}"
            ],
            cwd=student_path / hw_repo_name,
            stdout=sp.DEVNULL,
            stderr=sp.DEVNULL,
        )

        spinner.text = pre_prompt_str + normal.kw(
            " publish patch to remote...").to_str()
        if dry:
            spinner.succeed(pre_prompt_str + normal.txt(" Patched ").to_str())
            continue
        res = sp.run(
            ["git", "push", "-u", "origin", patch_branch],
            cwd=student_path / hw_repo_name,
            stdout=sp.DEVNULL,
            stderr=sp.DEVNULL,
        )
        if res.returncode != 0:
            spinner.text = (pre_prompt_str + warn.kw("  Failed  ") +
                            warn.txt(" Cannot push branch ").kw2(
                                patch_branch).txt(" to origin").to_str())
            spinner.fail()
            continue

        # open a pull request on the student's repo
        # student_repo/patch-branch  -> student_repo/master
        body = {
            "title": f"[PATCH] {patch_branch}",
            "body": issue_tmpl_body,
            "head": patch_branch,
            "base": "master",
        }
        res = requests.post(
            f"https://api.github.com/repos/{org}/{r['name']}/pulls",
            headers=github_headers(token),
            json=body,
        )
        if res.status_code == 201:
            spinner.text = pre_prompt_str + normal.txt(" Patched ").to_str()
            spinner.succeed()
        else:
            spinner.text = (pre_prompt_str + warn.kw("  Failed  ") +
                            warn.txt("Cannot create PR").kw2(patch_branch).txt(
                                "to origin/master").to_str())
            spinner.fail()
            try:
                info = warn.txt("    ").txt(
                    res.json()["errors"][0]["message"]).to_str()
                print(info)
            except Exception:
                pass
            continue
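
The get_changed_files helper called above is not shown. A rough sketch of what it could do with GitPython commit diffs (an assumption based on how its two return values are consumed in the loop) is:

def get_changed_files(master_commit, patch_commit):
    # Sketch only: both arguments are GitPython Commit objects.
    # changed_files : paths to copy from the patch branch into student repos
    # renamed_files : old paths that should be removed from student repos
    changed_files = {}
    renamed_files = {}
    for diff in master_commit.diff(patch_commit):
        if diff.renamed_file:
            renamed_files[diff.a_path] = diff.b_path
            changed_files[diff.b_path] = diff.change_type
        elif diff.change_type != "D":  # skip files deleted on the patch branch
            changed_files[diff.b_path] = diff.change_type
    return changed_files, renamed_files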
Example #29
0
def announce_grade(homework_prefix, token, org, only_id, feedback_source_repo):
    '''Announce student grades to each homework repo.'''

    ensure_gh_token(token)
    # TODO: use logging lib to log messages
    spinner = Halo(stream=sys.stderr)

    student_feedback_title = f"Grade for {homework_prefix}"

    gstudents = Gstudents()
    feedback_vars = gstudents.left_join(homework_prefix)

    # Clone feedback repo & set needed variables
    cur = Path('.')

    for d in cur.glob("feedback-tmp-*"):
        shutil.rmtree(d)
    spinner.info("delete dated folder")

    root_folder = Path(
        tempfile.mkdtemp(prefix="feedback-tmp-{}-".format(
            datetime.now().strftime("%b%d%H%M%S")),
                         dir="."))
    spinner.succeed(normal.txt('Create tmp folder ').kw(root_folder).to_str())

    feedback_repo_path = root_folder / 'feedbacks'

    spinner.start(f"cloning feeback source repo : {feedback_source_repo}")
    _, t = measure_time(sp.run)([
        'git',
        'clone',
        f'https://github.com/{org}/{feedback_source_repo}.git',
        feedback_repo_path.name,
    ],
                                cwd=root_folder,
                                stdout=sp.DEVNULL,
                                stderr=sp.DEVNULL)
    spinner.succeed(
        f"cloning feedback source repo : {feedback_source_repo} ... {t:4.2f} sec"
    )
    client = httpx.AsyncClient(headers=httpx.Headers(
        {
            "User-Agent": "GitHubClassroomUtils/1.0",
            "Authorization": "token " + token,
            # needed for the check-suites request
            "Accept": "application/vnd.github.antiope-preview+json"
        }))

    hw_path = feedback_repo_path / homework_prefix / 'reports'

    # generate feedbacks
    fbs, t = measure_time(gen_feedbacks)(homework_prefix, hw_path,
                                         feedback_vars)
    spinner.succeed(f"Generate content for feedbacks ... {t:5.3f} sec")

    # handle only_id
    if only_id:
        try:
            # detect possible buggy condition
            info = gstudents.get_student(only_id)
        except RuntimeError as e:
            print(' *=' * 30)
            print('Warning!')
            print(e)
            return
        only_repo_name = get_hw_repo_name(homework_prefix,
                                          info['github_handle'])
        fbs = list(filter(lambda fb: fb['repo_name'] == only_repo_name, fbs))

    async def push_to_remote(feedback_title, feedbacks):
        # push to remote
        async def push_feedback(fb):
            request_body = {'title': feedback_title, 'body': fb['value']}
            try:
                issue_num = await find_existing_issue(client, org,
                                                      fb['repo_name'],
                                                      feedback_title)
            except Exception as e:
                print(f'error on {fb["repo_name"]}: {e}')
                return
            if issue_num:
                request_body['state'] = 'open'  # reopen issue
                url = f"https://api.github.com/repos/{org}/{fb['repo_name']}/issues/{issue_num}"
                await edit_issue_async(client, url, issue_num, request_body)
            else:
                url = f"https://api.github.com/repos/{org}/{fb['repo_name']}/issues"
                await create_issue_async(client, url, request_body)
            print(f'success {fb["repo_name"]}')

        async with trio.open_nursery() as nursery:
            for fb in feedbacks:
                nursery.start_soon(push_feedback, fb)

    _, t = measure_time(trio.run)(push_to_remote, student_feedback_title, fbs)
    spinner.succeed(f"Push feedbacks to remote ... {t:5.2f} sec")
    spinner.succeed('Finished announcing grades')
    return
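
The async helpers find_existing_issue, edit_issue_async, and create_issue_async are referenced but not included here. A simplified sketch against the GitHub issues API, assuming the same call signatures and ignoring pagination, might be:

async def find_existing_issue(client, org, repo_name, title):
    # Sketch only: return the number of the first issue whose title matches, else None.
    res = await client.get(
        f"https://api.github.com/repos/{org}/{repo_name}/issues",
        params={"state": "all"})
    for issue in res.json():
        if issue["title"].strip() == title.strip():
            return issue["number"]
    return None

async def create_issue_async(client, url, request_body):
    # Sketch only: open a new feedback issue.
    await client.post(url, json=request_body)

async def edit_issue_async(client, url, issue_num, request_body):
    # Sketch only: update (and reopen) issue `issue_num`; `url` already points at it.
    await client.patch(url, json=request_body)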
Example #30
0
def event_times(input_file, org, token, deadline, target_team):
    """
    input-file: file containing a list of repo-hash entries.

    repo-hash : string in <repo>:<hash> format,
            e.g. hw0-ianre657:cb75e99
    """
    global github_organization
    global github_token

    try:
        parsed_repos = get_repo_infos(input_file)
    except FileNotFoundError as e:
        print(str(e))
        return
    ensure_gh_token(token)
    spinner = Halo(stream=sys.stderr)

    github_organization = org
    github_token = token

    print(f"deadline: {deadline}")

    submit_deadline = iso8601.parse_date(deadline)
    submit_deadline = submit_deadline.replace(tzinfo=LOCAL_TIMEZONE)

    spinner.info(f"Deadline : {submit_deadline}")
    success_group = []
    fail_group = []
    spinner.start("Start to check late submissions")

    # get team membership info
    if target_team is not None:
        only_team_members = set(
            Team(org=github_organization,
                 team_slug=target_team,
                 github_token=github_token).members.keys())

    for idx, repo in enumerate(parsed_repos, start=1):
        # print("get commit time for {}".format(repo))
        if target_team is not None:
            import re

            user_id = re.sub(r"hw[\d]+-", "", repo.name)
            # print(f'user_id :{user_id}')
            if user_id not in only_team_members:
                continue
        spinner.text = f"({idx}/{len(parsed_repos)}) Checking {repo.name}"
        result = getRepoCommitTime(org=github_organization,
                                   repo=repo.name,
                                   commit_hash=repo.commit_hash)
        for r in result:
            # print(r)
            passed, delta = is_deadline_passed(
                submit_deadline, iso8601.parse_date(r.pushed_time))
            if passed:
                fail_group.append({
                    "repo-name": r.repo,
                    "commit-hash": r.commit_hash,
                    "time-passed": delta,
                    "last-pushtime": r.pushed_time,
                })
            else:
                success_group.append((r, delta))
                # print(f'{r}: {delta} later')
    spinner.succeed("Check finished")
    print("=" * 20, "REPORT", "=" * 20)
    print(f"Total submissions : {len(parsed_repos)}")
    print(f"late submissions: {len(fail_group)}")
    print(f"Submission Deadline: {submit_deadline}")
    print(tabulate(fail_group, headers="keys"))
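
Neither get_repo_infos nor is_deadline_passed is shown in this example. Sketches of both, assuming the <repo>:<hash> input format described in the docstring and timezone-aware datetimes, could be:

from collections import namedtuple

RepoInfo = namedtuple("RepoInfo", ["name", "commit_hash"])

def get_repo_infos(input_file):
    # Sketch only: parse lines such as `hw0-ianre657:cb75e99` into RepoInfo entries.
    repos = []
    with open(input_file) as f:
        for line in f:
            line = line.strip()
            if line:
                name, commit_hash = line.rsplit(":", 1)
                repos.append(RepoInfo(name=name, commit_hash=commit_hash))
    return repos

def is_deadline_passed(deadline, pushed_time):
    # Sketch only: both arguments are timezone-aware datetimes.
    delta = pushed_time - deadline
    return delta.total_seconds() > 0, delta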
Example #31
0
    def write(args, generator, object_info, stream_info):
        """
        Write the livestream to file in real time.

        Calls previously defined private methods:

            SaveStream._create_skeleton()
            SaveStream._get_temp_filename()
            SaveStream._rename()

        Calls a public method from an external module:

            DisplayStream.display()

        Parameters
        ----------
        args: Namespace
            Namespace object containing all arguments that were defined in the CLI 
        generator: Reddit object generator
        object_info: str
            String denoting which Reddit objects are displayed in the stream
        stream_info: str
            String denoting the livestream information

        Returns
        -------
        stream_statistics: str
            String denoting the livestream statistics (Reddit objects, Subreddit
            or Redditor, and duration)
        """
        
        skeleton = SaveStream._create_skeleton(args)
        stream_path = SaveStream._get_temp_filename(stream_info)

        SaveStream._create_temp_file(skeleton, stream_path)

        with open(stream_path, "r+", encoding = "utf-8") as existing_file:
            stream_data = json.load(existing_file)

            start_stream = time.mktime(time.localtime())
            try:
                logging.info("")
                logging.info("STREAMING...")
                logging.info("")

                for obj in generator:
                    DisplayStream.display(obj)
                    stream_data["data"].append(obj)

                    existing_file.seek(0)
                    existing_file.truncate()
                    json.dump(stream_data, existing_file)

            except KeyboardInterrupt:
                end_stream = time.mktime(time.localtime())
                duration = time.strftime("%H:%M:%S", time.gmtime(end_stream - start_stream))
                stream_statistics = f"Streamed {object_info} submitted {stream_info} for {duration}."

                print("\n\n")
                Halo().info(Fore.YELLOW + Style.BRIGHT + "ABORTING LIVESTREAM.")
                logging.info("ABORTING LIVESTREAM.")
                logging.info("")

                Halo().info(stream_statistics)
                print()

                stream_data["livestream_metadata"]["stream_duration"] = duration
                stream_data["livestream_metadata"]["stream_end"] = time.strftime("%H:%M:%S", time.localtime(end_stream))
                stream_data["livestream_metadata"]["stream_start"] = time.strftime("%H:%M:%S", time.localtime(start_stream))

                existing_file.seek(0)
                existing_file.truncate()
                json.dump(stream_data, existing_file, indent = 4)

        save_spinner = Halo().start("Saving livestream.")
        SaveStream._rename(duration, object_info, time.strftime("%H:%M:%S", time.localtime(start_stream)), stream_path)
        save_spinner.info(Fore.GREEN + Style.BRIGHT + "Livestream has been saved to file.")

        logging.info("Livestream has been saved to file.")
        logging.info("")

        print()

        return stream_statistics
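
The private helpers called at the top of write() are defined elsewhere. Judging only by the keys this method reads and writes, SaveStream._create_skeleton() presumably returns a structure along these lines (a guess from usage, not the actual implementation):

    def _create_skeleton(args):
        # Sketch only: the real method likely records the CLI arguments in the metadata too.
        return {
            "livestream_metadata": {
                "stream_duration": None,
                "stream_end": None,
                "stream_start": None,
            },
            "data": []
        }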
Example #32
0
def patch_project(hw_prefix, patch_branch, source_repo, token, org, only_repo):
    '''Patch student homework repositories'''
    # init
    colorama_init(autoreset=True)
    spinner = Halo(stream=sys.stderr)

    if source_repo == '':
        source_repo = f'tmpl-{hw_prefix}-revise'

    # Check if repo already contains the patched branch. Skip if so.
    #  api : https://developer.github.com/v3/git/refs/#get-a-reference
    res = requests.get(
        f"https://api.github.com/repos/{org}/{source_repo}/git/refs/heads/{patch_branch}",
        headers=github_headers(token))
    if res.status_code != 200:  # this branch does not exist on the remote
        spinner.fail(
            f"branch : `{patch_branch}` doesn't exist on repo:{org}/{source_repo} "
        )
        return

    cur = Path('.')
    for d in cur.glob("patch-*"):
        shutil.rmtree(d)
    spinner.info("delete dated folder")

    spinner.start(
        f"Fetch issue template {Fore.CYAN}{patch_branch} {Fore.RESET}from {Fore.CYAN}{source_repo}"
    )
    # Fetch patch template on the source repo
    issues = get_github_endpoint_paged_list(
        endpoint=f"repos/{org}/{source_repo}/issues",
        github_token=token,
        verbose=False)
    issue_tmpl_found = False
    for i in issues:
        if i['title'] == patch_branch:
            issue_tmpl_found = True
            issue_tmpl_body = i['body']
            break
    if not issue_tmpl_found:
        raise Exception(
            f"cannot found issue tmpl `{patch_branch}` on `{source_repo}`")
    spinner.succeed()

    root_folder = Path(
        tempfile.mkdtemp(prefix="patch-{}-{}-".format(
            patch_branch,
            datetime.now().strftime("%b%d%H%M%S")),
                         dir="."))
    spinner.succeed(f"Create tmp folder {Fore.YELLOW}{root_folder}")

    spinner.info(
        f"Fetch source repo {Fore.CYAN}{source_repo}{Style.RESET_ALL} from GitHub"
    )
    src_repo_path = root_folder / "source_repo"
    sp.run([
        'git',
        'clone',
        f'https://github.com/{org}/{source_repo}.git',
        src_repo_path.name,
    ],
           cwd=root_folder)

    src_repo = Repo(src_repo_path)
    sp.run(['git', 'checkout', '--track', f'origin/{patch_branch}'],
           cwd=src_repo_path,
           stdout=sp.DEVNULL,
           stderr=sp.DEVNULL)
    spinner.succeed()

    # Pasting changed files into students repo
    src_repo_git = src_repo.git
    src_repo_git.checkout(patch_branch)
    changed_files, renamed_files = get_changed_files(
        master_commit=src_repo.heads['master'].commit,
        patch_commit=src_repo.heads[patch_branch].commit)

    spinner.start("Fetch information for homework repo")
    spinner.succeed()
    if only_repo is not None:
        repos = [
            re for re in query_matching_repos(org,
                                              github_repo_prefix=only_repo,
                                              github_token=token,
                                              verbose=False)
            if re['name'] == only_repo
        ]
        repo = next(iter(repos), None)
        if repo:
            spinner.info(
                f"Only patch to repo : {Fore.YELLOW}{repo['name']}{Style.RESET_ALL}"
            )
        repos = [repo]
    else:
        repos = query_matching_repos(org,
                                     github_repo_prefix=hw_prefix,
                                     github_token=token,
                                     verbose=False)
    spinner.succeed()

    # Patch to student repos
    student_path = root_folder / "student_repos"
    student_path.mkdir()
    for repo_idx, r in enumerate(repos, start=1):
        pre_prompt_str = f"({repo_idx}/{len(repos)}) {Fore.YELLOW}{r['name']}{Fore.RESET}"
        spinner.start()

        # Check if repo already contains the patched branch. Skip if so.
        #  api : https://developer.github.com/v3/git/refs/#get-a-reference
        res = requests.get(
            f"https://api.github.com/repos/{org}/{r['name']}/git/refs/heads/{patch_branch}",
            headers=github_headers(token))
        if res.status_code == 200:  # this branch exists in the remote
            spinner.text = pre_prompt_str + \
                f" {Back.GREEN}{Fore.BLACK} Skip {Style.RESET_ALL} already patched"
            spinner.succeed()
            continue

        spinner.text = pre_prompt_str + \
            f" {Fore.BLUE}cloning repo..{Fore.RESET}"
        sp.run(['git', 'clone', '--depth=1', r['html_url']],
               cwd=student_path,
               stdout=sp.DEVNULL,
               stderr=sp.DEVNULL)

        hw_repo_name = r['html_url'].rsplit("/")[-1]

        # open a new branch & checkout to that branch
        sp.run(['git', 'checkout', '-b', patch_branch],
               cwd=student_path / hw_repo_name,
               stdout=sp.DEVNULL,
               stderr=sp.DEVNULL)

        # copy file to student repo
        for f in changed_files.keys():
            (student_path / hw_repo_name / f).parent.mkdir(parents=True,
                                                           exist_ok=True)
            shutil.copyfile(src=src_repo_path / f,
                            dst=student_path / hw_repo_name / f)
        for f in renamed_files.keys():
            os.remove(student_path / hw_repo_name / f)

        # changed_files = get_changed_files(
        #     master_commit = src_repo.heads['master'].commit,
        #     patch_commit = src_repo.heads[patch_branch].commit
        # )
        # push (publish) that branch to student repo
        sp.run(['git', 'add', '.'],
               cwd=student_path / hw_repo_name,
               stdout=sp.DEVNULL,
               stderr=sp.DEVNULL)

        # Skip if nothing changed
        student_repo = Repo(student_path / hw_repo_name)
        if len(student_repo.index.diff("HEAD")) == 0:
            spinner.text = pre_prompt_str + \
                f" {Back.GREEN}{Fore.BLACK} Passed {Style.RESET_ALL} Repo no change"
            spinner.succeed()
            continue

        sp.run([
            'git', 'commit', '-m',
            f':construction_worker: Patch: {patch_branch}'
        ],
               cwd=student_path / hw_repo_name,
               stdout=sp.DEVNULL,
               stderr=sp.DEVNULL)

        spinner.text = pre_prompt_str + \
            f" {Fore.BLUE}publish patch to remote..{Fore.RESET}"
        res = sp.run(['git', 'push', '-u', 'origin', patch_branch],
                     cwd=student_path / hw_repo_name,
                     stdout=sp.DEVNULL,
                     stderr=sp.DEVNULL)
        if res.returncode != 0:
            spinner.text = (
                pre_prompt_str +
                f" {Back.RED}{Fore.BLACK} Failed {Style.RESET_ALL}" +
                f" Cannot push branch {Fore.CYAN}{patch_branch}{Fore.RESET} to origin"
            )
            spinner.fail()
            continue

        # open a pull request on the student's repo
        # student_repo/patch-branch  -> student_repo/master
        body = {
            "title": f"[PATCH] {patch_branch}",
            "body": issue_tmpl_body,
            "head": patch_branch,
            "base": "master"
        }
        res = requests.post(
            f"https://api.github.com/repos/{org}/{r['name']}/pulls",
            headers=github_headers(token),
            json=body)
        if res.status_code == 201:
            spinner.text = pre_prompt_str + \
                f" {Fore.BLACK}{Back.GREEN} Patched {Style.RESET_ALL}"
            spinner.succeed()
        else:
            spinner.text = (
                pre_prompt_str +
                f" {Back.RED}{Fore.BLACK} Failed {Style.RESET_ALL}" +
                f" Cannot create PR {Fore.CYAN}{patch_branch}{Fore.RESET} to origin/master"
            )
            spinner.fail()
            try:
                print(f"    {Fore.RED}{res.json()['errors'][0]['message']}")
            except Exception:
                pass
            continue

    # TODO : print summary after patch
    #        how many succeeded, skipped, failed
    pass
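
One way to address the TODO above would be to tally each repo's outcome inside the patching loop and report the counts at the end, e.g. (a sketch, not part of the original code):

from collections import Counter

summary = Counter()
# inside the loop:
#   summary["success"] += 1   after spinner.succeed()
#   summary["skipped"] += 1   when the branch already exists or the repo is unchanged
#   summary["failed"] += 1    after spinner.fail()
print("Patch summary: "
      f"{summary['success']} succeeded, "
      f"{summary['skipped']} skipped, "
      f"{summary['failed']} failed")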