Code example #1
 def save_analysis(self):
     print_info(
         'Total multipart is: %s,redirect is: %s,request exception is: %s' %
         (len(MULTIPART), len(REDIRECT), len(REQUEST_ERROR)))
     self.save_multipart()
     self.save_redirect()
     self.save_request_exception()
Code example #2
 def verify(self):
     """Verify pypi file with MD5."""
     global EMAIL
     util.print_info("Searching for package information in pypi")
     name, release = self.parse_name()
     info = PyPiVerifier.get_info(name)
     releases_info = info.get('releases', None)
     if releases_info is None:
         self.print_result(
             False,
             err_msg='Error in package info from {}'.format(PYPIORG_API))
         return None
     release_info = releases_info.get(release, None)
     if release_info is None:
         self.print_result(
             False,
             err_msg='Information for package {} with release {} not found'.
             format(name, release))
         return None
     release_info = self.get_source_release(
         os.path.basename(self.package_path), release_info)
     package_info = info.get('info', None)
     if package_info is not None:
         EMAIL = package_info.get('author_email', '')
     self.md5_digest = release_info.get('md5_digest', '')
     return self.verify_md5()
Code example #3
    def save_traffic(traffic_obj_list, id, piece=3000):
        """

        :param traffic_obj_list:
        :param id: task id
        :param piece: default 3000
        :return:
        """
        traffic_path = Engine.get_traffic_path(id)
        if len(traffic_obj_list) > 0:
            saved_traffic_list = [i for i in traffic_obj_list]
            # slice traffic if too large
            if len(saved_traffic_list) > piece:
                traffic_divided_path = []
                traffic_divided = divide_list(saved_traffic_list, piece)
                for i in range(len(traffic_divided)):
                    traffic_divided_path.append(traffic_path + str(i))
                    with open(traffic_path + str(i), 'w') as traffic_f:
                        cPickle.dump(traffic_divided[i], traffic_f)
                print_info('Traffic of %s has been divided and saved to %s.' %
                           (id, ','.join(traffic_divided_path)))
            else:
                with open(traffic_path, 'w') as traffic_f:
                    cPickle.dump(saved_traffic_list, traffic_f)
                    print_info('Traffic of %s has been saved to %s.' %
                               (id, traffic_path))
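
The divide_list helper called above is not shown in this listing; a minimal sketch of a chunking helper compatible with that call (an assumption, not the project's actual code) could be:

def divide_list(items, piece):
    """Split items into consecutive chunks of at most `piece` elements."""
    return [items[i:i + piece] for i in range(0, len(items), piece)]

# divide_list(list(range(7)), 3) -> [[0, 1, 2], [3, 4, 5], [6]]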
Code example #4
def attempt_verification_per_domain(package_path, url):
    """Use url domain name to set verification type."""
    netloc = urlparse(url).netloc
    if any(loc in netloc for loc in PYPI_DOMAINS):
        domain = 'pypi'
    elif 'download.gnome.org' in netloc:
        domain = 'gnome.org'
    elif 'download.qt.io' in netloc:
        domain = 'qt.io'
    else:
        domain = 'unknown'
    verifier = {
        'pypi': PyPiVerifier,
        'gnome.org': GnomeOrgVerifier,
        'qt.io': QtIoVerifier,
    }.get(domain, None)

    if verifier is None:
        util.print_info('Skipping domain verification')
        return None
    else:
        util.print_info('Verification based on domain {}'.format(domain))
        return apply_verification(verifier, **{
                                  'package_path': package_path,
                                  'url': url})
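
For reference, the netloc checks above operate only on the host portion of the URL (shown here with Python 3's urllib; the project may import urlparse differently):

from urllib.parse import urlparse

print(urlparse('https://files.pythonhosted.org/packages/a/b/pkg-1.0.tar.gz').netloc)
# -> 'files.pythonhosted.org'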
Code example #5
def parse_media_info(filename):
    """ Given a filename, match media info and return MediaInfo object. Returns empty MediaInfo if no matches."""
    print_info('Extracting media info from {0}'.format(filename))
    media_info = MediaInfo()
    for media_info_type in MEDIA_INFO_REGEXS:
        #print_info('Parsing for {0}'.format(media_info_type))
        for regex in MEDIA_INFO_REGEXS[media_info_type]:
            m = re.search(regex, filename)

            if m is None:
                continue

            extracted_data = m.group('MediaInfo').upper()
            print_info('Extracted {0}: {1}'.format(media_info_type,
                                                   extracted_data))

            # Before we set, do any needed cleanup
            if media_info_type == 'resolution':
                # extracted_data was upper-cased above, so compare the
                # trailing 'p' case-insensitively before normalizing.
                if not extracted_data.lower().endswith('p'):
                    resolution = int(extracted_data)
                    if resolution == 1280:
                        extracted_data = '720'
                    extracted_data = extracted_data + 'p'
                media_info.resolution = extracted_data
            elif media_info_type == 'source':
                media_info.source = extracted_data.replace('-', '')
            elif media_info_type == 'audio_source':
                media_info.audio_source = extracted_data
            elif media_info_type == 'encoding':
                media_info.encoding = re.sub('X', 'H', extracted_data)
            elif media_info_type == 'color_bits':
                media_info.color_bits = extracted_data
            break

    return media_info
Code example #6
File: core.py Project: jade-scout/URLHarvest
def report_tool_performance(extracted_url_list, start_time, end_time):

    URLHarvestResults.target.insert(2, str(round(end_time - start_time, 4)))
    URLHarvestResults.target.insert(3, str(len(extracted_url_list) - URLHarvestResults.list_header_len))

    print('')
    util.print_info('Extraction Time: ' + URLHarvestResults.target[2] + ' seconds')
    util.print_info('URLs Processed: ' + URLHarvestResults.target[3])
    return
Code example #7
def parse_manga_title(filename):
    """ Attempts to parse manga title from filename. Will strip out separators at start of string. If no title is found, returns empty string"""
    print_info('Attempting to parse manga title from {0}'.format(filename))
    for regex in MANGA_TITLE_REGEX:
        m = re.search(regex, filename)

        if m is None:
            continue

        extracted_title = m.group('Series')
        return clean_episode_title(extracted_title)
    return ''
Code example #8
def parse_anime_episode_title(filename):
    """ Attempts to parse episode title from filename. Will strip out separators at start of string. If no title is found, returns empty string"""
    print_info('Attempting to parse episode title from {0}'.format(filename))
    for regex in ANIME_EPISODE_TITLE_REGEXS:
        m = re.search(regex, filename)

        if m is None:
            continue

        extracted_title = m.group('EpisodeTitle')
        return clean_episode_title(extracted_title)
    return ''
Code example #9
 def verify_md5(self):
     """Verify MD5."""
     util.print_info("Verifying MD5 digest")
     if self.md5_digest is None:
         self.print_result(False, err_msg='Verification requires a md5_digest')
         return None
     if os.path.exists(self.package_path) is False:
         self.print_result(False, err_msg='{} not found'.format(self.package_path))
         return None
     md5_digest = self.calc_sum(self.package_path, hashlib.md5)
     self.print_result(md5_digest == self.md5_digest)
     return md5_digest == self.md5_digest
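
The calc_sum helper used above does not appear in this listing; a minimal sketch with the same call shape (a file path plus a hashlib constructor, returning a hex digest) might look like this, though the project's real implementation may differ:

import hashlib

def calc_sum(path, algo=hashlib.md5, block_size=65536):
    """Hash the file at `path` in chunks and return the hex digest."""
    hasher = algo()
    with open(path, 'rb') as handle:
        for chunk in iter(lambda: handle.read(block_size), b''):
            hasher.update(chunk)
    return hasher.hexdigest()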
Code example #10
 def verify(self, recursion=False):
     """Verify file using gpg signature."""
     global KEYID
     global EMAIL
     util.print_info("Verifying GPG signature")
     if os.path.exists(self.package_path) is False:
         self.print_result(False,
                           err_msg='{} not found'.format(self.package_path))
         return None
     if os.path.exists(self.package_sign_path) is False and self.get_sign(
     ) is not True:
         self.print_result(False,
                           err_msg='{} not found'.format(
                               self.package_sign_path))
         return None
     if sign_isvalid(self.package_sign_path) is False:
         self.print_result(False,
                           err_msg='{} is not a GPG signature'.format(
                               self.package_sign_path))
         try:
             os.unlink(self.package_sign_path)
         except Exception:
             pass
         return None
     # valid signature exists at package_sign_path, operate on it now
     keyid = get_keyid(self.package_sign_path)
     # default location first
     pubkey_loc = self.pubkey_path.format(keyid)
     if not os.path.exists(pubkey_loc):
         # attempt import the key interactively if set to do so
         self.print_result(False, 'Public key {} not found'.format(keyid))
         if not self.interactive or recursion:
             return None
         if attempt_key_import(keyid, self.pubkey_path.format(keyid)):
             return self.verify(recursion=True)
         return None
     # public key exists or is imported, verify
     EMAIL = get_email(pubkey_loc)
     sign_status = verify_cli(pubkey_loc, self.package_path,
                              self.package_sign_path)
     if not sign_status:
         if self.config.old_keyid:
             compare_keys(KEYID_TRY, self.config.old_keyid)
         self.print_result(self.package_path)
         KEYID = KEYID_TRY
         self.config.signature = self.key_url
         self.config.config_opts['verify_required'] = True
         self.config.rewrite_config_opts()
         return True
     else:
         self.print_result(False, err_msg=sign_status.strerror)
         self.quit()
Code example #11
def parse_anime_hash(filename):
    """ Given a filename, match hash and return hash with brackets. Returns None if no matches."""
    print_info('Extracting hash from {0}'.format(filename))
    for regex in ANIME_HASH_REGEXS:
        m = re.search(regex, filename)

        if m is None:
            continue

        ep_hash = m.group('Hash').upper()
        print_info('Extracted Hash: {0}'.format(ep_hash))
        return ep_hash

    return ''
Code example #12
def parse_episode(filename):
    """ Given a filename, matches episode and returns episode in E01 format. This will ignore episode parts. Returns None if no matches."""
    print_info('Extracting episode from {0}'.format(filename))
    for regex in EPISODE_NUM_REGEXS:
        m = re.search(regex, filename)

        if m is None:
            continue

        extracted_ep = m.group('Episode').lower()
        print_info('Extracted episode: {0}'.format(extracted_ep))

        if '-' in extracted_ep:
            print_info('Multiple Episodes found')
            tokens = extracted_ep.split('-e')
            first_token = tokens[0]
            last_token = tokens[len(tokens) - 1]
            return parse_episode(first_token) + '-' + parse_episode(last_token)
        else:
            ep_num = int(extracted_ep)
            if ep_num is not None and ep_num > 0:
                print_info('Episode might be: {0}'.format(ep_num))
                return 'E' + format_num(ep_num)

    return None
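
Hypothetical usage, assuming the patterns in EPISODE_NUM_REGEXS capture tokens such as 'E05' and 'E05-E06' in a named 'Episode' group (the filenames below are made up):

parse_episode('Show.Name.S01E05.720p.mkv')      # expected 'E05' given those patterns
parse_episode('Show.Name.S01E05-E06.720p.mkv')  # expected 'E05-E06'
parse_episode('Show.Name.mkv')                  # expected None (no match)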
Code example #13
def parse_anime_group(filename):
    """ Given a filename, match anime sub group and return group without brackets. Returns None if no matches."""
    print_info('Extracting group from {0}'.format(filename))
    for regex in ANIME_GROUP_REGEXS:
        m = re.search(regex, filename)

        if m is None:
            continue

        ep_group = m.group('Group')
        print_info('Extracted Group: {0}'.format(ep_group))
        return ep_group

    return None
Code example #14
def parse_episode_part(filename):
    """ Given a filename, attempts to match a part num (a = 1, b = 2) from the title. Returns 0 if no matches. """
    print_info('Extracting part num from {0}'.format(filename))
    baseline = ord('a')

    for regex in EPISODE_PART_REGEXS:
        m = re.search(regex, filename)

        if m is None:
            continue

        extracted_part = m.group('Part').lower()
        print_info('Extracted Part: {0}'.format(extracted_part))

        # Convert into int
        part_num = ord(extracted_part) - baseline + 1
        return part_num

    return 0
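
For reference, the ord() arithmetic above maps part letters to numbers as follows:

baseline = ord('a')
assert ord('a') - baseline + 1 == 1  # part 'a' -> 1
assert ord('b') - baseline + 1 == 2  # part 'b' -> 2
assert ord('c') - baseline + 1 == 3  # part 'c' -> 3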
Code example #15
File: git.py Project: insilications/autospec
def git_describe_custom_re(clone_path, conf):
    outputVersion1 = ""
    git_describe_cmd1 = f"git describe --abbrev=0 --tags"
    git_describe_cmd1_result = ""
    process = subprocess.run(
        git_describe_cmd1,
        check=False,
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        text=True,
        universal_newlines=True,
        cwd=clone_path,
    )
    git_describe_cmd1_result = process.stdout

    if git_describe_cmd1_result:
        #if util.debugging:
            #print_debug(f"conf.custom_git_re2: {conf.custom_git_re2}")
        git_describe_cmd2_re1_pre = r"{0}".format(conf.custom_git_re2)
        git_describe_cmd2_re1 = re.Pattern
        try:
            git_describe_cmd2_re1 = re.compile(git_describe_cmd2_re1_pre, re.MULTILINE)
        except re.error as err:
            # Compilation failed, so report the raw pattern string instead of
            # a compiled pattern attribute, and bail out.
            print_fatal(f"Custom git regex: {git_describe_cmd2_re1_pre}")
            print_fatal(f"Unable to create custom git regex: {err}")
            return outputVersion1
        print_info(f"Custom git regex 2: {git_describe_cmd2_re1.pattern}")
        git_describe_cmd2_re1_result = git_describe_cmd2_re1.search(git_describe_cmd1_result)
        if git_describe_cmd2_re1_result:
            #if util.debugging:
                #print_debug(f"{git_describe_cmd2_re1_result.group(1)}.{git_describe_cmd2_re1_result.group(2)}.{git_describe_cmd2_re1_result.group(3)}.{git_describe_cmd2_re1_result.group(4)}.{git_describe_cmd2_re1_result.group(5)}.{git_describe_cmd2_re1_result.group(6)}.{git_describe_cmd2_re1_result.group(7)}")
            if git_describe_cmd2_re1_result.group(1):
                outputVersion1 = f"{git_describe_cmd2_re1_result.group(1)}"
            if git_describe_cmd2_re1_result.group(2):
                outputVersion1 = f"{outputVersion1}.{git_describe_cmd2_re1_result.group(2)}"
            if git_describe_cmd2_re1_result.group(3):
                outputVersion1 = f"{outputVersion1}.{git_describe_cmd2_re1_result.group(3)}"
            if git_describe_cmd2_re1_result.group(4):
                outputVersion1 = f"{outputVersion1}.{git_describe_cmd2_re1_result.group(4)}"
            if git_describe_cmd2_re1_result.group(5):
                outputVersion1 = f"{outputVersion1}{git_describe_cmd2_re1_result.group(5)}"
            return outputVersion1
Code example #16
def main():
    """Start program."""
    parser = argparse.ArgumentParser(description="jsFBP is a simple program "
                                     "for moving and copying files.")
    parser.add_argument("config_file", help="name of the configuration file")

    args = parser.parse_args()

    directories = core.DirectoryPaths(args.config_file)
    files = core.Files(args.config_file)
    path_line_num = 0

    i = 0
    syntax_check: bool = True

    # A fail-safe precaution. If the program somehow gets trapped in a
    # potentially infinite loop, the i counter will force it to stop. This also
    # means you can only have 64 different file lists in the same configuration
    # file.

    util.print_info("Checking configuration file syntax...")

    while i < MAX_ITERATIONS:
        paths = directories.find(path_line_num)
        if paths == -1:
            if syntax_check is True:
                syntax_check = False
                if util.ERRORS_FOUND == 0:
                    i = 0
                    path_line_num = 0
                    files.last_file_line = 0
                    paths = directories.find(path_line_num)
                else:
                    break
            else:
                break
        file_list = files.file_list(path_line_num + HEADER_LENGTH,
                                    os.fsdecode(paths[0]))
        action = files.action(path_line_num + HEADER_LENGTH - 1)

        if not (syntax_check) and util.ERRORS_FOUND == 0:
            util.print_info("Processing file list #: {}".format(i + 1))
            core.backup_files(paths[0], paths[1], file_list, action)
        path_line_num = files.last_file_line + 1
        i += 1

    if util.ERRORS_FOUND == 0:
        util.print_info("Success!")
    else:
        util.print_info("{} Error(s) found. Program aborted".format(
            util.ERRORS_FOUND))
Code example #17
    def verify_sum(self, shasum):
        """Verify sha sum."""
        util.print_info("Verifying sha{}sum digest".format(self.shalen))
        if shasum is None:
            self.print_result(False, err_msg='Verification requires shasum')
            return None
        if os.path.exists(self.package_path) is False:
            self.print_result(False, err_msg='{} not found'.format(self.package_path))
            return None

        sha_algo = {
            256: hashlib.sha256
        }.get(self.shalen, None)

        if sha_algo is None:
            self.print_result(False, err_msg='sha{} algorithm not found'.format(self.shalen))
            return None

        digest = self.calc_sum(self.package_path, sha_algo)
        self.print_result(digest == shasum)
        return digest == shasum
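
The digest-length lookup above only knows sha256; if other lengths were ever needed, the same dispatch could be extended, e.g. (a sketch, not part of the project):

import hashlib

def sha_algo_for_length(shalen):
    """Map a SHA digest bit length to the matching hashlib constructor, or None."""
    return {
        224: hashlib.sha224,
        256: hashlib.sha256,
        384: hashlib.sha384,
        512: hashlib.sha512,
    }.get(shalen)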
Code example #18
    def verify(self):
        """Verify ruby gem based on sha sum."""
        gemname = os.path.basename(self.package_path).replace('.gem', '')
        util.print_info("Verifying SHA256 checksum")
        if os.path.exists(self.package_path) is False:
            self.print_result(False, 'GEM was not found {}'.format(self.package_path))
            return
        name, _ = re.split(r'-\d+\.', gemname)
        number = gemname.replace(name + '-', '')
        geminfo = GEMShaVerifier.get_rubygems_info(name)
        gemsha = GEMShaVerifier.get_gemnumber_sha(geminfo, number)

        if geminfo is None:
            self.print_result(False, "unable to parse info for gem {}".format(gemname))
        else:
            calcsha = self.calc_sum(self.package_path, hashlib.sha256)
            self.print_result(gemsha == calcsha)
            result = gemsha == calcsha
            if result is False:
                self.quit()
            return result
Code example #19
def check(url, config, interactive=True):
    """Run verification based on tar file url."""
    package_name = filename_from_url(url)
    package_path = os.path.join(config.download_path, package_name)
    package_check = get_integrity_file(package_path)
    try:
        interactive = interactive and sys.stdin.isatty()
    except ValueError:
        interactive = False
    print(SEPT)
    util.print_info('Performing package integrity verification')
    verified = None
    if package_check is not None:
        verified = from_disk(url, package_path, package_check, config, interactive=interactive)
    elif package_path[-4:] == '.gem':
        signature_file = get_signature_file(url, config.download_path)
        verified = from_disk(url, package_path, signature_file, config, interactive=interactive)
    else:
        util.print_info('None of {}.(asc|sig|sign|sha256) is found in {}'.format(package_name, config.download_path))
        signature_file = get_signature_file(url, config.download_path)
        if signature_file is not None:
            verified = from_disk(url, package_path, signature_file, config, interactive=interactive)
            if verified is None:
                util.print_info('Unable to find a signature')
                verified = attempt_verification_per_domain(package_path, url)
        else:
            verified = attempt_verification_per_domain(package_path, url)

    if verified is None and config.config_opts['verify_required']:
        quit_verify()
    elif verified is None:
        print(SEPT)
    return verified
Code example #20
File: ci.py Project: incnb/jenkins-ci-client
def console_output(conn, git_info):
    """
    Retrieve the console output of the most recently completed build.

    :param conn: An instance of jenkins.Jenkins, as initialized by init_jenkins_conn()
    :param git_info: A dictionary of the working directory repository's git parameters, as returned by load_git_info()
    """
    try:
        last_build_number = conn.get_job_info(
            util.test_job_name(
                git_info['job_name']))['lastCompletedBuild']['number']
    except jenkins.NotFoundException:
        util.print_error('No builds exist yet for this project.')
        return
    util.print_info(
        'Most recent build number for project {job_name} is {build_number}.'.
        format(
            job_name=git_info['job_name'],
            build_number=last_build_number,
        ))
    print conn.get_build_console_output(
        util.test_job_name(git_info['job_name']), last_build_number)
Code example #21
File: ci.py Project: incnb/jenkins-ci-client
def load_git_info():
    """
    Get all info for the git repository in the current working directory.

    :return: A dictionary of git parameters
    """
    try:
        repo = git.Repo()
        git_info = {
            'branch': repo.active_branch.name,
            'job_name': repo.git.working_dir.split('/')[-1],
        }
        util.print_info('Job/project ID is {job_name}.'.format(
            job_name=git_info['job_name']))
        util.print_info(
            'Current branch is {branch}.'.format(branch=git_info['branch']))
        print ''
        return git_info
    except git.InvalidGitRepositoryError:
        util.print_error(
            'Could not read the git repository in the current working directory!'
        )
        sys.exit(1)
Code example #22
File: core.py Project: jade-scout/URLHarvest
 def create_file(self):
     file_name = self.create_file_name()
     results_file = self.write_header(file_name)
     
     entry = URLHarvestResults.list_header_len
     if self.url_list[0] == 'keystring_sort':
         results_file.writelines('\n' + self.url_list[2] + ' search result(s) matching \'' + self.url_list[1] + '\':\n-----------------------------------------------------------------\n')
     
     while entry < len(self.url_list):
         element = 0
         while element < len(self.url_list[entry]):
             line = ''
             if element > 0:
                 line = '\t'
             results_file.write(line + str(self.url_list[entry][element]) + '\n')
             element += 1
         entry += 1
         
         if entry - URLHarvestResults.list_header_len == int(self.url_list[2]):
             results_file.writelines('-----------------------------------------------------------------\n\n')
         
     results_file.close()
     util.print_info('Finished writing results file: ' + file_name)
     return
Code example #23
def parse_season(filename):
    """ Attempts to parse Season from filename. If no season is found, returns S01. """
    print_info('Attempting to parse {0}'.format(filename))
    print_info('Extracting season from {0}'.format(filename))
    for regex in SEASON_REGEX:
        m = re.search(regex, filename)

        if m is None:
            continue

        extracted_season = m.group('Season').lower()
        print_info('Extracted season: {0}'.format(extracted_season))

        season_num = int(extracted_season)
        if season_num is not None and season_num > 0:
            print_info('Season might be: {0}'.format(season_num))
            return 'S' + format_num(season_num)
    return 'S01'
Code example #24
File: git.py Project: insilications/autospec
def git_clone(url, path, cmd_args, clone_path, force_module, force_fullclone, is_fatal=True):
    try:
        if force_module is True:
            if force_fullclone is True:
                print_info(f"git clone -j8 --branch={cmd_args}")
                call(f"git clone -j8 --branch={cmd_args}", cwd=path)
            else:
                print_info(f"git clone --single-branch -j8 --branch={cmd_args}")
                call(f"git clone --depth=1 --single-branch -j8 --branch={cmd_args}", cwd=path)
        else:
            if force_fullclone is True:
                print_info(f"git clone --recurse-submodules -j8 --branch={cmd_args}")
                call(f"git clone --recurse-submodules -j8 --branch={cmd_args}", cwd=path)
            else:
                print_info(f"git clone --single-branch --shallow-submodules --recurse-submodules -j8 --branch={cmd_args}")
                call(f"git clone --depth=1 --single-branch --shallow-submodules --recurse-submodules -j8 --branch={cmd_args}", cwd=path)
    except subprocess.CalledProcessError as err:
        if is_fatal:
            remove_clone_archive(path, clone_path, is_fatal)
            print_fatal(f"Unable to clone {url} in {clone_path}: {err}")
            sys.exit(1)
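
A hypothetical invocation: cmd_args is interpolated directly after --branch=, so the caller is expected to pass the branch or tag followed by the repository URL (all values below are placeholders):

git_clone(
    url='https://example.org/project.git',
    path='/tmp/build',
    cmd_args='v1.2.3 https://example.org/project.git',
    clone_path='/tmp/build/project',
    force_module=False,
    force_fullclone=False,
)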
Code example #25
def test_report():
    base_location = os.path.dirname(os.path.realpath(__file__))
    pdb_path = os.path.join(base_location, '..', '..', 'test', 'pytest_data',
                            '1bna.pdb')

    report = util.print_info(pdb_path)
    num_lines = len(report.split('\n'))

    reference_report = '''Found 566 atoms and 25 residues.
Residue Number       Starting atom        Residue Name        
         1                    1                    DC         
         2                    17                   DG         
         3                    39                   DC         
         4                    58                   DG         
         5                    80                   DA         
         6                   101                   DA         
         7                   122                   DT         
         8                   142                   DT         
         9                   162                   DC         
         10                  181                   DG         
         11                  203                   DC         
         12                  222                   DG         
         13                  244                   DC         
         14                  260                   DG         
         15                  282                   DC         
         16                  301                   DG         
         17                  323                   DA         
         18                  344                   DA         
         19                  365                   DT         
         20                  385                   DT         
         21                  405                   DC         
         22                  424                   DG         
         23                  446                   DC         
         24                  465                   DG         
         25                  487                solvent       '''

    num_lines_reference = len(reference_report.split('\n'))

    assert reference_report in report
    assert num_lines == num_lines_reference + 3
Code example #26
def parse_volume(filename):
    """ Attempts to parse Volume from filename. If no season is found, returns Volume 0. """
    print_info('Extracting volume from {0}'.format(filename))
    for regex in MANGA_VOLUME_REGEX:
        m = re.search(regex, filename)

        if m is None:
            continue

        extracted_volume = m.group('Volume').lower()
        print_info('Extracted volume: {0}'.format(extracted_volume))

        volume_num = int(extracted_volume)
        if volume_num is not None and volume_num > 0:
            print_info('Volume might be: {0}'.format(volume_num))
            return 'Volume ' + format_num(volume_num)
    return 'Volume 0'
Code example #27
def parse_anime_episode(filename):
    """ Given a filename, matches episode and returns episode in E01 format. This will ignore episode parts. Returns None if no matches."""
    print_info('Extracting episode from {0}'.format(filename))
    for regex in ANIME_EPISODE_NUM_REGEXS:
        m = re.search(regex, filename)

        if m is None:
            continue

        extracted_ep = m.group('Episode')
        print_info('Extracted episode: {0}'.format(extracted_ep))

        ep_num = int(extracted_ep)
        if ep_num is not None and ep_num > 0:
            print_info('Episode might be: {0}'.format(ep_num))
            return 'E' + format_num(ep_num)

    return None
Code example #28
def parse_chapter(filename):
    """ Given a filename, matches chapter and returns episode in Chapter 01 format. This will ignore episode parts. Returns None if no matches."""
    print_info('Extracting chapter from {0}'.format(filename))
    for regex in MANGA_CHAPTER_REGEX:
        m = re.search(regex, filename)

        if m is None:
            continue

        extracted_ep = m.group('Chapter')
        print_info('Extracted chapter: {0}'.format(extracted_ep))

        ep_num = int(extracted_ep)
        if ep_num is not None and ep_num > 0:
            print_info('Chapter might be: {0}'.format(ep_num))
            return 'Chapter ' + format_num(ep_num)

    return None
Code example #29
File: offline_session.py Project: parsa/scimitar
def _attach_pids(pid_dict):
    mgr = console.SessionManager()

    gdb_cmd = gdb_config['cmd']
    gdb_attach_tmpl = gdb_config['attach']

    tag_counter = 0

    tasks = {}
    msgs = {}

    # Start GDB instances
    for host in pid_dict.iterkeys():
        for pid in pid_dict[host]:
            tag_counter += 1
            tag = str(tag_counter)

            # Build the command line and launch GDB
            cmd = gdb_cmd + [gdb_attach_tmpl.format(pid=pid)]
            cmd_str = ' '.join(cmd)

            attach_pid_task = _AttachPidThread(host, pid, tag, cmd_str)
            tasks[tag] = attach_pid_task
            attach_pid_task.start()

    for tag, task in tasks.iteritems():
        print_info('Connecting to Process "{pid}" on "{host}"...',
                   host=task.host or 'localhost',
                   pid=task.pid)

        task.join()
        print_info('Connected.')

        session, mi_response = task.report()

        mgr.add(session)
        msgs[tag] = mi_response

    print_info('Beginning debugging session...')
    return mgr, msgs
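
The print_info calls above pass str.format-style keyword arguments along with the message; a minimal stand-in with that call shape could look like this (scimitar's real helper may add colors or logging):

def print_info(message, *args, **kwargs):
    """Format and print an informational message (minimal stand-in)."""
    print('[info] ' + message.format(*args, **kwargs))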
Code example #30
        :param coroutine:
        :return:
        """
        from gevent import monkey
        monkey.patch_all()
        result = []
        geventPool = pool.Pool(coroutine)
        tasks = [
            geventPool.spawn(Verify.request_and_verify, case)
            for case in case_list
        ]
        gevent.joinall(tasks)
        for i in tasks:
            if i.value is not None:
                result.append(i.value)
        print_info('Total Verify-Case is: %s, %s error happened.' %
                   (len(case_list), Verify.ERROR_COUNT))
        return result

    class Openner(Process):
        def __init__(self, browser_type, case_list):
            Process.__init__(self)
            self.browser = browser_type
            self.case_list = case_list

        def reload(self, browser):
            # close old
            browser.quit()
            # restart
            if self.browser == 'chrome':
                browser = chrome()
            else: