Example no. 1
0
        def filter_empty_income(incomes):
            """Drop income entries whose 'amount' is missing or falsy.

            Reads 'employer_incomes' and 'other_incomes' from *incomes*
            (either may be absent or None) and returns one combined list of
            the entries that carry a truthy 'amount'.
            """
            employer = [
                entry for entry in (incomes.get('employer_incomes') or [])
                if entry.get('amount')
            ]
            other = [
                entry for entry in (incomes.get('other_incomes') or [])
                if entry.get('amount')
            ]
            return employer + other
def get_dataset(overlap=5,
                window_size=10,
                time_steps=20,
                language=None,
                max_len=80):
    """Build windowed genuine/forged signature arrays from the global `processed`.

    Args:
        overlap: window overlap, in time steps (scaled by num_features below).
        window_size: window length, in time steps.
        time_steps: UNUSED here — the genuine/forged split is hard-coded to
            the first/last 20 entries of each user's 'values' (see below).
        language: if truthy, keep only users whose 'language' matches.
        max_len: signatures shorter than this are dropped; longer ones are
            trimmed to it. If falsy, the whole loop is skipped (see NOTE).

    Returns:
        dict with 'genuine' and 'forged' numpy arrays of shape
        (num_samples, num_time_steps, window_size*num_features).
    """
    if language:
        dataset = _.filter(processed, lambda x: x['language'] == language)
    else:
        dataset = processed
    num_features = 7  # number of features on each time step
    # x,y,meta = [],[],[]
    genuine = forged = None
    # NOTE(review): when max_len is falsy nothing is accumulated, so
    # genuine stays None and the prints below would raise AttributeError —
    # presumably callers always pass a positive max_len; verify.
    if not max_len:
        pass
    else:
        for val in dataset:
            update = False
            # Hard-coded split: first 20 signatures genuine, rest forged.
            g = val['values'][:20]
            f = val['values'][20:]

            # Keep signatures of length >= max_len, trim each to max_len,
            # flatten, then slice into overlapping windows.
            g = np.array(
                _.map(
                    _.filter(g, lambda i: len(i) >= max_len), lambda x: window(
                        trim(x, max_len).reshape(-1), window_size *
                        num_features, overlap * num_features, True)))
            f = np.array(
                _.map(
                    _.filter(f, lambda i: len(i) >= max_len), lambda x: window(
                        trim(x, max_len).reshape(-1), window_size *
                        num_features, overlap * num_features, True)))
            # First non-empty batch seeds the accumulator; later batches are
            # appended below.
            if genuine is None and g.size > 0:
                genuine = g
                update = True
            # NOTE(review): the `continue` here skips the append branch for
            # the iteration that seeds `forged`, and the `not update` guard
            # means g/f produced in a seeding iteration are not appended —
            # looks intentional but worth confirming against expected counts.
            if forged is None and f.size > 0:
                forged = f
                update = True
                continue
            if (g.size > 0 or f.size > 0) and not update:
                genuine = genuine if g.size == 0 else np.append(
                    genuine, g, axis=0)
                forged = forged if f.size == 0 else np.append(
                    forged, f, axis=0)

        print(genuine.shape
              )  # (num_samples, num_time_steps, window_size*num_features)
        print(
            f'Options\nWindow size --> {window_size}\nOverlap (in samples) --> {overlap}\nMax length 28,228\n{"— "*20}'
        )
        print(
            f'Number of time steps --> {genuine.shape[1]}\nNumber of features at each step --> {genuine.shape[2]}'
        )
        print(f'Number of genuine samples --> {genuine.shape[0]}')
        print(f'Number of forged samples --> {forged.shape[0]}')
        x = {"genuine": genuine, 'forged': forged}
        return x
Example no. 3
0
    def _to_release_note_lines(
        self,
        nodes: typing.List[TitleNode],
        level: int,
        rls_note_objs: typing.List[ReleaseNote],
    ) -> typing.List[str]:
        """Render release notes grouped under title nodes into output lines.

        Recurses through the node tree: inner nodes contribute a title line
        followed by their children's lines; leaf nodes contribute bullet
        points (the node title serves as the bullet-point tag).
        """
        lines = list()
        for node in nodes:
            # Keep only the notes whose matching field value belongs to
            # this node's identifier set.
            matching_notes = [
                note for note in rls_note_objs
                if get_or_call(note, node.matches_rls_note_field_path)
                in node.identifiers
            ]
            if not matching_notes:
                continue
            if node.nodes:
                # Inner node: recurse one level deeper, then emit the title
                # followed by the children's lines.
                child_lines = self._to_release_note_lines(
                    nodes=node.nodes,
                    level=level + 1,
                    rls_note_objs=matching_notes,
                )
                lines.append(self._title(node, level))
                lines.extend(child_lines)
            else:
                # Leaf node: title is used as bullet point tag -> no need
                # for an additional title line.
                lines.extend(
                    self._to_bullet_points(
                        tag=node.title,
                        rls_note_objs=matching_notes,
                    ))

        return lines
Example no. 4
0
    def __search_deep(self, signal: S):
        """Search this store for `signal`, then recursively all child stores.

        Returns the value from the current store if present; otherwise a
        flattened list of all non-None results gathered from children, or
        None (implicitly) when nothing matched anywhere.
        """
        value = self.__search_current(signal)
        if value is not None:
            return value

        # gather all value from children if it cannot be found in current store
        # (name mangling makes store.__search_deep resolve only inside this class)
        values = _.filter(
            _.map(self.children, lambda store: store.__search_deep(signal)),
            # fix: identity comparison with None (`is not None`), not `!= None`
            lambda x: x is not None)
        if values:
            return _.flatten(values)
Example no. 5
0
    def __match_list(self, template_list, data_list, path, current_store):
        """Match every template element against every data element.

        Each (template, data) pair is matched in a fresh child store; child
        stores that end up with no values are pruned afterwards.
        """
        # To handle xmltodict, if the template request a list, but the data isn't one,
        # make the data the only element in the list
        if not isinstance(data_list, self.list_like):
            data_list = [data_list]

        # Cartesian product: each template index is tried against every
        # data element, extending the path with the template index.
        for idx, template in enumerate(template_list):
            for data in data_list:
                self.match(template, data, self.__next_path(path, str(idx)),
                           self.__new_store(current_store))
        # When no processing happen (e.g: static matching), the store will be empty
        # clean empty store created.
        current_store.children = _.filter(current_store.children,
                                          lambda x: bool(x.values))
Example no. 6
0
    def reachable_release_tags_from_commit(
            self, repo: git.Repo,
            commit: git.objects.Commit) -> typing.List[str]:
        '''Returns a list of release-tags whose tagged commits are ancestors of the given commit.

        The returned list is sorted in descending order, putting the greatest reachable tag first.

        Falls back to the repository's root commit hexsha when no release tag
        is reachable (NOTE: that entry is a commit sha, not a tag name —
        callers must tolerate this). Fails when the repository has multiple
        root commits.
        '''
        # Mapping of commit hexsha -> tag name, built elsewhere.
        tags = self.release_tags()

        # Breadth-first walk over ancestors, starting at `commit`.
        visited = set()
        queue = list()
        queue.append(commit)
        visited.add(commit.hexsha)

        reachable_tags = list()

        while queue:
            commit = queue.pop(0)
            if commit.hexsha in tags:
                reachable_tags.append(tags[commit.hexsha])
            not_visited_parents = _.filter(
                commit.parents,
                lambda parent_commit: parent_commit.hexsha not in visited)
            if not_visited_parents:
                queue.extend(not_visited_parents)
                visited |= set(
                    _.map(not_visited_parents, lambda commit: commit.hexsha))

        # Greatest semver first.
        reachable_tags.sort(key=lambda t: version.parse_to_semver(t),
                            reverse=True)

        if not reachable_tags:
            logger.warning('no release tag found, falling back to root commit')
            # `commit` now holds the last BFS commit processed; any rev in
            # the walked history shares the same root(s).
            root_commits = repo.iter_commits(rev=commit, max_parents=0)
            root_commit = next(root_commits, None)
            if not root_commit:
                fail(
                    f'could not determine root commit from rev {commit.hexsha}'
                )
            # A second root commit means the range is ambiguous.
            if next(root_commits, None):
                fail(
                    'cannot determine range for release notes. Repository has multiple root '
                    'commits. Specify range via commit_range parameter.')
            reachable_tags.append(root_commit.hexsha)

        return reachable_tags
Example no. 7
0
def reachable_release_tags_from_commit(github_helper: GitHubRepositoryHelper,
                                       repo: git.Repo,
                                       commit: git.objects.Commit) -> [str]:
    """Return release tags whose tagged commits are ancestors of `commit`.

    Sorted descending (greatest version first). Falls back to the
    repository's root commit hexsha when no release tag is reachable
    (NOTE: that entry is a commit sha, not a tag name — callers must
    tolerate this). Fails when the repository has multiple root commits.
    """
    # Mapping of commit hexsha -> tag name.
    tags = release_tags(github_helper, repo)

    # Breadth-first walk over ancestors, starting at `commit`.
    visited = set()
    queue = list()
    queue.append(commit)
    visited.add(commit.hexsha)

    reachable_tags = list()

    while queue:
        commit = queue.pop(0)
        if commit.hexsha in tags:
            reachable_tags.append(tags[commit.hexsha])
        not_visited_parents = _.filter(
            commit.parents,
            lambda parent_commit: parent_commit.hexsha not in visited)
        if not_visited_parents:
            queue.extend(not_visited_parents)
            visited |= set(
                _.map(not_visited_parents, lambda commit: commit.hexsha))

    # Greatest version first.
    reachable_tags.sort(key=lambda t: parse_version_info(t), reverse=True)

    if not reachable_tags:
        warning('no release tag found, falling back to root commit')
        # `commit` now holds the last BFS commit processed; any rev in the
        # walked history shares the same root(s).
        root_commits = repo.iter_commits(rev=commit, max_parents=0)
        root_commit = next(root_commits, None)
        if not root_commit:
            fail('could not determine root commit from rev {rev}'.format(
                rev=commit.hexsha))
        # A second root commit means the range is ambiguous.
        if next(root_commits, None):
            fail(
                'cannot determine range for release notes. Repository has multiple root commits. '
                'Specify range via commit_range parameter.')
        reachable_tags.append(root_commit.hexsha)

    return reachable_tags
    # NOTE(review): this is the body of a loop whose header is outside this
    # view — `i` is presumably a user id iterated over elsewhere; verify.
    # Loads 40 signature files per user (U{i}S1..U{i}S40) and tags the user
    # with a language based on membership in the ENGLISH set.
    processed.append({
        'user':
        i,
        'values': [
            np.loadtxt(f'{DATASET_PATH}/U{i}S{x}.TXT', skiprows=1)
            for x in range(1, 41)
        ],
        #  'values': [np.loadtxt('{}/U{}S{}.TXT'.format(DATASET_PATH,i,x), skiprows=1) for x in range(1,41)],
        'language':
        'english' if i in ENGLISH else 'chinese'
    })

# Compute per-language minimum and maximum signature lengths over `processed`.
min_length = defaultdict(lambda: float('inf'))
max_length = defaultdict(lambda: float('-inf'))
for lang in langs:
    for val in _.filter(processed, lambda x: x['language'] == lang):
        curr_min = min(len(x) for x in val['values'])
        curr_max = max(len(x) for x in val['values'])
        if curr_min < min_length[lang]:
            min_length[lang] = curr_min
        # fix: compare against max_length (was min_length), which let
        # max_length miss updates / update on the wrong condition.
        if curr_max > max_length[lang]:
            max_length[lang] = curr_max

print(f'{len(processed)} signatures loaded\n\n')
print(
    f'English min length: {min_length["english"]}\nChinese min length: {min_length["chinese"]}\n\n'
)
print(
    f'English max length: {max_length["english"]}\nChinese max length: {max_length["chinese"]}'
)
 def get_public_properties(cls):
     """Return attribute names that start with 'auth_' and end with '_id' or '_icon'."""
     return [
         name for name in set(dir(cls))
         if name.startswith('auth_') and name.endswith(('_id', '_icon'))
     ]
Example no. 10
0
 def get_public_properties(cls):
     """Return attribute names beginning with 'auth_' and ending with '_id' or '_icon'.

     (The previous docstring claimed only '_id' suffixes; the code also
     accepts '_icon'.)
     """
     return _.filter(
         set(dir(cls)), lambda x: x.startswith('auth_') and
         (x.endswith('_id') or x.endswith('_icon')))
Example no. 11
0
 def build_keyring(self):
     """Rebuild the ubuntu-keyring package with this app's GPG key added.

     Sources the ubuntu-keyring package, re-exports the archive keyring with
     our key appended, patches the SHA512 checksum file, rebuilds the .deb,
     and copies the results into the mount/filesystem trees. Heavy side
     effects: shell commands, os.chdir, and filesystem writes throughout.
     """
     c = self.app.conf
     log = self.app.log
     s = self.app.services
     spinner = self.app.spinner
     # Start from a clean keyring working directory.
     if path.isdir(c.paths.keyring):
         shutil.rmtree(c.paths.keyring)
     os.makedirs(c.paths.keyring)
     s.util.chown(c.paths.keyring)
     os.chdir(c.paths.keyring)
     try:
         command = 'apt-get source ubuntu-keyring'
         log.debug('command: ' + command)
         stdout = check_output(command, stderr=STDOUT,
                               shell=True).decode('utf-8')
         log.debug(stdout)
         s.util.chown(c.paths.keyring)
     except CalledProcessError as err:
         # DNS-resolution failure in apt output means we are offline.
         matches = re.findall(r'Temporary\sfailure\sresolving',
                              err.output.decode('utf-8'))
         if len(matches) > 0:
             spinner.fail('no internet connection')
         if c.debug:
             if err.output:
                 log.error(err.output.decode('utf-8'))
             raise err
         exit(1)
     # Find the unpacked source directory. NOTE(review): os.path.isdir gets
     # bare names from listdir, so this only works because the CWD is still
     # c.paths.keyring at this point — fragile if the chdir above moves.
     ubuntu_keyring_path = _.filter(os.listdir(c.paths.keyring),
                                    os.path.isdir)
     if len(ubuntu_keyring_path) > 0:
         ubuntu_keyring_path = path.join(c.paths.keyring,
                                         ubuntu_keyring_path[0])
     else:
         raise Exception('failed to source ubuntu-keyring')
     keyrings_path = path.join(ubuntu_keyring_path, 'keyrings')
     os.chdir(keyrings_path)
     # Import the stock archive keyring, then re-export it with our key added.
     s.util.subproc('gpg --import < ' +
                    path.join(keyrings_path, 'ubuntu-archive-keyring.gpg'))
     gpg_key = self.get_key()
     s.util.subproc('gpg --export ' + ' '.join(self.ubuntu_keys) + ' ' +
                    gpg_key.pub.key.short + ' > ' +
                    path.join(keyrings_path, 'ubuntu-archive-keyring.gpg'))
     os.chdir(ubuntu_keyring_path)
     # Checksum of the re-exported keyring, to patch into SHA512SUMS.txt.asc.
     stdout = s.util.subproc(
         'cat ' + path.join(keyrings_path, 'ubuntu-archive-keyring.gpg') +
         ' | sha512sum')
     sha512sum = stdout.split(' ')[0]
     lines = []
     with open(path.join(ubuntu_keyring_path, 'SHA512SUMS.txt.asc'),
               'r') as f:
         for line in f.readlines():
             # Replace the old hash preceding the keyring filename.
             line = re.sub(
                 r'(\w+)(?=\s+keyrings/ubuntu-archive-keyring.gpg)',
                 sha512sum, line)
             lines.append(line)
     with open(path.join(ubuntu_keyring_path, 'SHA512SUMS.txt.asc'),
               'w') as f:
         f.writelines(lines)
     self.app.spinner.stop()
     # Rebuild the package, signed with our key.
     s.util.subproc('dpkg-buildpackage -rfakeroot -m"' + gpg_key.name +
                    ' <' + gpg_key.email + '>" -k' + gpg_key.pub.key.short)
     self.app.spinner.start()
     # Locate the freshly built .deb in the keyring directory.
     keyring_deb_path = _.filter(os.listdir(c.paths.keyring),
                                 lambda x: x[len(x) - 4:] == '.deb')
     if len(keyring_deb_path) > 0:
         keyring_deb_path = path.join(c.paths.keyring, keyring_deb_path[0])
     else:
         raise Exception('failed to build ubuntu-keyring')
     for deb_path in glob.glob(
             path.join(c.paths.keyring, 'ubuntu-keyring*deb')):
         shutil.copy(deb_path,
                     path.join(c.paths.mount, 'pool/main/u/ubuntu-keyring'))
     # NOTE(review): the var/lib/apt/keyrings check/creation below is
     # duplicated verbatim a few lines later.
     if not path.isdir(path.join(c.paths.filesystem,
                                 'var/lib/apt/keyrings')):
         os.makedirs(path.join(c.paths.filesystem, 'var/lib/apt/keyrings'))
     if not path.isdir(path.join(c.paths.filesystem, 'etc/apt')):
         os.makedirs(path.join(c.paths.filesystem, 'etc/apt'))
     if not path.isdir(path.join(c.paths.filesystem,
                                 'var/lib/apt/keyrings')):
         os.makedirs(path.join(c.paths.filesystem, 'var/lib/apt/keyrings'))
     # NOTE(review): these copy the built .deb file itself onto .gpg-named
     # destinations (keyring/trusted.gpg paths) — looks suspicious; confirm
     # whether the intent was to copy the extracted keyring instead.
     shutil.copyfile(
         keyring_deb_path,
         path.join(c.paths.filesystem,
                   'usr/share/keyrings/ubuntu-archive-keyring.gpg'))
     shutil.copyfile(keyring_deb_path,
                     path.join(c.paths.filesystem, 'etc/apt/trusted.gpg'))
     shutil.copyfile(
         keyring_deb_path,
         path.join(c.paths.filesystem,
                   'var/lib/apt/keyrings/ubuntu-archive-keyring.gpg'))
     # Restore the original working directory.
     os.chdir(c.paths.cwd)