Code Example #1
def merge(input, output):
    def filepath_to_package_name(filepath):
        return filepath[filepath.rindex(os.sep) + 1:filepath.rindex('.json')]

    # List input rule files and the package names already present in the output
    input_rules = list_filter(
        is_app_rules,
        list_map(lambda item: input + os.sep + item, os.listdir(input)))
    existing_packs = list_map(
        filepath_to_package_name,
        list_filter(
            is_app_rules,
            list_map(lambda item: output + os.sep + 'apps' + os.sep + item,
                     os.listdir(output + os.sep + 'apps'))))

    skipped_file = 0
    finished_file = 0
    for input_rule in input_rules:
        package_name = filepath_to_package_name(input_rule)
        # Skip this rule if its package already exists in the output path.
        if package_name in existing_packs:
            skipped_file += 1
        else:
            shutil.copy(
                input_rule,
                output + os.sep + 'apps' + os.sep + package_name + '.json')
            finished_file += 1
    print('Finished merging. Skipped %d files. Copied %d files.' %
          (skipped_file, finished_file))
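
These examples rely on a handful of project helpers (list_filter, list_map, list_filter_not, is_app_rules) and standard-library imports that are defined elsewhere in the repository. A minimal sketch of what they presumably look like, purely as an assumption so the snippets can be read on their own:

# Shared imports assumed by these examples.
import codecs
import json
import os
import shutil
from collections import OrderedDict

# Assumed helper definitions (sketch only); the real ones, along with
# ISSUE_REPO and data_converter, live elsewhere in the project.
def list_map(func, iterable):
    # map() returns a lazy iterator in Python 3, so wrap it in a list.
    return list(map(func, iterable))

def list_filter(func, iterable):
    # filter() returns a lazy iterator in Python 3, so wrap it in a list.
    return list(filter(func, iterable))

def list_filter_not(func, iterable):
    # Keep only the items for which func returns a falsy value.
    return [item for item in iterable if not func(item)]

def is_app_rules(filepath):
    # Assumption: a rule file is a regular file ending in .json.
    return os.path.isfile(filepath) and filepath.endswith('.json')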
Code Example #2
def close_existing_rules_issues(github, rules_path):
    repo = github.get_repo(ISSUE_REPO)

    # Get only the issues created by the auto wizard
    issues = list_filter(
        lambda issue: issue.title.startswith('[New rules request][AUTO]'),
        repo.get_issues(state='open').get_page(0)
    )

    # Get the package names of existing rules
    package_names = list_map(
        lambda item: item[item.rindex(os.sep) + 1:item.rindex('.json')],
        list_filter(
            is_app_rules,
            list_map(lambda item: rules_path + os.sep + 'apps' + os.sep + item,
                     os.listdir(rules_path + os.sep + 'apps'))))

    print('Start closing issues...')
    count = 0
    for issue in issues:
        if issue.title[issue.title.rindex(' ') + 1:] in package_names:
            issue.edit(state="closed")
            count += 1
    if count == 0:
        print('No issues to close.')
    else:
        print('Closed %d issues.' % count)
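
The GitHub helpers expect an authenticated PyGithub client and assume a module-level ISSUE_REPO constant naming the issue repository. A hypothetical call could look like this (the token variable and rules path are placeholders):

# Hypothetical usage sketch; the token variable and rules path are placeholders,
# and ISSUE_REPO is assumed to be defined at module level (e.g. 'owner/repo').
import os
from github import Github

github = Github(os.environ['GITHUB_TOKEN'])
close_existing_rules_issues(github, os.getcwd() + os.sep + 'rules')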
Code Example #3
def add_ids_for_observers(path):
    rules = list_filter(
        is_app_rules,
        list_map(lambda item: path + os.sep + 'apps' + os.sep + item,
                 os.listdir(path + os.sep + 'apps')))

    # Update rules
    for rule in rules:
        model = {}
        try:
            with codecs.open(rule, mode='r', encoding='utf-8') as f:
                model = json.loads(f.read())
            changed = False
            if 'observers' in model:
                for observer in model['observers']:
                    if 'id' not in observer:
                        temp_id = observer['description']
                        count = 0
                        while contains_id_in_observer(
                                model['observers'], temp_id + '_' + str(count)):
                            count += 1
                        observer['id'] = temp_id + '_' + str(count)
                        changed = True
            if changed:
                with codecs.open(rule, mode='w', encoding='utf-8') as f:
                    f.write(json.dumps(model, indent=2, ensure_ascii=False))
        except Exception as e:
            print('Failed to update %s: %s' % (rule, e))
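
contains_id_in_observer is another helper that is not shown in these snippets; given how it is called, a plausible sketch (an assumption, not the project's actual code) simply checks whether any observer already carries the candidate id:

# Assumed helper: True if any observer in the list already uses this id.
def contains_id_in_observer(observers, target_id):
    return any(observer.get('id') == target_id for observer in observers)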
Code Example #4
def convert(input):
    rules_path = input
    rules = list_filter(
        is_app_rules,
        list_map(lambda item: rules_path + os.sep + item,
                 os.listdir(rules_path)))
    print('Found rules count: %d' % (len(rules)))

    # Make output path
    output_path = input + os.sep + 'output'
    if os.path.isfile(output_path):
        os.remove(output_path)
    if not os.path.exists(output_path):
        os.mkdir(output_path)
    print('Output to ' + output_path)

    # Convert and write out results
    for rule in rules:
        with codecs.open(rule, mode='r', encoding='utf-8') as f:
            model = json.loads(f.read(), object_pairs_hook=OrderedDict)
            data_converter.convert_old_data(model)

            with codecs.open(output_path + os.sep + model['package'] + '.json',
                             mode='w',
                             encoding='utf-8') as out:
                out.write(json.dumps(model, indent=2, ensure_ascii=False))
    print('Finished converting.')
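
data_converter.convert_old_data is project code that is not reproduced here. Its return value is ignored above but reused in Code Example #6, which suggests it both mutates the rule model in place and returns it; a sketch of that assumed contract:

# Assumed contract only; the real implementation lives in the data_converter
# module and performs the actual old-to-new field migration.
def convert_old_data(model):
    # ... rewrite old-format fields of `model` in place ...
    return model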
Code Example #5
def make_verfied_list(path):
    rules = list_filter(
        is_app_rules,
        list_map(
            lambda item: path + os.sep + 'apps' + os.sep + item,
            os.listdir(path + os.sep + 'apps')
        )
    )

    # Get all verified apps package name
    verified_apps = []
    for rule in rules:
        with codecs.open(rule, mode='r', encoding='utf-8') as f:
            print(rule)
            model = json.loads(f.read())
            if model.get('verified'):
                verified_apps.append({
                    'package_name': model['package']
                })
    print('Found verified apps count: %d' % (len(verified_apps)))

    # Write the list to verified_apps.output.json
    output_file = path + os.sep + 'verified_apps.output.json'
    with codecs.open(output_file, mode='w', encoding='utf-8') as out:
        print('Output to ' + output_file)
        out.write(json.dumps(verified_apps, indent=2, ensure_ascii=False))
    print('Finished making list.')
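
The rule files themselves are not shown in these examples; based on the fields the scripts read and write (package, verified, and observers entries with description and id), the parsed model presumably carries at least the following shape, written here as the Python dict that json.loads would return, with placeholder values:

# Illustrative only: the fields these scripts touch, with placeholder values.
example_rule = {
    'package': 'com.example.app',    # used for file names and issue matching
    'verified': True,                # read by make_verfied_list
    'observers': [                   # ids are filled in by add_ids_for_observers
        {'description': 'example observer', 'id': 'example observer_0'}
    ]
}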
Code Example #6
def download_issues(github):
    repo = github.get_repo(ISSUE_REPO)

    # Get only the issues created by the auto wizard
    issues = list_filter(
        lambda issue: issue.title.startswith('[New rules request][AUTO]'),
        repo.get_issues(state='open').get_page(0)
    )
    issues = list_filter_not(is_issue_need_discussion, issues)

    # Make output path
    output_path = os.getcwd() + os.sep + 'output'
    if os.path.isfile(output_path):
        os.remove(output_path)
    if not os.path.exists(output_path):
        os.mkdir(output_path)
    print('Output to ' + output_path)

    total = len(issues)
    current = 0
    def print_progress(a, b):
        print('Start downloading rules... %d/%d' % (a, b), end='')
    CODE_BLOCK = '```'

    print_progress(current, total)

    # Download json output from issues
    for issue in issues:
        current += 1

        # Get information from issue
        package_name = issue.title[issue.title.rindex(' ') + 1:]
        print(' Package: ' + package_name, end='\r')

        if not os.path.isfile(output_path + os.sep + package_name + '.json'):
            body = repo.get_issue(issue.number).body
            content = body[body.index(CODE_BLOCK) +
                           len(CODE_BLOCK):body.rindex(CODE_BLOCK)]

            # Try to convert old data
            try:
                content = json.dumps(data_converter.convert_old_data(
                    json.loads(content, object_pairs_hook=OrderedDict)),
                                     indent=2,
                                     ensure_ascii=False)
            except Exception:
                # If conversion fails, keep the issue content as-is.
                pass

            # Add to cache
            with codecs.open(output_path + os.sep + package_name + '.json',
                             mode='w',
                             encoding='utf-8') as f:
                f.write(content)

        print_progress(current, total)

    # Done downloading
    print('\nDownloaded %d rules' % (current))

    print('\nFinished downloading issues. Remember to check if rules are '
          'valid.')
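
download_issues parses a very specific issue layout: the package name is whatever follows the last space in the title, and the rule JSON is taken between the first and last ``` markers of the body (so a language tag after the opening marker would be included in the extracted text). Illustrative placeholders for a title and body the code would accept are sketched below; is_issue_need_discussion is a further project helper, not shown here, that filters out issues flagged for discussion.

# Illustrative only: the title/body shape the parsing above expects.
example_title = '[New rules request][AUTO] com.example.app'
example_body = (
    'Submitted by the rules wizard.\n'
    '```\n'
    '{"package": "com.example.app"}\n'
    '```\n'
)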