Code example #1
def merge(input, output):
    filepath_to_package_name = lambda filepath: filepath[filepath.rindex(
        os.sep) + 1:filepath.rindex('.json')]

    # List rules
    input_rules = list_filter(
        is_app_rules,
        list_map(lambda item: input + os.sep + item, os.listdir(input)))
    existing_packs = list_map(
        filepath_to_package_name,
        list_filter(
            is_app_rules,
            list_map(lambda item: output + os.sep + 'apps' + os.sep + item,
                     os.listdir(output + os.sep + 'apps'))))

    skipped_file = 0
    finished_file = 0
    for input_rule in input_rules:
        package_name = filepath_to_package_name(input_rule)
        # Skip this rule if it already exists in the output path.
        if package_name in existing_packs:
            skipped_file += 1
        else:
            shutil.copy(
                input_rule,
                output + os.sep + 'apps' + os.sep + package_name + '.json')
            finished_file += 1
    print('Finished merging. Skipped %d files.'
          ' Copied %d files.' % (skipped_file, finished_file))
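
All of these examples rely on the helpers list_map and list_filter, whose definitions are not part of the excerpts. Judging from how they are called, they appear to be simple list-returning wrappers around the built-in map and filter; a minimal sketch under that assumption:

# Minimal sketch, assuming list_map/list_filter are plain wrappers that
# return lists (their real definitions are not shown in these excerpts).
def list_map(fn, iterable):
    return list(map(fn, iterable))


def list_filter(pred, iterable):
    return list(filter(pred, iterable))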
Code example #2
def close_existing_rules_issues(github, rules_path):
    repo = github.get_repo(ISSUE_REPO)

    # Get only the issues created by the auto wizard
    issues_list = repo.get_issues(state='open')
    issues = []
    # The default page size is 30; round up so the last partial page is included.
    for i in range(0, (issues_list.totalCount + 29) // 30):
        for issue in issues_list.get_page(i):
            if issue.title.startswith(
                    '[New rules request][AUTO]'
            ) and not is_issue_need_discussion(issue):
                issues.append(issue)

    # Get the package names of the existing rules
    package_names = list_map(
        lambda item: item[item.rindex(os.sep) + 1:item.rindex('.json')],
        list_filter(
            is_app_rules,
            list_map(lambda item: rules_path + os.sep + 'apps' + os.sep + item,
                     os.listdir(rules_path + os.sep + 'apps'))))

    print('Start closing issues...')
    count = 0
    for issue in issues:
        if issue.title[issue.title.rindex(' ') + 1:] in package_names:
            issue.edit(state="closed")
            count += 1
    if count == 0:
        print('No issues to close.')
    else:
        print('Closed %d issues.' % count)
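
As a side note, PyGithub's get_issues returns a PaginatedList that can be iterated directly, which makes the manual page arithmetic above unnecessary; a sketch of the same filtering loop written that way:

# Alternative sketch: iterating the PaginatedList lets PyGithub handle
# pagination itself, so no page-count arithmetic is needed.
issues = []
for issue in repo.get_issues(state='open'):
    if issue.title.startswith('[New rules request][AUTO]') \
            and not is_issue_need_discussion(issue):
        issues.append(issue)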
Code example #3
File: kmeans_elbow.py  Project: Gingaless/rkg-torch
def find_KM_elbow_by_thr(data, ini_thr_cost_ratio, factor_function, data_mapping_function=None, **KM_kwargs):
    assert len(data) > 0
    _data = list_map(data_mapping_function, data) if data_mapping_function is not None else data
    min_of_factor = np.min(list_map(factor_function, data))
    for i in range(1, len(data)+1):
        KM = fit_KM(_data, n_clusters=i, **KM_kwargs)
        if KM.inertia_/min_of_factor < ini_thr_cost_ratio:
            return KM
    raise Exception('No k value has a cost ratio below the initial threshold cost ratio.')
Code example #4
def merge_verified_list(path):
    app_list = []

    # Load the original data
    with codecs.open(path + os.sep + 'verified_apps.json',
        mode='r',
        encoding='utf-8') as f:
        app_list = list_map(
            lambda item: item['package_name'],
            json.loads(f.read())
        )
    print('Original data count: %d' % len(app_list))

    # Load new verified apps
    with codecs.open(path + os.sep + 'verified_apps.output.json',
        mode='r',
        encoding='utf-8') as f:
        added_count = 0
        for new_item in list_map(
            lambda item: item['package_name'],
            json.loads(f.read())):
            # Filter out existing items
            if new_item not in app_list:
                app_list.append(new_item)
                added_count += 1
    print('Added %d items from verified_apps.output.json.'
          ' Now it will be deleted.' % added_count)
    os.remove(path + os.sep + 'verified_apps.output.json')

    # Sort and output merged data
    app_list.sort()
    with codecs.open(path + os.sep + 'verified_apps.json',
        mode='w',
        encoding='utf-8') as out:
        out.write(json.dumps(
            list_map(
                lambda package_name: {
                    'package_name': package_name
                },
                app_list
            ),
            indent=2,
            ensure_ascii=False
        ))
    print('Finished merging verified_apps.json.')
Code example #5
def add_ids_for_observers(path):
    rules = list_filter(
        is_app_rules,
        list_map(lambda item: path + os.sep + 'apps' + os.sep + item,
                 os.listdir(path + os.sep + 'apps')))

    # Update rules
    for rule in rules:
        model = {}
        try:
            with codecs.open(rule, mode='r', encoding='utf-8') as f:
                model = json.loads(f.read())
            changed = False
            if 'observers' in model:
                for observer in model['observers']:
                    if 'id' not in observer:
                        temp_id = observer['description']
                        count = 0
                        while contains_id_in_observer(model['observers'],
                                                      temp_id + '_' + str(count)):
                            count += 1
                        observer['id'] = temp_id + '_' + str(count)
                        changed = True
            if changed:
                with codecs.open(rule, mode='w', encoding='utf-8') as f:
                    f.write(json.dumps(model, indent=2, ensure_ascii=False))
        except Exception as e:
            print('Failed to update %s: %s' % (rule, e))
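
The helper contains_id_in_observer is not included in the excerpt; from the call site it presumably reports whether any observer in the list already uses a given id. A minimal sketch under that assumption:

# Hypothetical stand-in for the helper used above: True if any observer
# already carries the given id.
def contains_id_in_observer(observers, target_id):
    return any(observer.get('id') == target_id for observer in observers)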
Code example #6
def convert(input):
    rules_path = input
    rules = list_filter(
        is_app_rules,
        list_map(lambda item: rules_path + os.sep + item,
                 os.listdir(rules_path)))
    print('Found rules count: %d' % (len(rules)))

    # Make output path
    output_path = input + os.sep + 'output'
    if os.path.isfile(output_path):
        os.remove(output_path)
    if not os.path.exists(output_path):
        os.mkdir(output_path)
    print('Output to ' + output_path)

    # Convert and write out results
    for rule in rules:
        with codecs.open(rule, mode='r', encoding='utf-8') as f:
            model = json.loads(f.read(), object_pairs_hook=OrderedDict)
            data_converter.convert_old_data(model)

            with codecs.open(output_path + os.sep + model['package'] + '.json',
                             mode='w',
                             encoding='utf-8') as out:
                out.write(json.dumps(model, indent=2, ensure_ascii=False))
    print('Finished converting.')
Code example #7
def make_verfied_list(path):
    rules = list_filter(
        is_app_rules,
        list_map(
            lambda item: path + os.sep + 'apps' + os.sep + item,
            os.listdir(path + os.sep + 'apps')
        )
    )

    # Get all verified apps package name
    verified_apps = []
    for rule in rules:
        with codecs.open(rule, mode='r', encoding='utf-8') as f:
            print(rule)
            model = json.loads(f.read())
            if model.get('verified'):
                verified_apps.append({
                    'package_name': model['package']
                })
    print('Found verified apps count: %d' % (len(verified_apps)))

    # Write to output.json
    with codecs.open(
        path + os.sep + 'verified_apps.output.json',
        mode='w',
        encoding='utf-8') as out:
        print('Output to ' + path + os.sep + 'verified_apps.output.json')
        out.write(json.dumps(verified_apps, indent=2, ensure_ascii=False))
        print('Finished making list.')
Code example #8
File: kmeans_elbow.py  Project: Gingaless/rkg-torch
def find_KM_elbow_by_log_decay_argmin(data, ini_thr_cost_ratio, factor_function, data_mapping_function=None, **KM_kwargs):
    
    if len(data) < 3:
        return find_KM_elbow_by_thr(data, ini_thr_cost_ratio, factor_function, data_mapping_function=data_mapping_function, **KM_kwargs)
    else:
        _data = list_map(data_mapping_function, data) if data_mapping_function is not None else data
        KMs = [fit_KM(_data, n_clusters=i, **KM_kwargs) for i in range(1, len(_data)+1)]
        log_costs = np.log(arr_map(lambda KM : KM.inertia_, KMs))
        log_costs = [x for x in log_costs if x != -np.inf]
        log_decay = [log_costs[i] - log_costs[i-1] for i in range(1, len(log_costs))]
        # "x != np.nan" is always True, so nan values must be filtered with
        # np.isnan; also drop any -inf decay values.
        log_decay = [x for x in log_decay if not np.isnan(x) and x != -np.inf]
        return KMs[np.argmin(log_decay)]
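
Examples #3 and #8 also depend on fit_KM and arr_map, which are not shown. Presumably fit_KM fits a scikit-learn KMeans model and returns the fitted estimator (exposing .inertia_), and arr_map is an array-returning map; a minimal sketch under those assumptions:

# Hypothetical stand-ins for the project helpers used by examples #3 and #8,
# assuming fit_KM wraps scikit-learn's KMeans and arr_map returns an ndarray.
import numpy as np
from sklearn.cluster import KMeans


def arr_map(fn, iterable):
    return np.array(list(map(fn, iterable)))


def fit_KM(data, n_clusters=2, **kwargs):
    # Fit KMeans with the requested number of clusters and return the
    # fitted estimator, which exposes .inertia_.
    return KMeans(n_clusters=n_clusters, **kwargs).fit(data)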