Example #1
import os
import time

import cv2
import hilens


def run(work_path):
    # System initialization; the parameter must match the verification value
    # entered when the skill was created
    hilens.init("driving")

    # Initialize the built-in camera and the HDMI display.
    # In HiLens Studio, VideoCapture reads test/camera0.mp4 by default when no
    # argument is given; on a HiLens Kit it reads the local camera instead.
    camera = hilens.VideoCapture()
    display = hilens.Display(hilens.HDMI)

    # Initialize the model
    model_path = os.path.join(work_path, 'model/yolo3.om')
    driving_model = hilens.Model(model_path)

    frame_index = 0
    json_bbox_list = []
    json_data = {'info': 'det_result'}

    while True:
        frame_index += 1
        try:
            time_start = time.time()

            # 1. Device input #####
            input_yuv = camera.read()  # read one frame (YUV NV21 format)

            # 2. Data preprocessing #####
            img_bgr = cv2.cvtColor(input_yuv,
                                   cv2.COLOR_YUV2BGR_NV21)  # convert to BGR
            img_preprocess, img_w, img_h = preprocess(img_bgr)  # resize to the model input size

            # 3. Model inference #####
            output = driving_model.infer([img_preprocess.flatten()])

            # 4. Get the detection results #####
            bboxes = get_result(output, img_w, img_h)

            # 5-1. [For contest submission] Write the results to a JSON file #####
            if len(bboxes) > 0:
                json_bbox = convert_to_json(bboxes, frame_index)
                json_bbox_list.append(json_bbox)

            # 5-2. [For debugging] Output the results to the simulator #####
            img_bgr = draw_boxes(img_bgr, bboxes)  # draw boxes on the image
            output_yuv = hilens.cvt_color(img_bgr, hilens.BGR2YUV_NV21)
            display.show(output_yuv)  # show on the display
            time_frame = 1000 * (time.time() - time_start)
            hilens.info('----- time_frame = %.2fms -----' % time_frame)

        except RuntimeError:
            print('last frame')
            break

    # Save the detection results
    hilens.info('write json result to file')
    result_filename = './result.json'
    json_data['result'] = json_bbox_list
    save_json_to_file(json_data, result_filename)

    hilens.terminate()
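
The helpers preprocess, get_result, draw_boxes, convert_to_json, and save_json_to_file come from the rest of the skill and are not shown. As a rough sketch of the JSON output side only (the bbox tuple layout used here is an assumption):

import json

def convert_to_json(bboxes, frame_index):
    # Assumed bbox layout: (xmin, ymin, xmax, ymax, confidence, label).
    return {
        'frame': frame_index,
        'bboxes': [{'xmin': float(b[0]), 'ymin': float(b[1]),
                    'xmax': float(b[2]), 'ymax': float(b[3]),
                    'confidence': float(b[4]), 'label': int(b[5])}
                   for b in bboxes]
    }

def save_json_to_file(json_data, filename):
    with open(filename, 'w') as f:
        json.dump(json_data, f, indent=4)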
Example #2
def find_pkgset():
    """
    The function which returns a list of binary packages for the given source
    package names.

    Input GET params:
        name * - source package name or list of packages
        task ** - number of task

    Output structure:
        branch
        sourcepkgname
        date
        packages
        version
        release
        disttag
        packager_email
        buildtime
        archs
     """
    server.url_logging()

    check_params = server.check_input_params()
    if check_params is not True:
        return check_params

    values = server.get_dict_values([('name', 's', 'pkg_name'), ('task', 'i')])
    if list(values.values()).count(None) > 1:
        return utils.json_str_error("One parameter only ('name'/'task').")

    if values['name']:
        pkg_ls = values['name'].split(',')
    else:
        g.connection.request_line = (QM.find_pkgset_get_package_names, {
            'task_id': values['task']
        })

        status, response = g.connection.send_request()
        if status is False:
            return response

        pkg_ls = utils.join_tuples(response)

    g.connection.request_line = (QM.find_pkgset_get_branch_with_pkgs, {
        'pkgs': tuple(pkg_ls)
    })

    status, response = g.connection.send_request()
    if status is False:
        return response

    param_ls = [
        'branch', 'sourcepkgname', 'pkgset_datetime', 'packages', 'version',
        'release', 'disttag', 'packager_email', 'buildtime', 'archs'
    ]

    return utils.convert_to_json(param_ls, response)
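
These endpoint functions share one pattern: validate the GET parameters, run a query through g.connection, and serialize the result rows with utils.convert_to_json. A hypothetical client call (the host and route path are assumptions, not taken from this code):

import requests

resp = requests.get('http://localhost:5000/find_pkgset',
                    params={'name': 'curl,bash'})
# JSON object keyed by row index, each row carrying the fields
# listed in the docstring (branch, sourcepkgname, date, ...)
print(resp.json())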
Example #3
def unpackaged_dirs():
    """
    The function of searching unpacked directories by maintainer name.

    Input GET params:
        pkgr * - maintainer name
        pkgset * - repository name
        arch - architecture of packages

    Output structure:
        package
        directory
        version
        release
        epoch
        packager
        email
        arch
    """
    server.url_logging()

    check_params = server.check_input_params()
    if check_params is not True:
        return check_params

    values = server.get_dict_values([('pkgr', 's'),
                                     ('pkgset', 's', 'repo_name'),
                                     ('arch', 's')])

    if not values['pkgr'] or not values['pkgset']:
        return get_helper(server.helper(request.path))

    parch = server.default_archs
    if values['arch']:
        parch = values['arch'].split(',')
        if 'noarch' not in parch:
            parch.append('noarch')

    g.connection.request_line = (QM.unpackaged_dirs_get_pkg_dirs, {
        'branch': values['pkgset'],
        'email': '{}@%'.format(values['pkgr']),
        'archs': tuple(parch)
    })

    status, response = g.connection.send_request()
    if status is False:
        return response

    js_keys = [
        'package', 'directory', 'version', 'release', 'epoch', 'packager',
        'email', 'arch'
    ]

    return utils.convert_to_json(js_keys, response)
Example #4
    def test_convert_to_json(self):
        values = [('elem1', 'elem2', 'elem3'),
                  ('elem1.1', 'elem2.1', 'elem3.1'),
                  ('elem1.2', 'elem2.2', 'elem3.2'), ]
        js = json.loads(
            utils.convert_to_json(['key1', 'key2', 'key3'], values)
        )

        assert 'elem1' == js['0']['key1']
        assert 'elem2.1' == js['1']['key2']
        assert 'elem3.2' == js['2']['key3']
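
This test pins down the contract of utils.convert_to_json: it zips a list of keys onto each result tuple and returns a JSON string keyed by the row index. A minimal sketch consistent with the assertions (not necessarily the project's actual implementation):

import json

def convert_to_json(keys, values):
    # Index each row by its position and map the column names onto it.
    return json.dumps(
        {str(i): dict(zip(keys, row)) for i, row in enumerate(values)},
        sort_keys=False)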
Example #5
    def read(self, cell_hash, frame_name, data_format=None, nrow=None):
        """
        Tell the selected backend to read the file, and filter if required.
        """
        data = self.store.read(cell_hash, frame_name)
        if data_format == "application/json":
            data = convert_to_json(data)
        elif data_format == "application/octet-stream":
            data = convert_to_arrow(data)
        if nrow:
            data = filter_data(data, nrow)
        return data
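
A hypothetical call (the object, hash, and frame name are placeholders): the MIME type selects the serializer, and nrow truncates the result via filter_data.

payload = reader.read('0f3a9c', 'results_frame',
                      data_format='application/json', nrow=50)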
Example #6
def dependent_packages():
    """
    The function of searching source packages whose binary packages depend on
    the given package.

    Input GET params:
        name * - name of package
        branch * - repository name

    Output structure:
        name
        version
        release
        epoch
        serial
        sourcerpm
        branch
        archs
    """
    server.url_logging()

    check_params = server.check_input_params()
    if check_params is not True:
        return check_params

    pname = server.get_one_value('name', 's', is_='pkg_name')
    if not pname:
        return get_helper(server.helper(request.path))

    pbranch = server.get_one_value('branch', 's', is_='repo_name')
    if not pbranch:
        message = 'Branch is a required parameter.'
        logger.debug(message)
        return utils.json_str_error(message)

    g.connection.request_line = (QM.dependent_packages_get_dependent_pkgs, {
        'name': pname,
        'branch': pbranch
    })

    status, response = g.connection.send_request()
    if status is False:
        return response

    js_keys = [
        'name', 'version', 'release', 'epoch', 'serial', 'sourcerpm', 'branch',
        'archs'
    ]

    return utils.convert_to_json(js_keys, response)
Example #7
def index():
    tree_data = convert_to_json(args.data)
    return render_template("index.html", tree_data=tree_data, title=args.title)
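
index here reads like a Flask view over a parsed argparse namespace. A minimal hosting sketch (the module layout, route path, and CLI flags are assumptions):

import argparse
from flask import Flask, render_template

app = Flask(__name__)

parser = argparse.ArgumentParser()
parser.add_argument('data', help='input file rendered into the tree')
parser.add_argument('--title', default='Tree')
args = parser.parse_args()

app.add_url_rule('/', 'index', index)  # register the view defined above

if __name__ == '__main__':
    app.run()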
Example #8
#!/bin/env python

import pickle
import utils
file = open('../database.pickle', 'rb')
database = pickle.load(file, encoding='latin1')
file.close()

utils.convert_to_json('../database.pickle', '../database.json')

# Put it in a nice table for easy parsing. Use semicolons to separate fields,
# making sure no individual field contains a semicolon, since that would
# break parsing.

outtext = ["#Hydration free energy datbase v0.52, 6/11/17.\n"]
outtext += [
    "#Semicolon-delimited text file with fields in the following format:\n"
]
outtext += [
    "# compound id (and file prefix); SMILES; iupac name (or alternative if IUPAC is unavailable or not parseable by OEChem); experimental value (kcal/mol); experimental uncertainty (kcal/mol); Mobley group calculated value (GAFF) (kcal/mol); calculated uncertainty (kcal/mol); experimental reference (original or paper this value was taken from); calculated reference; text notes.\n"
]

cids = list(database.keys())
cids.sort()
for cid in cids:
    notes = ''
    for line in database[cid]['notes']:
        if '\n' not in line:
            line += '  '
        notes += line.replace('\n', '  ')  # remove line breaks, as they would break the format here

    if ';' in notes:  # make sure there is no semicolon in the notes
        # Fix an issue where I used a semicolon
Example #9
def what_depends_build():
    """
    Searches for build dependencies.

    The function searches the build dependencies of a package, a list of
    packages, or the packages in a task. It also accepts parameters such as
    leaf and search depth.

    Input GET params:
        name * - package or list of packages
        task ** - task id
        branch (* - for 'name' only) - name of repository
        arch - package architectures
        leaf - assembly dependency chain
        deep - sorting depth
        dptype - type of package (source, binary, both)
        reqfilter - filter results by dependency on a binary package
        reqfilterbysrc - filter results by dependency on a source package
        finitepkg - topological tree leaves

    Output structure:
        name
        version
        release
        epoch
        serial_
        sourcerpm
        branch
        archs
        buildtime
        cycle
        requires
        acl
    """
    server.url_logging()

    check_params = server.check_input_params(source=1)
    if check_params is not True:
        return check_params

    pname = server.get_one_value('name', 's', is_='pkg_name')
    task_id = server.get_one_value('task', 'i')

    # dptype option
    depends_type_to_sql = {'source': (1, ), 'binary': (0, ), 'both': (1, 0)}

    depends_type = server.get_one_value('dptype', 's')
    if depends_type not in depends_type_to_sql:
        depends_type = 'both'

    sourcef = depends_type_to_sql[depends_type]

    message = None
    if pname and task_id:
        message = "Only one parameter package 'name' or build 'task'."
    elif not pname and not task_id:
        message = "Source package 'name' or build 'task' " \
                  "is require parameters."

    if message:
        logger.debug(message)
        return utils.json_str_error(message)

    pbranch = server.get_one_value('branch', 's', is_='repo_name')
    if pname and not pbranch:
        return get_helper(server.helper(request.path))

    arch = server.get_one_value('arch', 's')
    if arch:
        arch = [arch]
        if 'noarch' not in arch:
            arch.append('noarch')
    else:
        arch = ['x86_64', 'noarch']

    # tree leaf - show only build path between 'name' and 'leaf'
    leaf = server.get_one_value('leaf', 's', 'pkg_name')
    if leaf and task_id:
        return utils.json_str_error("'leaf' may be using with 'name' only.")

    # process this query for task
    if task_id:
        # get the branch name from task
        g.connection.request_line = (
            "SELECT DISTINCT branch FROM Tasks WHERE task_id = %(id)s", {
                'id': task_id
            })

        status, response = g.connection.send_request()
        if status is False:
            return response

        if not response:
            return utils.json_str_error('Unknown task id.')

        # branch from task
        pbranch = response[0][0]
        if pbranch.lower() == 'sisyphus':
            pbranch = 'Sisyphus'

        # get the packages hashes from Task
        g.connection.request_line = (
            "SELECT pkgs FROM Tasks WHERE task_id = %(id)s", {
                'id': task_id
            })

        status, response = g.connection.send_request()
        if status is False:
            return response

        # join list of tuples of tuples
        pkgs_hsh = ()
        for tp_package in response:
            for package in tp_package[0]:
                pkgs_hsh += (package, )

        # src packages from task
        g.connection.request_line = (QM.wds_get_src_from_task, {'id': task_id})

        status, response = g.connection.send_request()
        if status is False:
            return response

        input_pkgs = utils.join_tuples(response)

    # without task - get the packages name from URL
    else:
        input_pkgs = (pname, )

    # deep level for recursive requires search
    deep_level = server.get_one_value('deep', 'i')
    if not deep_level:
        deep_level = 1

    tmp_table_name = 'tmp_pkg_ls'

    # create tmp table with list of packages
    g.connection.request_line = \
        "CREATE TEMPORARY TABLE {tmp_table} (pkgname String)".format(
            tmp_table=tmp_table_name)

    status, response = g.connection.send_request()
    if status is False:
        return response

    # FIXME use package_deps module
    # base query - first iteration, build requires depth 1
    g.connection.request_line = (QM.wds_insert_build_req_deep_1.format(
        tmp_table=tmp_table_name), {
            'sfilter': sourcef,
            'pkgs': input_pkgs,
            'branch': pbranch,
            'union': list(input_pkgs)
        })

    status, response = g.connection.send_request()
    if status is False:
        return response

    max_allowed_depth = 10

    if deep_level > 1:
        if deep_level > max_allowed_depth:
            return utils.json_str_error(
                "Requires depth cannot exceed {}".format(max_allowed_depth))

        # sql wrapper for increase depth
        deep_wrapper = QM.wds_increase_depth_wrap.format(
            tmp_table=tmp_table_name)

        # process depth for every level and add results to pkg_ls
        for i in range(deep_level - 1):
            g.connection.request_line = (
                QM.wds_insert_result_for_depth_level.format(
                    wrapper=deep_wrapper, tmp_table=tmp_table_name), {
                        'sfilter': sourcef,
                        'branch': pbranch
                    })

            status, response = g.connection.send_request()
            if status is False:
                return response

    g.connection.request_line = (QM.wds_get_acl.format(
        tmp_table=tmp_table_name), {
            'branch': pbranch.lower()
        })

    status, response = g.connection.send_request()
    if status is False:
        return response

    # get package acl
    pkg_acl_dict = {}
    for pkg in response:
        pkg_acl_dict[pkg[0]] = pkg[1][0]

    tmp_table_pkg_dep = 'package_dependency'

    # create tmp table package - dependency
    g.connection.request_line = \
        """CREATE TEMPORARY TABLE {}
(
    pkgname String,
    reqname String
)""".format(tmp_table_pkg_dep)

    status, response = g.connection.send_request()
    if status is False:
        return response

    # get source dependencies
    if depends_type in ['source', 'both']:
        # populate the temporary table with package names and their source
        # dependencies
        g.connection.request_line = (QM.wds_insert_src_deps.format(
            tmp_deps=tmp_table_pkg_dep, tmp_table=tmp_table_name), {
                'branch': pbranch,
                'pkgs': list(input_pkgs)
            })

        status, response = g.connection.send_request()
        if status is False:
            return response

    # get binary dependencies
    if depends_type in ['binary', 'both']:
        # populate the temporary table with package names and their binary
        # dependencies
        g.connection.request_line = (QM.wds_insert_binary_deps.format(
            tmp_table=tmp_table_name, tmp_req=tmp_table_pkg_dep), {
                'branch': pbranch,
                'archs': tuple(arch)
            })

        status, response = g.connection.send_request()
        if status is False:
            return response

    # select all filtered package with dependencies
    g.connection.request_line = QM.wds_get_all_filtred_pkgs_with_deps

    status, response = g.connection.send_request()
    if status is False:
        return response

    pkgs_to_sort_dict = utils.tuplelist_to_dict(response, 1)

    if not pkgs_to_sort_dict:
        return json.dumps({})

    finitepkg = server.get_one_value('finitepkg', 'b', is_='pkg_name')

    if finitepkg:
        all_dependencies = []
        for pkg, deps in pkgs_to_sort_dict.items():
            for dep in deps:
                if dep not in all_dependencies:
                    all_dependencies.append(dep)

        g.connection.request_line = \
            ("SELECT pkgname FROM {} WHERE pkgname NOT IN %(pkgs)s"
             "".format(tmp_table_name), {'pkgs': tuple(all_dependencies)})

        status, response = g.connection.send_request()
        if status is False:
            return response

        filter_by_tops = utils.join_tuples(response)

    # check leaf, if true, get dependencies of leaf package
    if leaf:
        if leaf not in pkgs_to_sort_dict.keys():
            return utils.json_str_error(
                "Package '{}' not in dependencies list.".format(leaf))

    pkg_ls_with_empty_reqs = []
    for pkg, reqs in pkgs_to_sort_dict.items():
        if not reqs:
            pkg_ls_with_empty_reqs.append(pkg)

    # sort list of dependencies by their dependencies
    sort = SortList(pkgs_to_sort_dict, pname)
    circle_deps, sorted_list = sort.sort_list()

    # create output dict with circle dependency
    result_dict = {}
    for name in sorted_list:
        result_dict[name] = []
        if name in circle_deps:
            result_dict[name] += list(circle_deps[name].keys())

    # if leaf, select from the result dict the packages (and their cyclic
    # dependencies) on which the leaf package depends
    if leaf:

        def recursive_search(pkgname, structure):
            for pkg in structure[pkgname]:
                if pkg not in leaf_filter and pkg != pkgname:
                    leaf_filter.append(pkg)
                    recursive_search(pkg, structure)

        leaf_filter = []
        recursive_search(leaf, pkgs_to_sort_dict)

        if leaf not in leaf_filter:
            leaf_filter.append(leaf)

        # filter result dict by leaf packages
        result_dict = {
            key: value
            for (key, value) in result_dict.items() if key in leaf_filter
        }

    # list of result package names
    sorted_pkgs = tuple(result_dict.keys())

    # get output data for sorted package list
    g.connection.request_line = (QM.wds_get_output_data.format(
        tmp_table=tmp_table_name), {
            'branch': pbranch
        })

    status, response = g.connection.send_request()
    if status is False:
        return response

    # form list of packages with it information
    pkg_info_list = []
    for info in response:
        for pkg, c_deps in result_dict.items():
            if info[0] == pkg:
                # add empty list if not acl
                if pkg not in pkg_acl_dict:
                    pkg_acl_dict[pkg] = []

                pkg_info_list.append(info + (c_deps, ) +
                                     (pkgs_to_sort_dict[pkg], ) +
                                     (pkg_acl_dict[pkg], ))

    # filter result packages list by dependencies
    reqfilter = server.get_dict_values([('reqfilter', 's', 'pkg_name'),
                                        ('reqfilterbysrc', 's', 'pkg_name')])

    if None not in reqfilter.values():
        message = "Parameters 'reqfilter' and 'reqfilterbysrc' cannot be " \
                  "used together."
        return utils.json_str_error(message)

    filter_pkgs = None
    if reqfilter['reqfilter'] or reqfilter['reqfilterbysrc']:

        if reqfilter['reqfilter']:
            reqfilter_binpkgs = tuple(reqfilter['reqfilter'].split(','))
        else:
            g.connection.request_line = (QM.wds_req_filter_by_src, {
                'srcpkg': reqfilter['reqfilterbysrc'],
                'branch': pbranch
            })

            status, response = g.connection.send_request()
            if status is False:
                return response

            reqfilter_binpkgs = utils.join_tuples(response)

        base_query = QM.wds_req_filter_by_binary.format(
            pkg="{pkg}", tmp_table=tmp_table_name)

        if len(reqfilter_binpkgs) == 1:
            base_query = base_query.format(pkg=reqfilter_binpkgs[0])
        else:
            last_query = None
            for pkg in reqfilter_binpkgs:
                if not last_query:
                    last_query = base_query.format(pkg=pkg)
                    continue  # do not add the first package twice

                last_query = "{} AND pkgname IN ({})" \
                             "".format(last_query, base_query.format(pkg=pkg))

            base_query = last_query

        g.connection.request_line = (QM.wds_get_filter_pkgs.format(
            base_query=base_query), {
                'branch': pbranch,
                'archs': tuple(arch)
            })

        status, response = g.connection.send_request()
        if status is False:
            return response

        filter_pkgs = utils.join_tuples(response)

    # sort pkg info list
    sorted_dict = {}
    for pkg in pkg_info_list:
        if (filter_pkgs and pkg[0] in filter_pkgs) or not filter_pkgs:
            if task_id:
                if pkg[0] not in input_pkgs:
                    sorted_dict[sorted_pkgs.index(pkg[0])] = pkg
            else:
                sorted_dict[sorted_pkgs.index(pkg[0])] = pkg

    sorted_dict = list(dict(sorted(sorted_dict.items())).values())

    if finitepkg:
        sorted_dict = [pkg for pkg in sorted_dict if pkg[0] in filter_by_tops]

    js_keys = [
        'name', 'version', 'release', 'epoch', 'serial_', 'sourcerpm',
        'branch', 'archs', 'buildtime', 'cycle', 'requires', 'acl'
    ]

    return utils.convert_to_json(js_keys, sorted_dict)
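
SortList is not shown here; from its use above it takes the package-to-dependencies dict plus a package name and returns the cyclic edges together with a dependency-respecting order. A rough, hypothetical sketch of that kind of sort (not the project's implementation):

def toposort_with_cycles(graph):
    """Return (cycles, order) for a {package: [dependencies]} dict."""
    order, visiting, visited, cycles = [], set(), set(), {}

    def visit(node):
        if node in visited:
            return
        visiting.add(node)
        for dep in graph.get(node, []):
            if dep in visiting:
                # back edge: record the cyclic dependency instead of recursing
                cycles.setdefault(node, {})[dep] = True
            elif dep in graph:
                visit(dep)
        visiting.discard(node)
        visited.add(node)
        order.append(node)  # post-order: dependencies come first

    for node in graph:
        visit(node)
    return cycles, order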
Example #10
def package_by_file():
    """
    The function of searching binary packages that contain the specified file.

    Input GET params:
        file * - file name or pattern
        md5 ** - file md5
        branch * - name of repository
        arch - package architecture

    Output structure:
        pkgcs
        name
        version
        release
        disttag
        arch
        branch
        files
    """
    server.url_logging()

    check_params = server.check_input_params()
    if check_params is not True:
        return check_params

    file = server.get_one_value('file', 'r', is_='pkg_name')
    md5 = server.get_one_value('md5', 's')

    if len([param for param in [file, md5] if param]) != 1:
        return get_helper(server.helper(request.path))

    pbranch = server.get_one_value('branch', 's', 'repo_name')
    if not pbranch:
        return utils.json_str_error('Branch is a required parameter!')

    arch = server.get_one_value('arch', 's')
    if arch:
        arch = (arch, 'noarch')
    else:
        arch = server.known_archs

    base_query = QM.package_by_file_get_hshs_by_files.format(
        in_='{}', pkghash=QM.package_by_file_get_pkg_hashs, param='{}')

    if file:
        elem, query = file, "filename LIKE %(elem)s"
    else:
        elem, query = md5, "filemd5 = %(elem)s"

    g.connection.request_line = (base_query.format(', filename', query), {
        'branch': pbranch,
        'arch': tuple(arch),
        'elem': elem
    })

    status, response = g.connection.send_request()
    if status is False:
        return response

    if not response:
        return json.dumps({})

    ids_filename_dict = utils.tuplelist_to_dict(response, 1)

    pkghashs = tuple(ids_filename_dict.keys())

    g.connection.request_line = (QM.package_by_file_get_meta_by_hshs, {
        'hashs': pkghashs,
        'branch': pbranch
    })

    status, response = g.connection.send_request()
    if status is False:
        return response

    output_values = []
    for package in response:
        package += (ids_filename_dict[package[0]], )
        output_values.append(package[1:])

    output_params = [
        'pkgcs', 'name', 'version', 'release', 'disttag', 'arch', 'branch',
        'files'
    ]

    return utils.convert_to_json(output_params, tuple(output_values))
Example #11
def package_info():
    """
    The function of showing information about given package by user parameters.

    Input GET params:
        sha1 - package sha1
        name - package name
        version - package version
        release - package release
        arch - package arch
        disttag - package disttag
        buildtime - package buildtime
        source - show source packages only (true/false)
        packager - maintainer of package
        packager_email - maintainer's email
        branch - name of repository
        full - show full package information

    Output structure:
        `without 'full' option`
        pkgcs
        packager
        packager_email
        name
        arch
        version
        release
        epoch
        buildtime
        sourcepackage
        sourcerpm
        filename
    """
    server.url_logging()

    check_params = server.check_input_params()
    if check_params is not True:
        return check_params

    buildtime_action = None

    buildtime_value = server.get_one_value('buildtime', 'i')
    if buildtime_value and buildtime_value not in ['>', '<', '=']:
        buildtime_action = "{} = {}"

    pbranch = server.get_one_value('branch', 's', is_='repo_name')

    input_params = {
        'sha1': {
            'rname': 'pkgcs',
            'type': 's',
            'action': None,
            'notempty': False,
        },
        'name': {
            'rname': 'name',
            'type': 's',
            'action': None,
            'notempty': False,
            'is_': 'pkg_name',
        },
        'version': {
            'rname': 'version',
            'type': 's',
            'action': None,
            'notempty': False,
        },
        'release': {
            'rname': 'release',
            'type': 's',
            'action': None,
            'notempty': False,
        },
        'arch': {
            'rname': 'arch',
            'type': 's',
            'action': None,
            'notempty': False,
        },
        'disttag': {
            'rname': 'disttag',
            'type': 's',
            'action': None,
            'notempty': False,
        },
        'buildtime': {
            'rname': 'buildtime',
            'type': 'i',
            'action': buildtime_action,
            'notempty': False,
        },
        'source': {
            'rname': 'sourcepackage',
            'type': 'b',
            'action': None,
            'notempty': False,
        },
        'packager': {
            'rname': 'name',
            'type': 's',
            'action': None,
            'notempty': False,
        },
        'packager_email': {
            'rname': 'packager_email',
            'type': 's',
            'action': None,
            'notempty': False,
        },
    }

    params_values = server.get_values_by_params(input_params)
    if params_values is False:
        return get_helper(server.helper(request.path))

    full = bool(server.get_one_value('full', 'b'))

    output_params = [
        'pkgcs',
        'packager',
        'packager_email',
        'name',
        'arch',
        'version',
        'release',
        'epoch',
        'buildtime',
        'sourcepackage',
        'sourcerpm',
        'filename',
    ]
    if full:
        output_params = server.package_params

    g.connection.request_line = \
        "SELECT pkg.pkghash, {p_params} FROM last_packages WHERE " \
        "{p_values} {branch}".format(
            p_params=", ".join(output_params),
            p_values=" ".join(params_values),
            branch='{}'
        )

    if pbranch:
        g.connection.request_line = g.connection.request_line.format(
            "AND assigment_name = %(branch)s")
    else:
        g.connection.request_line = g.connection.request_line.format('')

    g.connection.request_line = (g.connection.request_line, {
        'branch': pbranch
    })

    status, response = g.connection.send_request()
    if status is False:
        return response

    json_retval = json.loads(
        utils.convert_to_json(['pkghash'] + output_params, response))

    if full and len(response) > 0:

        pkghashs = utils.join_tuples(response)

        # files
        g.connection.request_line = (
            "SELECT pkghash, groupUniqArray(filename) FROM File WHERE pkghash "
            "IN %(pkghshs)s GROUP BY pkghash", {
                'pkghshs': pkghashs
            })

        status, response = g.connection.send_request()
        if status is False:
            return response

        files_dict = utils.tuplelist_to_dict(response, 1)

        # add empty list if package has no files
        for hsh in pkghashs:
            if hsh not in files_dict:
                files_dict[hsh] = []

        # depends
        g.connection.request_line = (
            "SELECT pkghash, dptype, dpname FROM last_depends WHERE pkghash "
            "IN %(pkghshs)s", {
                'pkghshs': pkghashs
            })

        status, response = g.connection.send_request()
        if status is False:
            return response

        depends_dict = utils.tuplelist_to_dict(response, 2)

        depends_struct = {}
        for pkg in depends_dict:
            depend_ls = depends_dict[pkg]

            depends_struct[pkg] = {}

            for i in range(0, len(depend_ls), 2):
                if depend_ls[i] not in depends_struct[pkg]:
                    depends_struct[pkg][depend_ls[i]] = []

                depends_struct[pkg][depend_ls[i]].append(depend_ls[i + 1])

        for elem in json_retval:
            pkghash = json_retval[elem]['pkghash']

            # add files to result structure
            json_retval[elem]['files'] = files_dict[pkghash]

            # add depends to result structure
            for dep in depends_struct[pkghash]:
                json_retval[elem][dep] = depends_struct[pkghash][dep]

    # remove pkghash from result
    for value in json_retval.values():
        value.pop('pkghash', None)

    return json.dumps(json_retval, sort_keys=False)
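
The depends query returns (pkghash, dptype, dpname) rows, and utils.tuplelist_to_dict(response, 2) evidently flattens the last two columns into a single list per hash, which is why the loop above walks depend_ls in steps of two. A small illustration of that regrouping step on hypothetical data:

# Hypothetical flattened list: [dptype, dpname, dptype, dpname, ...]
depend_ls = ['require', 'libfoo', 'require', 'libbar', 'conflict', 'oldpkg']

regrouped = {}
for i in range(0, len(depend_ls), 2):
    regrouped.setdefault(depend_ls[i], []).append(depend_ls[i + 1])

# regrouped == {'require': ['libfoo', 'libbar'], 'conflict': ['oldpkg']}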
Example #12
def misconflict_packages():
    """
    The function of searching for conflicting files in packages that do not have
    a conflict.

    Input GET params:
        pkg_ls * - package or list of packages
        task ** - task id
        branch (* - for pkg_ls only) - name of repository
        arch - package architectures

    Output structure:
        input package
        conflict package
        version
        release
        epoch
        architectures
        files with conflict
    """
    server.url_logging()

    check_params = server.check_input_params(source=0)
    if check_params is not True:
        return check_params

    values = server.get_dict_values([('pkg_ls', 's', 'pkg_name'),
                                     ('task', 'i'),
                                     ('branch', 's', 'repo_name'),
                                     ('arch', 's')])

    if values['pkg_ls'] and values['task']:
        return utils.json_str_error("One parameter only. ('name'/'task')")

    if not values['pkg_ls'] and not values['task']:
        return utils.json_str_error(
            "'pkg_ls' or 'task' is a required parameter.")

    if values['pkg_ls'] and not values['branch']:
        return get_helper(server.helper(request.path))

    if values['arch']:
        allowed_archs = values['arch'].split(',')
        if 'noarch' not in allowed_archs:
            allowed_archs.append('noarch')
    else:
        allowed_archs = server.default_archs

    allowed_archs = tuple(allowed_archs)

    # prepare packages list from Task
    if values['task']:
        # get branch of task
        g.connection.request_line = (
            "SELECT DISTINCT branch FROM Tasks WHERE task_id = %(task)d", {
                'task': values['task']
            })

        status, response = g.connection.send_request()
        if status is False:
            return response

        if not response:
            return utils.json_str_error(
                "Task {task} not found!".format(task=values['task']))

        pbranch = response[0][0]

        # get packages of task for last build iteration (hashes)
        g.connection.request_line = (QM.misconflict_pkgs_get_pkgs_of_task, {
            'task': values['task']
        })

        status, response = g.connection.send_request()
        if status is False:
            return response

        if not response:
            return utils.json_str_error(
                "Error: Packages in task {task} not found!"
                "".format(task=values['task']))

        # joining tuples from response list
        input_pkg_hshs = [hsh[0] for hsh in response]

    # package list without task
    else:
        pkg_ls = tuple(values['pkg_ls'].split(','))
        pbranch = values['branch']

        # get hash for package names
        g.connection.request_line = (QM.misconflict_pkgs_get_hshs_by_pkgs, {
            'pkgs': tuple(pkg_ls),
            'branch': pbranch,
            'arch': allowed_archs
        })

        status, response = g.connection.send_request()
        if status is False:
            return response

        if not response:
            return utils.json_str_error(
                "Error: Packages {pkgs} not found in pkgset {branch}!".format(
                    pkgs=pkg_ls, branch=pbranch))

        # check that every input package exists by comparing the number of
        # requested packages with the number selected from the database
        if len(set([pkg[1] for pkg in response])) != len(pkg_ls):
            return utils.json_str_error("Invalid input data.")

        # form a list of package hashes
        input_pkg_hshs = [pkg[0] for pkg in response]

    if not input_pkg_hshs:
        return json.dumps({})

    # get list of (input package | conflict package | conflict files)
    g.connection.request_line = (QM.misconflict_pkgs_get_pkg_with_conflict, {
        'hshs': tuple(input_pkg_hshs),
        'branch': pbranch,
        'arch': allowed_archs
    })

    status, response = g.connection.send_request()
    if status is False:
        return response

    if not response:
        return json.dumps({})

    hshs_files = response

    # list of conflicting package pairs
    in_confl_hshs = [(hsh[0], hsh[1]) for hsh in hshs_files]

    # filter conflicts by provides/conflicts
    c_filter = ConflictFilter(pbranch, allowed_archs)

    # check for the presence of the specified conflict each pair
    # if the conflict between the packages in the pair is specified,
    # then add the pair to the list
    filter_ls = c_filter.detect_conflict(in_confl_hshs)

    # create dict with package names by hashes
    hsh_name_dict = defaultdict(dict)
    for hsh_1, hsh_2, _, name_2, name_1, _ in response:
        hsh_name_dict[hsh_1], hsh_name_dict[hsh_2] = name_1, name_2

    # convert the hashes into names, put in the first place in the pair
    # the name of the input package, if it is not
    filter_ls_names = []
    for hsh in filter_ls:
        inp_pkg = hsh[0] if hsh[0] in input_pkg_hshs else hsh[1]
        out_pkg = hsh[0] if hsh[0] != inp_pkg else hsh[1]
        result_pair = (hsh_name_dict[inp_pkg], hsh_name_dict[out_pkg])
        if result_pair not in filter_ls_names:
            filter_ls_names.append(result_pair)

    # form the list of tuples (input package | conflict package | conflict files)
    result_list, output_pkgs = [], set()
    for pkg in hshs_files:
        output_pkgs.update(pkg[:2])
        pkg = (hsh_name_dict[pkg[0]], hsh_name_dict[pkg[1]], pkg[2])
        if pkg not in result_list:
            result_list.append(pkg)

    # get architectures of found packages
    g.connection.request_line = QM.misconflict_pkgs_get_pkg_archs.format(
        hshs=tuple(output_pkgs))

    status, response = g.connection.send_request()
    if status is False:
        return response

    pkg_archs_dict = utils.tuplelist_to_dict(response, 1)

    # look for duplicate pairs of packages in the list with different files
    # and join them
    result_dict_cleanup = defaultdict(list)
    for pkg in result_list:
        result_dict_cleanup[(pkg[0], pkg[1])] += pkg[2]

    confl_pkgs = utils.remove_duplicate(
        [pkg[1] for pkg in result_dict_cleanup.keys()])

    # get main information of packages by package hashes
    g.connection.request_line = (QM.misconflict_pkgs_get_meta_by_hshs, {
        'pkgs': tuple(confl_pkgs),
        'branch': pbranch,
        'arch': allowed_archs
    })

    status, response = g.connection.send_request()
    if status is False:
        return response

    # form dict name - package info
    name_info_dict = {}
    for pkg in response:
        name_info_dict[pkg[0]] = pkg[1:]

    # form list of tuples (input pkg | conflict pkg | pkg info | conflict files)
    # and filter it
    result_list_info = []
    for pkg, files in result_dict_cleanup.items():
        inp_pkg_archs = set(pkg_archs_dict[pkg[0]])
        found_pkg_archs = set(pkg_archs_dict[pkg[1]])
        intersect_pkg_archs = inp_pkg_archs.intersection(found_pkg_archs)

        if (pkg[0], pkg[1]) not in filter_ls_names and intersect_pkg_archs:
            pkg = (pkg[0], pkg[1]) + \
                  name_info_dict[pkg[1]][:-1] + \
                  (list(intersect_pkg_archs),) + (files,)
            result_list_info.append(pkg)

    return utils.convert_to_json([
        'input_package', 'conflict_package', 'version', 'release', 'epoch',
        'archs', 'files_with_conflict'
    ], result_list_info)
Example #13
def task_info():
    server.url_logging()

    check_params = server.check_input_params()
    if check_params is not True:
        return check_params

    task_id = server.get_one_value('task', type_='i')
    if not task_id:
        return get_helper(server.helper(request.path))

    try_iteration = server.get_one_value('rebuild', type_='s')
    if try_iteration:
        try_iteration = tuple([int(i) for i in try_iteration.split('.')])

    g.connection.request_line = """
    SELECT DISTINCT concat(toString(try), '.', toString(iteration)),
                branch,
                userid
    FROM Tasks
    WHERE task_id = {}
    """.format(task_id)

    status, response = g.connection.send_request()
    if status is False:
        return response

    if not response:
        return utils.json_str_error("Non-existent task number!")

    branch, user_id = response[0][1], response[0][2]
    all_rebuilds = [i[0] for i in response]

    g.connection.request_line = QM.task_info_get_task_content.format(
        id=task_id)
    if try_iteration:
        g.connection.request_line = QM.task_info_get_task_content_rebuild.format(
            id=task_id, ti=try_iteration)

    status, response = g.connection.send_request()
    if status is False:
        return response

    src_pkgs = response

    task_status = src_pkgs[0][1]
    try_iteration = src_pkgs[0][3]
    pkg_subtask = {pkg[0]: pkg[2] for pkg in src_pkgs}

    pkg_hshs = [
        val for sublist in [[i[0]] + i[4] for i in response] for val in sublist
    ]

    g.connection.request_line = """SELECT pkghash,
                                          name,
                                          version,
                                          release,
                                          arch,
                                          description
                                   FROM Package
                                   WHERE pkghash IN {}
                                   """.format(tuple(pkg_hshs))

    status, response = g.connection.send_request()
    if status is False:
        return response

    name_hsh = utils.tuplelist_to_dict(response, 5)

    http = utils.HtmlParser(
        'a',
        ['Name', 'Last modified', 'Size', 'Description', 'Parent Directory'])

    for hsh, subtask in pkg_subtask.items():
        result_list = []
        for i in ['approved', 'disapproved']:
            url = "http://git.altlinux.org/tasks/{task}/acl/{act}/{subtask}/" \
                  "".format(task=task_id, subtask=subtask, act=i)
            result = http.parse_html(url)
            if result:
                result = result.split('::')
                msg_format = [result[0].strip()] + \
                             [k.strip() for k in result[1].split('\n')][:-1]
            else:
                msg_format = ''

            result_list.append(msg_format or '')

        pkg_subtask[hsh] = [pkg_subtask[hsh]] + result_list

    beehive_result = requests.get(
        "http://bb.ipa.basealt.ru/RESULT/{}/check-beehive-result.log"
        "".format(task_id))

    beehive_result = "" if beehive_result.status_code != 200 \
        else beehive_result.content.decode()

    task_msg_req = requests.get(
        'http://git.altlinux.org/tasks/{task}/logs/events.{ti}.log'
        ''.format(task=task_id, ti=try_iteration))

    task_msg = ''
    if task_msg_req.status_code == 200:
        message = re.findall(r'message:(.*)', task_msg_req.content.decode())
        if message:
            task_msg = message[0].strip()

    result_list = []
    for pkg in src_pkgs:
        pkg = [
            *name_hsh[pkg[0]][:-2], branch, user_id, task_status,
            *pkg_subtask[pkg[0]], task_msg, try_iteration,
            sorted(all_rebuilds),
            utils.tuplelist_to_dict([(name_hsh[hsh][3], name_hsh[hsh][0])
                                     for hsh in pkg[4]],
                                    1), name_hsh[pkg[0]][-1], beehive_result
        ]

        if pkg not in result_list:
            result_list.append(pkg)

    fields = [
        'src_pkg', 'version', 'release', 'branch', 'user', 'status', 'subtask',
        'approve', 'disapprove', 'task_msg', 'current_rebuild', 'all_rebuilds',
        'task_content', 'description', 'beehive_check'
    ]

    return utils.convert_to_json(fields, sorted(result_list,
                                                key=itemgetter(6)))
Example #14

    def scrape_feed(self):
        p = self.ig_parser
        pf = self.post_filter

        # open the Instagram main page, so this method can be called in a loop
        self.open_link('https://www.instagram.com/')
        random_sleep()

        # scroll feed and gather post links from timestamp element
        post_links = self.fetch_posts(config.num_posts)
        random_sleep()

        # loop through post links
        print('Parsing posts...')
        print('-------------------------------------------------------------------------------')
        for i, post_link in enumerate(post_links):
            self.open_link(post_link)
            post = p.post_parser(post_link)
            filtered_post = pf.filter_post(post, config, post_keywords)
            is_relevant_post = pf.is_relevant_post(filtered_post)
            random_sleep()

            # like post
            if config.like_post and pt.like_counter < config.num_likes:
                like = self.click_button(xpaths.like_button, 'like button')
                if like:
                    pt.like_counter += 1
                else:
                    pt.already_liked_counter += 1

            # stop when only already-liked (old) posts remain
            if pt.already_liked_counter > config.max_already_liked:
                break
            
            # skip irrelevant posts
            if not is_relevant_post:
                continue

            # if the post matches a keyword, save it and try to find its location
            if filtered_post.matches_keyword:
                pt.matched_post_counter += 1
                if config.save_post:
                    self.click_button(xpaths.save_button, 'save button')
                    pt.save_counter += 1
                as_json = convert_to_json(post)
                pt.all_posts.append(as_json)
                if post.location != '':
                    try:
                        position = self.find_lat_long(post.location)
                    except ValueError:
                        position = ''  # avoid a NameError below if the lookup fails
                    if position != '':
                        self.lat_lon_to_gcs(position, post)
                        point = Point((post.lon, post.lat))
                        date_as_string = post.date.isoformat()
                        short_date = date_as_string[:11]
                        setattr(post, 'date', short_date)
                        pt.posts_with_location.append(Feature(geometry=point, properties = asdict(post)))


#                    try: 
#                        self.open_link(post.author)
#                        profile = p.profile_parser(post.author, location_keywords)
#                        setattr(post, 'lat', profile.lat)
#                        setattr(post, 'lon', profile.lon)
#                    except ValueError:
#                        pass

        print('Making json files.')
        with open("matched_posts.json", "w") as outfile:
            json.dump(pt.all_posts, outfile, indent=4)
        with open("matched_posts_locations.json", "w") as outfile:
            json.dump(pt.posts_with_location, outfile, indent=4)
        print('-------------------------------------------------------------------------------')
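
The Point and Feature objects come from the geojson package, whose types subclass dict and therefore serialize directly with json.dump. Wrapping the feature list in a FeatureCollection would make the second file a valid standalone GeoJSON document; a possible refinement:

from geojson import FeatureCollection

with open("matched_posts_locations.json", "w") as outfile:
    json.dump(FeatureCollection(pt.posts_with_location), outfile, indent=4)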