def test_good_file(self, request_mock):
        with full_rig_with_s3() as (app, engine):
            upload_id = self.do_upload(app, request_mock)
            # okay, here's what we really want to test.
            # call the merge endpoint
            response = app.post(
                '/api/upload/merge_file?uploadId={}'.format(upload_id))
            response_data = json.loads(response.get_data().decode('utf-8'))
            assert response_data['status'] == 'success'
            # make sure that there is a new merged file on s3
            expected_s3_path = 's3://test-bucket/boone/hmis_service_stays/merged'
            with open_sesame(expected_s3_path, 'rb') as expected_s3_file:
                reader = csv.reader(expected_s3_file)
                assert len([row for row in reader]) == ROWS_IN_GOOD_HMIS_FILE

            # and make sure that the merge log has a record of this
            assert db_session.query(MergeLog).filter(
                MergeLog.upload_id == '123-456').one

            # make sure that the master table has been bootstrapped
            master_table = generate_master_table_name('boone',
                                                      'hmis_service_stays')
            total_rows = db_session.query(
                'count(*) from {}'.format(master_table)).one()
            assert total_rows == (ROWS_IN_GOOD_HMIS_FILE - 1, )

            # make sure that we filled in some matched ids
            total_rows = db_session.query(
                'count(matched_id is not null) from {}'.format(
                    master_table)).one()
            assert total_rows == (ROWS_IN_GOOD_HMIS_FILE - 1, )
# Example #2
# 0
def detail_article(article_id):
    """Render the article-detail page, bumping the article's view counter."""
    article = Blog.query.get(article_id)
    # Increment the scan/view count with a direct UPDATE statement rather
    # than mutating the loaded object.
    db_session.query(Blog).filter(Blog.id == article_id).update(
        {Blog.count_scan: Blog.count_scan + 1})
    db_session.commit()
    sidebar_categories = get_blog_category()
    return render_template('articledetail.html',
                           article=article,
                           categorys=sidebar_categories)
    def test_file_storage(self, request_mock):
        """Merge test against the filesystem-backed (no-S3) rig: patch the
        storage paths to a temp dir, merge an upload, then verify the merged
        file on disk and the merge-log record."""
        with TemporaryDirectory() as temp_dir:
            # NOTE: the original also grabbed os.getcwd() into an unused
            # local (`root_dir`); removed.
            s3_less_config = {
                'raw_uploads_path':
                os.path.join(
                    temp_dir,
                    '{jurisdiction}-{event_type}-uploaded-{date}-{upload_id}'),
                'merged_uploads_path':
                os.path.join(temp_dir, '{jurisdiction}-{event_type}-merged')
            }
            with full_rig_without_s3() as (app, engine):
                with patch.dict('webapp.utils.app_config', s3_less_config):
                    upload_id = self.do_upload(app, request_mock)
                    # okay, here's what we really want to test.
                    # call the merge endpoint
                    response = app.post(
                        '/api/upload/merge_file?uploadId={}'.format(upload_id))
                    response_data = json.loads(
                        response.get_data().decode('utf-8'))
                    assert response_data['status'] == 'success'
                    # make sure that there is a new merged file on the FS
                    expected_path = os.path.join(temp_dir,
                                                 'boone-jail_bookings-merged')
                    # NOTE(review): csv.reader over a binary-mode file only
                    # works on Python 2 -- confirm target version.
                    with open(expected_path, 'rb') as expected_file:
                        reader = csv.reader(expected_file)
                        assert len([row for row in reader]) == 11

                    # and make sure that the merge log has a record of this
                    # BUG FIX: call .one() -- the original asserted the bound
                    # method itself, which is always truthy.
                    assert db_session.query(MergeLog).filter(
                        MergeLog.upload_id == '123-456').one()
def update_release(release_codename, **kwargs):
    """Update an existing Release from the JSON request body.

    Returns a 404 response when the codename is unknown, 422 when the
    commit hits an integrity error, and 200 on success.
    """
    release = db_session.query(Release).filter(
        Release.codename == release_codename).one_or_none()

    if release is None:
        return make_response(
            jsonify({"message": f"Release {release_codename} doesn't exist"}),
            404,
        )

    payload = request.json
    # Copy each editable field from the payload onto the model object.
    for field in ("version", "name", "development", "lts", "release_date",
                  "esm_expires", "support_expires"):
        setattr(release, field, payload[field])

    db_session.add(release)

    try:
        db_session.commit()
    except IntegrityError as error:
        return make_response(
            jsonify({
                "message": "Failed updating release",
                "error": error.orig.args[0],
            }),
            422,
        )

    return make_response(jsonify({"message": "Release updated"}), 200)
# Example #5
# 0
def can_access_file(upload_id):
    """Return whether the current user may upload files for the jurisdiction
    and event type recorded against `upload_id`.

    Raises:
        ValueError: if no Upload row exists for `upload_id`.
    """
    upload = db_session.query(Upload).get(upload_id)
    if not upload:
        # BUG FIX: the original passed logging-style extra args to
        # ValueError, so the %s placeholder was never interpolated;
        # format the message explicitly.
        raise ValueError(
            'upload_id: %s not present in metadata database' % upload_id)
    logger.info('Found jurisdiction %s and event type %s for upload id %s',
                upload.jurisdiction_slug, upload.event_type_slug, upload_id)
    return can_upload_file(upload.jurisdiction_slug, upload.event_type_slug)
def _update_statuses(cve, data, packages):
    """Reconcile the per-package, per-release Status rows for `cve` with the
    incoming payload `data`, creating Package rows as needed.

    Args:
        cve: CVE model object whose statuses are being updated.
        data: payload dict; reads data["packages"], each entry carrying
            "name", "source", "ubuntu", "debian" and a "statuses" list.
        packages: cache of Package objects keyed by name; new packages are
            inserted into it (mutated in place).

    Rows are added/deleted on db_session but NOT committed here -- the
    caller owns the transaction.
    """
    # NOTE(review): assumes cve.packages maps package name -> {codename:
    # Status} and covers any name seen below; an unknown name would raise
    # KeyError at statuses[name] -- confirm against the CVE model.
    statuses = cve.packages

    # Existing Status rows for this CVE keyed "package||codename"; any key
    # still present after processing the payload is stale and deleted below.
    statuses_to_check = (db_session.query(Status).filter(
        Status.cve_id == cve.id).all())
    statuses_to_delete = {
        f"{v.package_name}||{v.release_codename}": v
        for v in statuses_to_check
    }

    for package_data in data.get("packages", []):
        name = package_data["name"]

        # First time we see this package name: create and cache a row.
        if packages.get(name) is None:
            package = Package(name=name)
            package.source = package_data["source"]
            package.ubuntu = package_data["ubuntu"]
            package.debian = package_data["debian"]
            packages[name] = package

            db_session.add(package)

        for status_data in package_data["statuses"]:
            update_status = False
            codename = status_data["release_codename"]

            status = statuses[name].get(codename)
            if status is None:
                # No existing row for this package/release pair: build one.
                update_status = True
                status = Status(cve_id=cve.id,
                                package_name=name,
                                release_codename=codename)
            elif f"{name}||{codename}" in statuses_to_delete:
                # Row is still referenced by the payload -- keep it.
                del statuses_to_delete[f"{name}||{codename}"]

            # Copy each field, flagging the row for re-add only on change.
            if status.status != status_data["status"]:
                update_status = True
                status.status = status_data["status"]

            if status.description != status_data["description"]:
                update_status = True
                status.description = status_data["description"]

            if status.component != status_data.get("component"):
                update_status = True
                status.component = status_data.get("component")

            if status.pocket != status_data.get("pocket"):
                update_status = True
                status.pocket = status_data.get("pocket")

            if update_status:
                statuses[name][codename] = status
                db_session.add(status)

    # Anything the payload no longer mentions is removed.
    for key in statuses_to_delete:
        db_session.delete(statuses_to_delete[key])
# Example #7
# 0
def login():
    """Log an agent in from JSON credentials; abort with 401 on failure."""
    if request.method == 'POST':
        username = request.json.get('username')
        password = request.json.get('password')
        agent = session.query(Agents).filter_by(username=username).first()
        # BUG FIX: an unknown username left `agent` as None and the original
        # crashed with AttributeError on verify_password; treat it as a
        # failed login (401) instead.
        if agent is not None and agent.verify_password(password):
            flask_login.login_user(agent)
            return jsonify({'login': '******'})
        return abort(401)
# Example #8
# 0
    def _deserialize(self, value, attr, data, **kwargs):
        """Validate that `value` names a known package before deserializing.

        Raises the schema's "unrecognised_package_name" error when the
        `package` table is absent or holds no row with that name.
        """
        known = False
        if "package" in inspector.get_table_names():
            match = db_session.query(
                Package.name).filter_by(name=value).one_or_none()
            known = match is not None

        if not known:
            raise self.make_error("unrecognised_package_name", input=value)

        return super()._deserialize(value, attr, data, **kwargs)
# Example #9
# 0
    def _deserialize(self, value, attr, data, **kwargs):
        """Reject `value` when a Notice with that id already exists."""
        already_exists = False
        if "notice" in inspector.get_table_names():
            match = db_session.query(Notice).filter(
                Notice.id == value).one_or_none()
            already_exists = match is not None

        if already_exists:
            raise self.make_error("notice_id_exists", input=value)

        return super()._deserialize(value, attr, data, **kwargs)
# Example #10
# 0
    def _deserialize(self, value, attr, data, **kwargs):
        """Reject `value` when a Release with that codename already exists."""
        already_exists = False
        if "release" in inspector.get_table_names():
            match = db_session.query(Release).filter(
                Release.codename == value).one_or_none()
            already_exists = match is not None

        if already_exists:
            raise self.make_error("release_codename_exists", input=value)

        return super()._deserialize(value, attr, data, **kwargs)
def get_release(release_codename):
    """Fetch a Release by codename, or a 404 response when it is absent."""
    release = db_session.query(Release).filter(
        Release.codename == release_codename).one_or_none()

    if release is None:
        message = f"Release {release_codename} doesn't exist"
        return make_response(jsonify({"message": message}), 404)

    return release
# Example #12
# 0
def merge_file():
    """Merge an upload's raw table into the jurisdiction's master table.

    Reads `uploadId` from the query string and requires that the current
    user may access the upload. On success, syncs the merged file to
    storage, notifies the matcher, and returns merge-log row counts.
    Rolls back and returns 500 on matcher or lookup errors.
    """
    upload_id = request.args.get('uploadId', None)
    if not upload_id:
        return jsonify(status='invalid', reason='uploadId not present')
    has_access = False
    try:
        has_access = can_access_file(upload_id)
        if has_access:
            upload_log = db_session.query(Upload).get(upload_id)
            logger.info('Retrieved upload log, merging raw table to master')
            raw_table_name = 'raw_{}'.format(upload_id)
            logger.info('Merging raw table to master')
            merge_id = upsert_raw_table_to_master(raw_table_name,
                                                  upload_log.jurisdiction_slug,
                                                  upload_log.event_type_slug,
                                                  upload_id, db_session)
            logger.info('Syncing merged file to s3')

            bootstrap_master_tables(upload_log.jurisdiction_slug, db_session)

            sync_merged_file_to_storage(upload_log.jurisdiction_slug,
                                        upload_log.event_type_slug,
                                        db_session.get_bind())
            merge_log = db_session.query(MergeLog).get(merge_id)
            try:
                logger.info('Merge succeeded. Now querying matcher')
                notify_matcher(upload_log.jurisdiction_slug, upload_id)
            except Exception as e:
                # BUG FIX: the original passed `e` as a stray logging arg
                # with no placeholder; use %s so the error is rendered.
                logger.error('Error matching: %s', e)
                db_session.rollback()
                return make_response(jsonify(status='error'), 500)
            db_session.commit()
            return jsonify(status='success',
                           new_unique_rows=merge_log.new_unique_rows,
                           total_unique_rows=merge_log.total_unique_rows)
        else:
            return jsonify(status='not authorized')
    except ValueError as e:
        # Raised by can_access_file when the upload id is unknown.
        logger.error('Error merging: %s', e)
        db_session.rollback()
        return make_response(jsonify(status='error'), 500)
def _get_releases(versions):
    """Return Release rows ordered by release date.

    When `versions` names specific codenames, filter to those; when it is
    empty or contains ""/"current", fall back to every release still in
    standard or ESM support, excluding the "upstream" pseudo-release.
    """
    releases = db_session.query(Release).order_by(Release.release_date)

    explicit_codenames = versions and not any(
        v in ["", "current"] for v in versions)

    if explicit_codenames:
        releases = releases.filter(Release.codename.in_(versions))
    else:
        still_supported = or_(
            Release.support_expires > datetime.now(),
            Release.esm_expires > datetime.now(),
        )
        releases = releases.filter(still_supported).filter(
            Release.codename != "upstream")

    return releases.all()
def delete_cve(cve_id):
    """Delete the CVE with the given id (case-insensitive); 404 if absent."""
    normalised_id = cve_id.upper()
    cve = db_session.query(CVE).filter(CVE.id == normalised_id).one_or_none()

    if cve is None:
        message = f"CVE {cve_id} doesn't exist"
        return make_response(jsonify({"message": message}), 404)

    db_session.delete(cve)
    db_session.commit()

    return make_response(
        jsonify({"message": f"CVE with id '{cve_id}' was deleted"}), 200)
def get_cve(cve_id, **kwargs):
    """Fetch a CVE by id (case-insensitive), or a 404 response when absent.

    Honours the `show_hidden` kwarg when filtering attached notices.
    """
    cve = db_session.query(CVE).filter(
        CVE.id == cve_id.upper()).one_or_none()

    if cve is None:
        message = f"CVE with id '{cve_id}' does not exist"
        return make_response(jsonify({"message": message}), 404)

    # Attach only the notices the caller is allowed to see.
    cve.notices = cve.get_filtered_notices(kwargs.get("show_hidden", False))

    return cve
# Example #16
# 0
def register():
    """Create a new agent from JSON fields; 400 when the username is taken."""
    if request.method == 'POST':
        payload = request.json
        username = payload.get('username')
        # Usernames must be unique.
        if session.query(Agents).filter_by(username=username).first():
            return jsonify({'error': 'username already taken'}), 400
        agent = Agents(username=username,
                       hostname=payload.get('hostname'),
                       windows_user=payload.get('windows_user'))
        agent.hash_password(payload.get('password'))
        session.add(agent)
        session.commit()
        return jsonify({'username': username})
def _update_notice_object(notice, data):
    """
    Set fields on a Notice model object
    """
    # Straight attribute <- payload-key copies.
    field_map = {
        "title": "title",
        "summary": "summary",
        "details": "description",
        "release_packages": "release_packages",
        "published": "published",
        "references": "references",
        "instructions": "instructions",
    }
    for attr, key in field_map.items():
        setattr(notice, attr, data[key])
    notice.is_hidden = data.get("is_hidden", False)

    # One Release row per codename appearing in release_packages.
    notice.releases = [
        db_session.query(Release).get(codename)
        for codename in data["release_packages"]
    ]

    # Rebuild the CVE list, creating stub CVE rows for unknown ids.
    notice.cves.clear()
    for cve_id in set(data["cves"]):
        notice.cves.append(
            db_session.query(CVE).get(cve_id) or CVE(id=cve_id))

    return notice
def delete_notice(notice_id):
    """Delete the Notice with the given id; 404 when it doesn't exist."""
    notice = db_session.query(Notice).filter(
        Notice.id == notice_id).one_or_none()

    if notice is None:
        message = f"Notice {notice_id} doesn't exist"
        return make_response(jsonify({"message": message}), 404)

    db_session.delete(notice)
    db_session.commit()

    return make_response(
        jsonify({"message": f"Notice {notice_id} deleted"}), 200)
def get_notice(notice_id, **kwargs):
    """Fetch a Notice by id (uppercased); hidden notices are excluded
    unless `show_hidden` is passed. Returns a 404 response when absent."""
    lookup = db_session.query(Notice)

    if not kwargs.get("show_hidden", False):
        # NOTE: is_hidden is stored/compared as the string "False".
        lookup = lookup.filter(Notice.is_hidden == "False")

    notice = lookup.filter(Notice.id == notice_id.upper()).one_or_none()

    if notice is None:
        message = f"Notice with id '{notice_id}' does not exist"
        return make_response(jsonify({"message": message}), 404)

    return notice
def update_notice(notice_id, **kwargs):
    """Apply the JSON request body to an existing Notice; 404 when absent."""
    notice = db_session.query(Notice).filter(
        Notice.id == notice_id).one_or_none()

    if notice is None:
        message = f"Notice '{notice_id}' doesn't exist"
        return make_response(jsonify({"message": message}), 404)

    db_session.add(_update_notice_object(notice, request.json))
    db_session.commit()

    return make_response(jsonify({"message": "Notice updated"}), 200)
    def test_good_file(self):
        """Upload a known-good HMIS file and walk the validate/upload flow:
        job status, validation result, upload metadata, the copy written to
        S3, and the Upload DB record."""
        with full_rig_with_s3() as (app, engine):
            response = app.post(
                '/api/upload/upload_file?jurisdiction=boone&eventType=hmis_service_stays',
                content_type='multipart/form-data',
                data={
                    'file_field': (open(GOOD_HMIS_FILE, 'rb'), 'myfile.csv')
                })
            response_data = json.loads(response.get_data().decode('utf-8'))
            assert response_data['status'] == 'validating'
            assert 'jobKey' in response_data
            assert 'message' in response_data

            job_key = response_data['jobKey']

            # get validation result and upload to s3
            response = app.get('/api/upload/validated_result/' + job_key)
            response_data = json.loads(response.get_data().decode('utf-8'))

            assert 'validation' in response_data
            assert response_data['validation']['status'] == 'valid'
            assert response_data['validation']['jobKey'] == job_key

            assert 'upload_result' in response_data
            assert 'rowCount' in response_data['upload_result']
            assert 'exampleRows' in response_data['upload_result']
            assert 'uploadId' in response_data['upload_result']
            assert 'fieldOrder' in response_data['upload_result']

            current_date = date.today().isoformat()
            expected_s3_path = 's3://test-bucket/boone/hmis_service_stays/uploaded/{}/{}'.format(
                current_date, response_data['upload_result']['uploadId'])
            with open_sesame(expected_s3_path) as expected_s3_file:
                with open_sesame(GOOD_HMIS_FILE) as source_file:
                    # we do not expect the file on s3 to be the same as the
                    # uploaded source file - missing columns should be filled in
                    s3_df = pd.read_csv(expected_s3_file)
                    source_df = pd.read_csv(source_file, sep='|')
                    assert source_df.equals(s3_df[source_df.columns.tolist()])

            # BUG FIX: the original asserted the bound method `.one` (always
            # truthy); call it so a missing Upload row fails the test.
            assert db_session.query(Upload).filter(
                Upload.id == response_data['upload_result']['uploadId']).one()
def get_notices(**kwargs):
    """Return one page of Notices plus paging metadata.

    Supports filtering by free-text details, CVE id, and release codename;
    paging via offset/limit; and oldest/newest ordering by published date.
    """
    details = kwargs.get("details")
    cve_id = kwargs.get("cve_id")
    release = kwargs.get("release")
    limit = kwargs.get("limit", 20)
    offset = kwargs.get("offset", 0)

    # The window count gives the total match count alongside each row.
    notices = db_session.query(
        Notice, func.count("*").over().label("total"))

    if not kwargs.get("show_hidden", False):
        notices = notices.filter(Notice.is_hidden == "False")

    if cve_id:
        notices = notices.filter(Notice.cves.any(CVE.id == cve_id))

    if release:
        notices = notices.join(Release, Notice.releases).filter(
            Release.codename == release)

    if details:
        text_match = or_(
            Notice.id.ilike(f"%{details}%"),
            Notice.details.ilike(f"%{details}%"),
            Notice.title.ilike(f"%{details}%"),
            Notice.cves.any(CVE.id.ilike(f"%{details}%")),
        )
        notices = notices.filter(text_match)

    direction = asc if kwargs.get("order") == "oldest" else desc
    rows = (notices.order_by(direction(Notice.published))
            .offset(offset).limit(limit).all())

    return {
        "notices": [row[0] for row in rows],
        "offset": offset,
        "limit": limit,
        "total_results": rows[0][1] if rows else 0,
    }
    def test_good_file(self, request_mock):
        """Merge test for jail bookings on the S3 rig: merge an upload,
        then verify the merged S3 file, the merge-log record, and that the
        raw staging table was dropped."""
        with full_rig_with_s3() as (app, engine):
            upload_id = self.do_upload(app, request_mock)
            # okay, here's what we really want to test.
            # call the merge endpoint
            response = app.post(
                '/api/upload/merge_file?uploadId={}'.format(upload_id))
            response_data = json.loads(response.get_data().decode('utf-8'))
            assert response_data['status'] == 'success'
            # make sure that there is a new merged file on s3
            expected_s3_path = 's3://test-bucket/boone/jail_bookings/merged'
            with open_sesame(expected_s3_path, 'rb') as expected_s3_file:
                reader = csv.reader(expected_s3_file)
                assert len([row for row in reader]) == 11

            # and make sure that the merge log has a record of this
            # BUG FIX: call .one() -- the original asserted the bound method
            # itself, which is always truthy.
            assert db_session.query(MergeLog).filter(
                MergeLog.upload_id == '123-456').one()

            # and make sure that the raw table is no longer there
            assert not table_exists(generate_raw_table_name(upload_id),
                                    db_session.bind)
def delete_release(release_codename):
    """Delete a Release by codename.

    Returns 404 when the codename is unknown and 400 when Status rows
    still reference the release (it is in use).
    """
    release = db_session.query(Release).filter(
        Release.codename == release_codename).one_or_none()

    if release is None:
        return make_response(
            jsonify({"message": f"Release {release_codename} doesn't exist"}),
            404,
        )

    if release.statuses:
        # NOTE: returned as a bare (body, status) tuple, matching the
        # other error path's original behaviour (Flask accepts both forms).
        return (
            jsonify({
                "message": (f"Cannot delete '{release_codename}' release. "
                            f"Release already in use")
            }),
            400,
        )

    db_session.delete(release)
    db_session.commit()

    return make_response(
        jsonify({"message": f"Release {release_codename} deleted"}), 200)
# Example #25
# 0
 def get_category_child(self):
     """Return all Blog_category rows whose parent is this category."""
     children = db_session.query(Blog_category).filter(
         Blog_category.category_parent == self.id)
     return children.all()
# Example #26
# 0
 def load_user(id):
     """Flask-Login user loader: look up an Agents row by primary key."""
     agent_id = int(id)
     return db_session.query(Agents).get(agent_id)
def bulk_upsert_cve(*args, **kwargs):
    """Create or update up to 50 CVEs (and their package statuses) from the
    JSON request body.

    Returns 413 when more than 50 CVEs are submitted, 400 on database data
    errors, otherwise 200 with per-model counts of created/updated/deleted
    rows.
    """
    cves_data = request.json

    if len(cves_data) > 50:
        return make_response(
            jsonify({
                "message": ("Please only submit up to 50 CVEs at a time. "
                            f"({len(cves_data)} submitted)")
            }),
            413,
        )

    # Preload all packages so _update_statuses avoids per-name queries.
    packages = {package.name: package
                for package in db_session.query(Package).all()}

    # Fields copied verbatim from the payload when they differ.
    simple_fields = ("status", "priority", "cvss3", "description",
                     "ubuntu_description", "notes", "references", "bugs",
                     "patches", "tags", "mitigation")

    for data in cves_data:
        cve_id = data["id"].upper()
        cve = db_session.query(CVE).get(cve_id)

        update_cve = False
        if cve is None:
            update_cve = True
            # BUG FIX: store the id uppercased -- every lookup in this
            # module queries with .upper(), so a row created with the raw
            # casing would never be found again.
            cve = CVE(id=cve_id)

        for field in simple_fields:
            if getattr(cve, field) != data.get(field):
                update_cve = True
                setattr(cve, field, data.get(field))

        # `published` is compared at day granularity so sub-day timestamp
        # noise doesn't force a spurious update.
        published_date = (cve.published.strftime("%Y-%B-%d")
                          if cve.published else None)
        data_published_date = (data.get("published").strftime("%Y-%B-%d")
                               if data.get("published") else None)
        if published_date != data_published_date:
            update_cve = True
            cve.published = data.get("published")

        if update_cve:
            db_session.add(cve)

        _update_statuses(cve, data, packages)

    # Tally pending changes by model class name before committing.
    created = defaultdict(lambda: 0)
    updated = defaultdict(lambda: 0)
    deleted = defaultdict(lambda: 0)

    for item in db_session.new:
        created[type(item).__name__] += 1

    for item in db_session.dirty:
        updated[type(item).__name__] += 1

    for item in db_session.deleted:
        deleted[type(item).__name__] += 1

    try:
        db_session.commit()
    except DataError as error:
        return make_response(
            jsonify({
                "message": "Failed bulk upserting session",
                "error": error.orig.args[0],
            }),
            400,
        )

    return make_response(
        jsonify({
            "created": created,
            "updated": updated,
            "deleted": deleted
        }),
        200,
    )
def get_cves(**kwargs):
    """Return one page of active CVEs plus paging metadata.

    Filters: free-text `q`, `priority`, `package`, `component`, and paired
    `version`/`status` lists; pages with `limit`/`offset`; orders by
    published date (`order="oldest"` for ascending, unpublished last).
    """
    query = kwargs.get("q", "").strip()
    priority = kwargs.get("priority")
    package = kwargs.get("package")
    limit = kwargs.get("limit", 20)
    offset = kwargs.get("offset", 0)
    component = kwargs.get("component")
    versions = kwargs.get("version")
    statuses = kwargs.get("status")
    order_by = kwargs.get("order")
    show_hidden = kwargs.get("show_hidden", False)

    clean_versions = _get_clean_versions(statuses, versions)
    clean_statuses = _get_clean_statuses(statuses, versions)

    # query cves by filters; the window count carries the total match count
    cves_query = db_session.query(
        CVE,
        func.count("*").over().label("total")).filter(CVE.status == "active")

    # filter by priority
    if priority:
        cves_query = cves_query.filter(CVE.priority == priority)

    # filter by description or CVE id
    if query:
        cves_query = cves_query.filter(
            or_(
                CVE.id.ilike(f"%{query}%"),
                CVE.description.ilike(f"%{query}%"),
                CVE.ubuntu_description.ilike(f"%{query}%"),
            ))

    # build CVE statuses filter parameters
    parameters = []

    # filter by package name
    if package:
        parameters.append(Status.package_name == package)

    # filter by component
    if component:
        parameters.append(Status.component == component)

    # filter by status and version
    if not _should_filter_by_version_and_status(statuses, versions):
        # by default we look for CVEs with active statuses
        parameters.append(Status.status.in_(Status.active_statuses))
    else:
        # make initial filter for cves.statuses by status-version criteria
        conditions = []
        for key, version in enumerate(clean_versions):
            conditions.append(
                and_(
                    Status.release_codename.in_(version),
                    Status.status.in_(clean_statuses[key]),
                ))

        parameters.append(or_(*[condition for condition in conditions]))

        # filter for cve.statuses by status-version including package/component
        conditions = []
        for key, version in enumerate(clean_versions):
            sub_conditions = [
                Status.release_codename.in_(version),
                Status.status.in_(clean_statuses[key]),
                CVE.id == Status.cve_id,
            ]

            if package:
                sub_conditions.append(Status.package_name == package)

            if component:
                sub_conditions.append(Status.component == component)

            condition = Package.statuses.any(
                and_(*[sub_condition for sub_condition in sub_conditions]))

            conditions.append(condition)

        parameters.append(
            Status.package.has(and_(*[condition for condition in conditions])))

    # apply CVE statuses filter parameters
    if len(parameters) > 0:
        cves_query = cves_query.filter(
            CVE.statuses.any(and_(*[p for p in parameters])))

    sort = asc if order_by == "oldest" else desc

    cves_query = (cves_query.group_by(CVE.id).order_by(
        case(
            [(CVE.published.is_(None), 1)],
            else_=0,
        ),
        sort(CVE.published),
    ).limit(limit).offset(offset).from_self().join(CVE.statuses).options(
        contains_eager(CVE.statuses)))

    # get filtered cves
    raw_cves = cves_query.all()

    cves = []
    # filter cve.packages by parameters
    for raw_cve in raw_cves:
        cve = raw_cve[0]
        packages = cve.packages

        # filter by package name
        if package:
            packages = {
                package_name: package_statuses
                for package_name, package_statuses in packages.items()
                if package_name == package
            }

        # filter by component
        if component:
            packages = {
                package_name: package_statuses
                for package_name, package_statuses in packages.items()
                if any(status.component == component
                       for status in package_statuses.values())
            }

        # filter by status and version
        if _should_filter_by_version_and_status(statuses, versions):
            packages = {
                package_name: package_statuses
                for package_name, package_statuses in packages.items() if all(
                    any(package_status.release_codename in version
                        and package_status.status in clean_statuses[key]
                        for package_status in package_statuses.values())
                    for key, version in enumerate(clean_versions))
            }

        # refresh cve.statuses after cve.packages filter
        # BUG FIX: the original rebuilt a list named `statuses`, which (a)
        # clobbered the `status` kwarg consulted by
        # _should_filter_by_version_and_status on later loop iterations and
        # (b) was reset per package, so only the last package's statuses
        # survived (or a stale value leaked when `packages` was empty).
        # Collect every filtered status under a fresh name instead.
        filtered_statuses = []
        for package_statuses in packages.values():
            filtered_statuses.extend(package_statuses.values())

        cve.statuses = filtered_statuses
        cve.notices = cve.get_filtered_notices(show_hidden)

        cves.append(cve)

    return {
        "cves": cves,
        "offset": offset,
        "limit": limit,
        "total_results": raw_cves[0][1] if cves else 0,
    }