Example #1
File: reports.py Project: sorki/faf
def list():
    pagination = Pagination(request)

    filter_form = ReportFilterForm(request.args)
    if filter_form.validate():
        if request_wants_json():
            r = get_reports(filter_form, pagination)
        else:
            list_table_rows, report_count = \
                reports_list_table_rows_cache(filter_form, pagination)

            return render_template("reports/list.html",
                                   list_table_rows=list_table_rows,
                                   report_count=report_count,
                                   filter_form=filter_form,
                                   pagination=pagination)
    else:
        r = []

    if request_wants_json():
        return jsonify(dict(reports=r))

    return render_template("reports/list.html",
                           reports=r,
                           report_count=len(r),
                           filter_form=filter_form,
                           pagination=pagination)
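Every example on this page branches on request_wants_json(). The helper itself is not shown in the excerpts; the following is only a minimal sketch of the common Flask content-negotiation idiom such a helper usually wraps, not the projects' exact code.

from flask import request

def request_wants_json():
    # Prefer JSON only when the client explicitly ranks it above HTML,
    # so browsers sending "Accept: */*" still receive the HTML pages.
    best = request.accept_mimetypes.best_match(["application/json", "text/html"])
    return (best == "application/json" and
            request.accept_mimetypes[best] > request.accept_mimetypes["text/html"])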
Example #2
def list():
    pagination = Pagination(request)

    filter_form = ProblemFilterForm(request.args)
    if filter_form.validate():
        if request_wants_json():
            p = get_problems(filter_form, pagination)
        else:
            list_table_rows, problem_count = \
                problems_list_table_rows_cache(filter_form, pagination)

            return render_template("problems/list.html",
                                   list_table_rows=list_table_rows,
                                   problem_count=problem_count,
                                   filter_form=filter_form,
                                   pagination=pagination)
    else:
        p = []

    if request_wants_json():
        return jsonify(dict(problems=p))

    return render_template("problems/list.html",
                           problems=p,
                           problem_count=len(p),
                           filter_form=filter_form,
                           pagination=pagination)
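Both listings above pick the JSON branch purely through content negotiation. A hypothetical client call (the URL and route here are illustrative assumptions, not taken from the projects) could look like this:

import requests  # assumes the requests library and an illustrative service URL

resp = requests.get("https://faf.example.org/problems/",
                    headers={"Accept": "application/json"})
problems = resp.json()["problems"]  # matches jsonify(dict(problems=p)) above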
Example #3
File: reports.py Project: sorki/faf
def attach():
    form = NewAttachmentForm()
    if request.method == "POST":
        try:
            if not form.validate() or form.file.name not in request.files:
                raise InvalidUsage("Invalid form data.", 400)
            raw_data = request.files[form.file.name].read()

            try:
                data = json.loads(raw_data)
            except ValueError:
                raise InvalidUsage("Invalid JSON file", 400)

            try:
                ureport.validate_attachment(data)
            except Exception as ex:
                raise InvalidUsage("Validation failed: %s" % ex, 400)

            attachment = data

            max_attachment_length = 2048

            if len(str(attachment)) > max_attachment_length:
                err = "uReport attachment may only be {0} bytes long" \
                      .format(max_attachment_length)
                raise InvalidUsage(err, 413)

            fname = str(uuid.uuid4())
            fpath = os.path.join(paths["attachments_incoming"], fname)
            with open(fpath, "w") as file:
                file.write(raw_data)

            if request_wants_json():
                json_response = jsonify({"result": True})
                json_response.status_code = 202
                return json_response
            else:
                flash("The attachment was saved successfully. Thank you.",
                      "success")
                return render_template("reports/attach.html",
                                       form=form), 202

        except InvalidUsage as e:
            if request_wants_json():
                response = jsonify({"error": e.message})
                response.status_code = e.status_code
                return response
            else:
                flash(e.message, "danger")
                return render_template("reports/attach.html",
                                       form=form), e.status_code

    return render_template("reports/attach.html",
                           form=form)
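The faf examples raise and catch an InvalidUsage exception carrying a user-facing message and an HTTP status code. A minimal sketch consistent with how it is constructed and read here (the project's real class may differ) is:

class InvalidUsage(Exception):
    """Carries a user-facing message plus the HTTP status code to return."""

    def __init__(self, message, status_code=400):
        super(InvalidUsage, self).__init__(message)
        self.message = message
        self.status_code = status_code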
Example #4
def _send_confirmation(email, host):
    '''
    Helper that actually creates the confirmation nonce
    and sends it to the associated email address. Renders
    different templates depending on the result.
    '''
    log.debug('Sending confirmation')
    if VALID_NONCE(HASH(email, host)):
        log.debug('Confirmation already sent')
        if request_wants_json():
            return jsonify({'success': "confirmation email sent"})
        else:
            return render_template('confirmation_sent.html', email=email, host=host)

    link = url_for('confirm_email', nonce=HASH(email, host), _external=True)
    
    def render_content(type):
        return render_template('email/confirm.%s' % type, 
                                  email=email, 
                                  host=host, 
                                  nonce_link=link)

    log.debug('Sending email')

    result = _send_email(to=email, 
                         subject='Confirm email for %s' % settings.SERVICE_NAME, 
                         text=render_content('txt'),
                         html=render_content('html'), 
                         sender=DEFAULT_SENDER) 

    log.debug('Sent')

    if not result[0]:
        if request_wants_json():
            return jsonerror(500, {'error': "Unable to send email"})
        else:
            return render_template('error.html', 
                                   title='Unable to send email', 
                                   text=result[1]), 500


    REDIS.set(NONCE_KEY(email, host), None)
    REDIS.set(HASH_EMAIL_KEY(HASH(email, host)), email)
    REDIS.set(HASH_HOST_KEY(HASH(email, host)), host)

    if request_wants_json():
        return jsonify({'success': "confirmation email sent"})
    else:
        return render_template('confirmation_sent.html', email=email, host=host)
Example #5
def _send_form(email, host):
    '''
    Sends request.form to user's email. 
    Assumes email has been verified.
    '''

    data, keys = _form_to_dict(request.form)

    subject = data.get('_subject', 'New submission from %s' % _referrer_to_path(request.referrer))
    reply_to = data.get('_replyto', None)
    cc = data.get('_cc', None)
    next = data.get('_next', url_for('thanks', next=request.referrer))
    spam = data.get('_gotcha', None)

    # prevent submitting empty form
    if not any(data.values()):
        if request_wants_json():
            return jsonerror(400, {'error': "Can't send an empty form"})
        else:
            return render_template('error.html', 
                                   title='Can\'t send an empty form', 
                                   text=str('<a href="%s">Return to form</a>' % request.referrer)), 400

    if not spam:
        text = render_template('email/form.txt', data=data, host=host, keys=keys)
        html = render_template('email/form.html', data=data, host=host, keys=keys)
        result = _send_email(to=email, 
                          subject=subject,
                          text=text,
                          html=html,
                          sender=DEFAULT_SENDER,
                          reply_to=reply_to,
                          cc=cc)

        if not result[0]:
            if request_wants_json():
                return jsonerror(500, {'error': "Unable to send email"})
            else:
                return render_template('error.html', 
                                       title='Unable to send email', 
                                       text=result[1]), 500

        REDIS.incr(COUNTER_KEY(email, host))

    if request_wants_json():
        return jsonify({'success': "Email sent"})
    else:
        return redirect(next, code=302)
Example #6
def get_hash(os=None, release=None, since=None, to=None):
    if to:
        to = datetime.datetime.strptime(to, "%Y-%m-%d")
        since = datetime.datetime.strptime(since, "%Y-%m-%d")

        report_hash = queries.get_all_report_hashes(db, opsys=os,
                                                    opsys_releases=release,
                                                    date_from=since,
                                                    date_to=to)

    elif since:
        since = datetime.datetime.strptime(since, "%Y-%m-%d")

        report_hash = queries.get_all_report_hashes(db, opsys=os,
                                                    opsys_releases=release,
                                                    date_from=since)

    elif release:
        report_hash = queries.get_all_report_hashes(db, opsys=os,
                                                    opsys_releases=release)

    elif os:
        report_hash = queries.get_all_report_hashes(db, opsys=os)
    else:
        report_hash = queries.get_all_report_hashes(db)

    r_hash = []

    for item in report_hash:
        r_hash.append(item.hash)

    if request_wants_json():
        return jsonify({"data": r_hash})
    else:
        abort(405)
Example #8
def item(category_name, item_name):
    user = getUser()
    category_items = getItemsByCategory(category_name)
    category = getCategory(category_name)
    item = findItem(item_name, category_items)
    if item:
        if request_wants_json():
            return jsonify(item.serialize)
        return render_template('item.html', item=item, items=category_items,
                               category=category, user=user)
    else:
        return json_response('Item not found', 404)
Example #9
def torrents(start=0):
    count = int(request.args.get('count', PAGECOUNT))
    tsummary = []
    total = rc.zcard("torrents")
    for info_hash in rc.zrange("torrents", start, start + count):
        user = rc.hget("info|%s" % info_hash, 'user')
        tsummary.append((info_hash, user))

    if request_wants_json():
        return jsonify(tsummary)
    else:
        return render_template("torrents.html", torrents=tsummary, start=start, count=count, total=total)
Example #10
def send(email):
    ''' 
    Main endpoint, checks if email+host is valid and sends 
    either form data or verification to email 
    '''

    if request.method == 'GET':
        if request_wants_json():
            return jsonerror(405, {'error': "Please submit POST request."})
        else:
            return render_template('info.html', 
                                   title='Form should POST', 
                                   text='Make sure your form has the <span class="code"><strong>method="POST"</strong></span> attribute'), 405

    if not IS_VALID_EMAIL(email):
        if request_wants_json():
            return jsonerror(400, {'error': "Invalid email address"})
        else:
            return render_template('error.html', 
                                   title='Check email address', 
                                   text='Email address %s is not formatted correctly' % str(email)), 400

    # Earlier we used the referrer, which is problematic as it also includes URL
    # parameters. To maintain backwards compatibility and to avoid doing migrations,
    # also check whether the email is confirmed for the entire referrer.
    host = flask.request.referrer
    new_host = _referrer_to_path(host)

    if not host:
        if request_wants_json():
            return jsonerror(400, {'error': "Invalid \"Referrer\" header"})
        else:
            return render_template('error.html', 
                                   title='Unable to submit form', 
                                   text='Make sure your form is running on a proper server. For geeks: could not find the "Referrer" header.'), 400

    if not EMAIL_CONFIRMED(HASH(email, host)) and not EMAIL_CONFIRMED(HASH(email, new_host)):
        return _send_confirmation(email, new_host)

    return _send_form(email, new_host)
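The formspree-style examples normalize the referrer with _referrer_to_path() so that query parameters do not affect the stored host. A plausible sketch of such a helper (an assumption, not the project's code; written for Python 2 to match the excerpts) is:

from urlparse import urlparse  # urllib.parse on Python 3

def _referrer_to_path(referrer):
    # "https://example.com/contact.html?x=1" -> "example.com/contact.html"
    parsed = urlparse(referrer or "")
    return parsed.netloc + parsed.path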
Example #11
def index():
    user = getUser()
    categories = getCategories()
    items = getItems()

    if request_wants_json():
        index_data = {
            'categories': [category.serialize for category in categories],
            'recent_items': [item.serialize for item in items]}
        return jsonify(index_data)

    return render_template('index.html', categories=categories, items=items,
                           user=user)
Example #12
File: reports.py Project: trams/faf
def list():
    pagination = Pagination(request)

    filter_form = ReportFilterForm(request.args)
    filter_form.components.choices = component_list()
    if filter_form.validate():
        opsysrelease_ids = [
            osr.id for osr in (filter_form.opsysreleases.data or [])]

        component_ids = []
        for comp in filter_form.components.data or []:
            component_ids += map(int, comp.split(','))

        if filter_form.associate.data:
            associate_id = filter_form.associate.data.id
        else:
            associate_id = None
        arch_ids = [arch.id for arch in (filter_form.arch.data or [])]

        types = filter_form.type.data or []

        r = query_reports(
            db,
            opsysrelease_ids=opsysrelease_ids,
            component_ids=component_ids,
            associate_id=associate_id,
            arch_ids=arch_ids,
            types=types,
            first_occurrence_since=filter_form.first_occurrence_daterange.data
            and filter_form.first_occurrence_daterange.data[0],
            first_occurrence_to=filter_form.first_occurrence_daterange.data
            and filter_form.first_occurrence_daterange.data[1],
            last_occurrence_since=filter_form.last_occurrence_daterange.data
            and filter_form.last_occurrence_daterange.data[0],
            last_occurrence_to=filter_form.last_occurrence_daterange.data
            and filter_form.last_occurrence_daterange.data[1],
            limit=pagination.limit,
            offset=pagination.offset,
            order_by=filter_form.order_by.data)
    else:
        r = []

    if request_wants_json():
        return jsonify(dict(reports=r))

    return render_template("reports/list.html",
                           reports=r,
                           filter_form=filter_form,
                           pagination=pagination)
Example #13
def torrent_info(info_hash):
    if rc.zscore("torrents", info_hash) is None:
        abort(404)

    tm = rc.hgetall("info|%s" % info_hash)
    torrent_enc = rc.get(info_hash)
    torrent_data = bdecode(torrent_enc)
    torrent_data.update(tm)
    torrent_data['creation_time'] = datetime.fromtimestamp(torrent_data.get('creation date', 0))
    if 'pieces' in torrent_data['info']:
        del torrent_data['info']['pieces']
    if request_wants_json():
        return jsonify(torrent_data)
    else:
        return render_template("torrent.html", **torrent_data)
Example #14
def del_torrent(info_hash):
    if has_perm(info_hash, 'd'):
        try:
            if rc.exists("perm|%s" % info_hash):
                rc.delete("perm|%s" % info_hash)
            if rc.exists("info|%s" % info_hash):
                rc.delete("info|%s" % info_hash)
            if rc.exists(info_hash):
                rc.delete(info_hash)
            rc.zrem("torrents", info_hash)
        except Exception as ex:
            traceback.print_exc()
        if request_wants_json():
            return jsonify(dict(info_hash=info_hash))
        else:
            return redirect("/datasets")
Example #15
def category(category_name):
    user = getUser()
    categories = getCategories()
    category = findCategory(category_name, categories)

    if category:
        items = getItemsByCategory(category_name)

        if request_wants_json():
            category_dict = category.serialize
            category_dict['items'] = [item.serialize for item in items]
            return jsonify(category_dict)
        return render_template('index.html', category=category,
                               items=items, categories=categories, user=user)
    else:
        return json_response('Category not found', 404)
Example #16
def item(problem_id):
    problem = db.session.query(Problem).filter(
        Problem.id == problem_id).first()

    if problem is None:
        abort(404)

    report_ids = [report.id for report in problem.reports]

    sub = (db.session.query(ReportOpSysRelease.opsysrelease_id,
                            func.sum(ReportOpSysRelease.count).label("cnt"))
           .join(Report)
           .filter(Report.id.in_(report_ids))
           .group_by(ReportOpSysRelease.opsysrelease_id)
           .subquery())

    osreleases = (db.session.query(OpSysRelease, sub.c.cnt)
                            .join(sub)
                            .order_by(desc("cnt"))
                            .all())

    sub = (db.session.query(ReportArch.arch_id,
                            func.sum(ReportArch.count).label("cnt"))
           .join(Report)
           .filter(Report.id.in_(report_ids))
           .group_by(ReportArch.arch_id)
           .subquery())

    arches = (db.session.query(Arch, sub.c.cnt).join(sub)
                        .order_by(desc("cnt"))
                        .all())

    exes = (db.session.query(ReportExecutable.path,
                             func.sum(ReportExecutable.count).label("cnt"))
            .join(Report)
            .filter(Report.id.in_(report_ids))
            .group_by(ReportExecutable.path)
            .order_by(desc("cnt"))
            .all())

    sub = (db.session.query(ReportPackage.installed_package_id,
                            func.sum(ReportPackage.count).label("cnt"))
           .join(Report)
           .filter(Report.id.in_(report_ids))
           .group_by(ReportPackage.installed_package_id)
           .subquery())
    packages_known = db.session.query(Package, sub.c.cnt).join(sub).all()

    packages_unknown = (db.session.query(ReportUnknownPackage,
                                         ReportUnknownPackage.count)
                                  .join(Report)
                                  .filter(Report.id.in_(report_ids))).all()

    packages = packages_known + packages_unknown

    # creates a package_counts list with this structure:
    # [(package name, count, [(package version, count in the version)])]
    names = defaultdict(lambda: {"count": 0, "versions": defaultdict(int)})
    for (pkg, cnt) in packages:
        names[pkg.name]["name"] = pkg.name
        names[pkg.name]["count"] += cnt
        names[pkg.name]["versions"][pkg.evr()] += cnt

    package_counts = []
    for pkg in sorted(names.values(), key=itemgetter("count"), reverse=True):
        package_counts.append((
            pkg["name"],
            pkg["count"],
            sorted(pkg["versions"].items(), key=itemgetter(1), reverse=True)))

    for report in problem.reports:
        for backtrace in report.backtraces:
            fid = 0
            for frame in backtrace.frames:
                fid += 1
                frame.nice_order = fid

    bt_hashes = (db.session.query(ReportHash.hash)
                           .join(Report)
                           .join(Problem)
                           .filter(Problem.id == problem_id)
                           .distinct(ReportHash.hash).all())
    # Limit to 10 bt_hashes (otherwise the URL can get too long).
    # Select the 10 hashes uniformly from the entire list to make sure they are
    # a good representation. (Taking just the first 10 could mean selecting the
    # 10 oldest, which would not be representative.)
    k = min(len(bt_hashes), 10)
    a = 0
    d = len(bt_hashes) / float(k) if k else 0
    bt_hashes_limited = []
    for i in range(k):
        bt_hashes_limited.append("bth=" + bt_hashes[int(a)][0])
        a += d
    bt_hash_qs = "&".join(bt_hashes_limited)

    forward = {"problem": problem,
               "osreleases": metric(osreleases),
               "arches": metric(arches),
               "exes": metric(exes),
               "package_counts": package_counts,
               "bt_hash_qs": bt_hash_qs
               }

    if request_wants_json():
        return jsonify(forward)

    is_maintainer = is_problem_maintainer(db, g.user, problem)
    forward["is_maintainer"] = is_maintainer

    forward["extfafs"] = get_external_faf_instances(db)

    if report_ids:
        bt_diff_form = BacktraceDiffForm()
        bt_diff_form.lhs.choices = [(id, id) for id in report_ids]
        bt_diff_form.rhs.choices = bt_diff_form.lhs.choices
        forward['bt_diff_form'] = bt_diff_form

    return render_template("problems/item.html", **forward)
Example #17
def new(url_fname=None):
    """
    Handle dump dir archive uploads
    """

    form = NewDumpDirForm()
    if request.method in ["POST", "PUT"]:
        try:
            if request.method == "POST":
                if not form.validate() or form.file.name not in request.files:
                    raise InvalidUsage("Invalid form data.", 400)

                archive_file = request.files[form.file.name]
                archive_fname = archive_file.filename

            if request.method == "PUT":
                archive_file = StringIO(request.stream.read())
                archive_fname = url_fname

            archive_file.seek(0, os.SEEK_END)
            archive_size = archive_file.tell()
            archive_file.seek(0)

            if not archive_size:
                raise InvalidUsage("Empty archive received", 400)

            if not check_filename(archive_fname):
                raise InvalidUsage("Wrong archive file name", 400)

            # sanitize input filename just to be sure
            archive_fname = secure_filename(archive_fname)

            if not os.path.exists(paths["dumpdir"]):
                raise InvalidUsage("That's embarrassing! We have some troubles"
                                   " with deployment. Please try again later.",
                                   500)

            count = 0
            try:
                count = sum(
                    1 for x in os.listdir(paths["dumpdir"])
                    if os.path.isfile(os.path.join(paths["dumpdir"], x)))

            except Exception as e:
                raise InvalidUsage("That's embarrassing! We have some troubles"
                                   " with storage. Please try again later.",
                                   500)

            if count >= int(config["dumpdir.cachedirectorycountquota"]):
                raise InvalidUsage("That's embarrassing! We have reached"
                                   " the limit of uploaded archives."
                                   " Please try again later.",
                                   500)

            if archive_size > int(config["dumpdir.maxdumpdirsize"]):
                raise InvalidUsage("Dump dir archive is too large", 413)

            used_space = 0.0
            try:
                used_space = sum(
                    float(os.path.getsize(x))
                    for x in map(lambda f: os.path.join(paths["dumpdir"], f),
                                 os.listdir(paths["dumpdir"]))
                    if os.path.isfile(x))
            except Exception as e:
                raise InvalidUsage("That's embarrasing! We have some"
                                   " troubles with disk space."
                                   " Please try again later.",
                                   500)

            quota = int(config["dumpdir.cachedirectorysizequota"])
            if (quota - archive_size) < used_space:
                raise InvalidUsage("That's embarrassing! We ran out"
                                   " of disk space."
                                   " Please try again later.",
                                   500)

            fpath = os.path.join(paths["dumpdir"], archive_fname)

            if os.path.exists(fpath):
                raise InvalidUsage("Dump dir archive already exists.", 409)

            with open(fpath, 'w') as dest:
                dest.write(archive_file.read())

            if request_wants_json():
                response = jsonify({"ok": "ok"})
                response.status_code = 201
                return response
            else:
                flash("Uploaded successfully.")
                return render_template("dumpdirs/new.html",
                                       form=form)

        except InvalidUsage as e:
            if e.status_code == 500:
                logger.error(e.message)
            elif e.status_code >= 400:
                logger.warning(e.message)

            if request_wants_json():
                response = jsonify({"error": e.message})
                response.status_code = e.status_code
                return response
            else:
                flash(e.message, "danger")
                return render_template("dumpdirs/new.html",
                                       form=form), e.status_code

    return render_template("dumpdirs/new.html",
                           form=form)
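The upload handler above enforces two quotas read from the config (dumpdir.cachedirectorycountquota and dumpdir.cachedirectorysizequota). Condensed into one helper, the checks amount to the following sketch; it is illustrative only, not the project's code:

import os

def dumpdir_has_room(dumpdir, archive_size, count_quota, size_quota):
    entries = [os.path.join(dumpdir, f) for f in os.listdir(dumpdir)]
    files = [f for f in entries if os.path.isfile(f)]
    used_space = sum(float(os.path.getsize(f)) for f in files)
    # Mirrors the checks above: stay under the file-count quota and make
    # sure the new archive still fits into the size quota.
    return (len(files) < count_quota and
            used_space + archive_size <= size_quota)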
Example #18
File: reports.py Project: sorki/faf
def new():
    form = NewReportForm()
    if request.method == "POST":
        try:
            if not form.validate() or form.file.name not in request.files:
                raise InvalidUsage("Invalid form data.", 400)
            raw_data = request.files[form.file.name].read()
            try:
                data = json.loads(raw_data)
            except Exception as ex:
                _save_invalid_ureport(db, raw_data, str(ex))
                raise InvalidUsage("Couldn't parse JSON data.", 400)

            try:
                ureport.validate(data)
            except Exception as exp:
                reporter = None
                if ("reporter" in data and
                        "name" in data["reporter"] and
                        "version" in data["reporter"]):
                    reporter = "{0} {1}".format(data["reporter"]["name"],
                                                data["reporter"]["version"])

                _save_invalid_ureport(db, json.dumps(data, indent=2),
                                      str(exp), reporter=reporter)

                if ("os" in data and
                        "name" in data["os"] and
                        data["os"]["name"] not in systems and
                        data["os"]["name"].lower() not in systems):
                    _save_unknown_opsys(db, data["os"])

                raise InvalidUsage("uReport data is invalid.", 400)

            report = data

            max_ureport_length = InvalidUReport.__lobs__["ureport"]

            if len(str(report)) > max_ureport_length:
                raise InvalidUsage("uReport may only be {0} bytes long"
                                   .format(max_ureport_length), 413)

            osr_id = None
            osr = None
            if report["os"]["name"] in systems:
                osr = (db.session.query(OpSysRelease)
                       .join(OpSys)
                       .filter(OpSys.name ==
                               systems[report["os"]["name"]].nice_name)
                       .filter(OpSysRelease.version ==
                               report["os"]["version"])
                       .first())

                if osr:
                    osr_id = osr.id
            try:
                dbreport = ureport.is_known(report, db, return_report=True,
                                            opsysrelease_id=osr_id)
            except Exception as e:
                logging.exception(e)
                dbreport = None

            known = bool(dbreport)
            fname = str(uuid.uuid4())
            fpath = os.path.join(paths["reports_incoming"], fname)
            with open(fpath, 'w') as file:
                file.write(raw_data)

            if request_wants_json():
                response = {'result': known}

                try:
                    report2 = ureport2(report)
                except FafError:
                    report2 = None

                if report2 is not None:
                    solution = find_solution(report2, db=db, osr=osr)
                    if solution is not None:
                        response["message"] = (
                            "Your problem seems to be caused by {0}\n\n"
                            "{1}".format(solution.cause, solution.note_text))

                        if solution.url:
                            response["message"] += (
                                "\n\nYou can get more information at {0}"
                                .format(solution.url))

                        solution_dict = {"cause": solution.cause,
                                         "note":  solution.note_text,
                                         "url":   solution.url}
                        if not solution_dict["url"]:
                            del solution_dict["url"]
                        response["solutions"] = [solution_dict]
                        response["result"] = True

                    try:
                        problemplugin = problemtypes[
                            report2["problem"]["type"]]
                        response["bthash"] = problemplugin.hash_ureport(
                            report2["problem"])
                    except Exception as e:
                        logging.exception(e)
                        pass

                if known:
                    url = url_for('reports.item', report_id=dbreport.id,
                                  _external=True)
                    parts = [{"reporter": "ABRT Server",
                              "value": url,
                              "type": "url"}]

                    bugs = (db.session.query(BzBug)
                                      .join(ReportBz)
                                      .filter(ReportBz.bzbug_id == BzBug.id)
                                      .filter(ReportBz.report_id == dbreport.id)
                                      .all())
                    for bug in bugs:
                        parts.append({"reporter": "Bugzilla",
                                      "value": bug.url,
                                      "type": "url"})

                    if 'message' not in response:
                        response['message'] = ''
                    else:
                        response['message'] += '\n\n'

                    response[
                        'message'] += "\n".join(p["value"] for p in parts
                                                if p["type"].lower() == "url")
                    response['reported_to'] = parts

                json_response = jsonify(response)
                json_response.status_code = 202
                return json_response
            else:
                flash(
                    "The uReport was saved successfully. Thank you.", "success")
                return render_template("reports/new.html",
                                       form=form), 202

        except InvalidUsage as e:
            if request_wants_json():
                response = jsonify({"error": e.message})
                response.status_code = e.status_code
                return response
            else:
                flash(e.message, "danger")
                return render_template("reports/new.html",
                                       form=form), e.status_code

    return render_template("reports/new.html",
                           form=form)
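For reference, the JSON body assembled by new() above for a known report has roughly this shape; every value below is made up for illustration:

example_response = {
    "result": True,
    "bthash": "0123456789abcdef",  # hash produced by the problem plugin
    "message": "Your problem seems to be caused by ...",
    "solutions": [{"cause": "...", "note": "...", "url": "..."}],
    "reported_to": [
        {"reporter": "ABRT Server", "value": "https://faf.example.org/reports/1234/", "type": "url"},
        {"reporter": "Bugzilla", "value": "https://bugzilla.example.org/show_bug.cgi?id=4321", "type": "url"},
    ],
}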
Example #19
File: reports.py Project: sorki/faf
def item(report_id):
    result = (db.session.query(Report, OpSysComponent)
              .join(OpSysComponent)
              .filter(Report.id == report_id)
              .first())

    if result is None:
        abort(404)

    report, component = result

    releases = (db.session.query(ReportOpSysRelease, ReportOpSysRelease.count)
                .filter(ReportOpSysRelease.report_id == report_id)
                .order_by(desc(ReportOpSysRelease.count))
                .all())

    arches = (db.session.query(ReportArch, ReportArch.count)
              .filter(ReportArch.report_id == report_id)
              .order_by(desc(ReportArch.count))
              .all())

    modes = (db.session.query(ReportSelinuxMode, ReportSelinuxMode.count)
             .filter(ReportSelinuxMode.report_id == report_id)
             .order_by(desc(ReportSelinuxMode.count))
             .all())

    history_select = lambda table, date: (db.session.query(table).
                                          filter(table.report_id == report_id)
                                          # Flot is confused if not ordered
                                          .order_by(date)
                                          .all())

    daily_history = history_select(ReportHistoryDaily, ReportHistoryDaily.day)
    weekly_history = history_select(ReportHistoryWeekly, ReportHistoryWeekly.week)
    monthly_history = history_select(ReportHistoryMonthly, ReportHistoryMonthly.month)

    packages = load_packages(db, report_id)

    # creates a package_counts list with this structure:
    # [(package name, count, [(package version, count in the version)])]
    names = defaultdict(lambda: {"count": 0, "versions": defaultdict(int)})
    for pkg in packages:
        names[pkg.iname]["name"] = pkg.iname
        names[pkg.iname]["count"] += pkg.count
        names[pkg.iname]["versions"]["{0}:{1}-{2}"
            .format(pkg.iepoch, pkg.iversion, pkg.irelease)] += pkg.count

    package_counts = []
    for pkg in sorted(names.values(), key=itemgetter("count"), reverse=True):
        package_counts.append((
            pkg["name"],
            pkg["count"],
            sorted(pkg["versions"].items(), key=itemgetter(1), reverse=True)))

    try:
        backtrace = report.backtraces[0].frames
    except:
        backtrace = []

    fid = 0
    for frame in backtrace:
        fid += 1
        frame.nice_order = fid

    is_maintainer = is_component_maintainer(db, g.user, component)

    contact_emails = []
    if is_maintainer:
        contact_emails = [email_address for (email_address, ) in
                          (db.session.query(ContactEmail.email_address)
                                     .join(ReportContactEmail)
                                     .filter(ReportContactEmail.report == report))]

    forward = dict(report=report,
                   component=component,
                   releases=metric(releases),
                   arches=metric(arches),
                   modes=metric(modes),
                   daily_history=daily_history,
                   weekly_history=weekly_history,
                   monthly_history=monthly_history,
                   crashed_packages=packages,
                   package_counts=package_counts,
                   backtrace=backtrace,
                   contact_emails=contact_emails)

    if request_wants_json():
        return jsonify(forward)

    forward["is_maintainer"] = is_maintainer
    forward["extfafs"] = get_external_faf_instances(db)
    return render_template("reports/item.html", **forward)
Example #20
File: reports.py Project: trams/faf
def item(report_id):
    result = (db.session.query(Report, OpSysComponent)
              .join(OpSysComponent)
              .filter(Report.id == report_id)
              .first())

    if result is None:
        abort(404)

    report, component = result

    releases = (db.session.query(ReportOpSysRelease, ReportOpSysRelease.count)
                .filter(ReportOpSysRelease.report_id == report_id)
                .order_by(desc(ReportOpSysRelease.count))
                .all())

    arches = (db.session.query(ReportArch, ReportArch.count)
              .filter(ReportArch.report_id == report_id)
              .order_by(desc(ReportArch.count))
              .all())

    modes = (db.session.query(ReportSelinuxMode, ReportSelinuxMode.count)
             .filter(ReportSelinuxMode.report_id == report_id)
             .order_by(desc(ReportSelinuxMode.count))
             .all())

    history_select = lambda table: (db.session.query(table).
                                    filter(table.report_id == report_id)
                                    .all())

    daily_history = history_select(ReportHistoryDaily)
    weekly_history = history_select(ReportHistoryWeekly)
    monthly_history = history_select(ReportHistoryMonthly)

    packages = load_packages(db, report_id, "CRASHED")
    related_packages = load_packages(db, report_id, "RELATED")
    related_packages_nevr = sorted(
        [metric_tuple(name="{0}-{1}:{2}-{3}".format(
            pkg.iname, pkg.iepoch, pkg.iversion, pkg.irelease),
            count=pkg.count) for pkg in related_packages],
        key=itemgetter(0))

    merged_name = dict()
    for package in related_packages:
        if package.iname in merged_name:
            merged_name[package.iname] += package.count
        else:
            merged_name[package.iname] = package.count

    related_packages_name = sorted([metric_tuple(name=item[0], count=item[1])
                                    for item in merged_name.items()],
                                   key=itemgetter(0),
                                   reverse=True)

    try:
        backtrace = report.backtraces[0].frames
    except:
        backtrace = []

    fid = 0
    for frame in backtrace:
        fid += 1
        frame.nice_order = fid

    forward = dict(report=report,
                   component=component,
                   releases=metric(releases),
                   arches=metric(arches),
                   modes=metric(modes),
                   daily_history=daily_history,
                   weekly_history=weekly_history,
                   monthly_history=monthly_history,
                   crashed_packages=packages,
                   related_packages_nevr=related_packages_nevr,
                   related_packages_name=related_packages_name,
                   backtrace=backtrace)

    if request_wants_json():
        return jsonify(forward)

    return render_template("reports/item.html", **forward)
Example #21
def item(report_id, want_object=False):
    result = (db.session.query(Report, OpSysComponent)
              .join(OpSysComponent)
              .filter(Report.id == report_id)
              .first())

    if result is None:
        abort(404)

    report, component = result

    solutions = None

    if report.max_certainty is not None:
        osr = get_report_opsysrelease(db=db, report_id=report.id)
        solutions = [find_solution(report, db=db, osr=osr)]

    releases = (db.session.query(ReportOpSysRelease, ReportOpSysRelease.count)
                .filter(ReportOpSysRelease.report_id == report_id)
                .order_by(desc(ReportOpSysRelease.count))
                .all())

    arches = (db.session.query(ReportArch, ReportArch.count)
              .filter(ReportArch.report_id == report_id)
              .order_by(desc(ReportArch.count))
              .all())

    modes = (db.session.query(ReportSelinuxMode, ReportSelinuxMode.count)
             .filter(ReportSelinuxMode.report_id == report_id)
             .order_by(desc(ReportSelinuxMode.count))
             .all())

    history_select = lambda table, date, date_range: (db.session.query(table).
                                                      filter(table.report_id == report_id)
                                                      .filter(date >= date_range)
                                                      # Flot is confused if not ordered
                                                      .order_by(date)
                                                      .all())

    MAX_DAYS = 20  # Default is 20
    MAX_WEEK = 20  # Default is 20
    MAX_MONTH = 20  # Default is 20

    today = datetime.date.today()

    # Show only 20 days
    daily_history = history_select(ReportHistoryDaily, ReportHistoryDaily.day,
                                   (today - timedelta(days=MAX_DAYS)))

    if len(daily_history) == 0:
        for x in range(0, MAX_DAYS):
            daily_history.append({'day': today - timedelta(x),
                                  'count': 0,
                                  'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id})

    elif len(daily_history) < MAX_DAYS:
        if daily_history[-1].day < (today):
            daily_history.append({'day': today,
                                  'count': 0,
                                  'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id
                                  })

        if daily_history[0].day > (today - timedelta(MAX_DAYS)):
            daily_history.append({'day': today - timedelta(MAX_DAYS),
                                  'count': 0,
                                  'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id
                                  })

    # Show only 20 weeks
    last_monday = datetime.datetime.today() - timedelta(datetime.datetime.today().weekday())

    weekly_history = history_select(ReportHistoryWeekly, ReportHistoryWeekly.week,
                                    (last_monday - timedelta(days=MAX_WEEK*7)))
    if len(weekly_history) == 0:
        for x in range(0, MAX_WEEK):
            weekly_history.append({'week': last_monday - timedelta(x*7),
                                   'count': 0,
                                   'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id})
    elif len(weekly_history) < MAX_WEEK:
        if weekly_history[-1].week < (last_monday.date()):
            weekly_history.append({'week': last_monday,
                                   'count': 0,
                                   'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id})

        if weekly_history[0].week > ((last_monday - timedelta(7*MAX_WEEK)).date()):
            weekly_history.append({'week': last_monday - timedelta(7*MAX_WEEK),
                                   'count': 0,
                                   'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id})

    # Show only 20 months
    monthly_history = history_select(ReportHistoryMonthly, ReportHistoryMonthly.month,
                                     (today - relativedelta(months=MAX_MONTH)))

    first_day_of_month = lambda t: (datetime.date(t.year, t.month, 1))

    fdom = first_day_of_month(datetime.datetime.today())

    if len(monthly_history) == 0:
        for x in range(0, MAX_MONTH):
            monthly_history.append({'month': fdom - relativedelta(months=x),
                                   'count': 0,
                                   'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id})

    elif len(monthly_history) < MAX_MONTH:
        if monthly_history[-1].month < (fdom):
            monthly_history.append({'month': fdom,
                                   'count': 0,
                                   'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id})

        if monthly_history[0].month > (fdom - relativedelta(months=MAX_MONTH)):
            monthly_history.append({'month': fdom - relativedelta(months=MAX_MONTH),
                                   'count': 0,
                                   'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id})

    complete_history = history_select(ReportHistoryMonthly, ReportHistoryMonthly.month,
                                    (datetime.datetime.strptime('1970-01-01', '%Y-%m-%d')))

    unique_ocurrence_os = {}
    if len(complete_history) > 0:
        for ch in complete_history:
            os_name = "{0} {1}".format(ch.opsysrelease.opsys.name, ch.opsysrelease.version)

            if ch.count is None:
                ch.count = 0

            if ch.unique is None:
                ch.unique = 0

            if os_name not in unique_ocurrence_os:
                unique_ocurrence_os[os_name] = {'count': ch.count, 'unique': ch.unique}
            else:
                unique_ocurrence_os[os_name]['count'] += ch.count
                unique_ocurrence_os[os_name]['unique'] += ch.unique

    sorted(unique_ocurrence_os)

    packages = load_packages(db, report_id)

    # creates a package_counts list with this structure:
    # [(package name, count, [(package version, count in the version)])]
    names = defaultdict(lambda: {"count": 0, "versions": defaultdict(int)})
    for pkg in packages:
        names[pkg.iname]["name"] = pkg.iname
        names[pkg.iname]["count"] += pkg.count
        names[pkg.iname]["versions"]["{0}:{1}-{2}"
            .format(pkg.iepoch, pkg.iversion, pkg.irelease)] += pkg.count

    package_counts = []
    for pkg in sorted(names.values(), key=itemgetter("count"), reverse=True):
        package_counts.append((
            pkg["name"],
            pkg["count"],
            sorted(pkg["versions"].items(), key=itemgetter(1), reverse=True)))

    try:
        backtrace = report.backtraces[0].frames
    except:
        backtrace = []

    fid = 0
    for frame in backtrace:
        fid += 1
        frame.nice_order = fid

    is_maintainer = is_component_maintainer(db, g.user, component)

    contact_emails = []
    if is_maintainer:
        contact_emails = [email_address for (email_address, ) in
                          (db.session.query(ContactEmail.email_address)
                                .join(ReportContactEmail)
                                .filter(ReportContactEmail.report == report))]

    maintainer = (db.session.query(AssociatePeople)
                        .join(OpSysReleaseComponentAssociate)
                        .join(OpSysReleaseComponent)
                        .join(OpSysComponent)
                        .filter(OpSysComponent.name == component.name)).first()

    maintainer_contact = ""
    if maintainer:
        maintainer_contact = maintainer.name

    probably_fixed = (db.session.query(ProblemOpSysRelease, Build)
                      .join(Problem)
                      .join(Report)
                      .join(Build)
                      .filter(Report.id == report_id)
                      .first())

    forward = dict(report=report,
                   probably_fixed=probably_fixed,
                   component=component,
                   releases=metric(releases),
                   arches=metric(arches),
                   modes=metric(modes),
                   daily_history=daily_history,
                   weekly_history=weekly_history,
                   monthly_history=monthly_history,
                   complete_history=complete_history,
                   unique_ocurrence_os=unique_ocurrence_os,
                   crashed_packages=packages,
                   package_counts=package_counts,
                   backtrace=backtrace,
                   contact_emails=contact_emails,
                   solutions=solutions,
                   maintainer_contact=maintainer_contact)

    forward['error_name'] = report.error_name
    forward['oops'] = report.oops

    if want_object:
        try:
            cf = component.name
            if len(report.backtraces[0].crash_function) > 0:
                cf += " in {0}".format(report.backtraces[0].crash_function)
            forward['crash_function'] = cf
        except:
            forward['crash_function'] = ""

        if probably_fixed:
            tmp_dict = probably_fixed.ProblemOpSysRelease.serialize
            tmp_dict['probable_fix_build'] = probably_fixed.Build.serialize

            forward['probably_fixed'] = tmp_dict
        # Average count per month from the first to the last occurrence
        forward['avg_count_per_month'] = get_avg_count(report.first_occurrence,
                                                           report.last_occurrence,
                                                           report.count)

        if len(forward['report'].bugs) > 0:
            forward['bugs'] = []
            for bug in forward['report'].bugs:
                try:
                    forward['bugs'].append(bug.serialize)
                except:
                    print "Bug serialize failed"
        return forward

    if request_wants_json():
        return jsonify(forward)

    forward["is_maintainer"] = is_maintainer
    forward["extfafs"] = get_external_faf_instances(db)

    return render_template("reports/item.html", **forward)
Example #22
def del_torrent(info_hash):
    if has_perm(info_hash, 'd'):
        try:
            if rc.exists("perm|%s" % info_hash):
                rc.delete("perm|%s" % info_hash)
            if rc.exists("info|%s" % info_hash):
                rc.delete("info|%s" % info_hash)
            if rc.exists(info_hash):
                rc.delete(info_hash)
            rc.zrem("torrents", info_hash)
        except Exception as ex:
            traceback.print_exc()
        if request_wants_json():
            return jsonify(dict(info_hash=info_hash))
        else:
            return redirect("/datasets")
    else:
        if request_wants_json():
            raise PermissionDenied()
        else:
            abort(401)


@app.route('/api/dataset/<regex("[a-f0-9]+"):info_hash>/permissions', methods=['GET'])
@docs.doc("api")
@oauth.require_oauth("dataset:permission:list")
@produces("application/json")
def _get_permissions_api(info_hash):
    """
    Return a list of permissions for the dataset identified by info_hash. The permissions are strings of the
    form <subject type>:<subject identifier>:<permission>. The subject type is currently 'user'; for the user subject
    type the subject identifier is the username. Finally, the permission is 'w' for write, 'd' for delete or 'a' for
    admin access. Write access gives the right to modify a dataset - e.g. to add/remove tags; delete access only gives the right
Example #23
File: stats.py Project: sorki/faf
def by_daterange(since, to):
    """
    Render date-based report statistics covering reports from the `since` date
    until the `to` date.
    """

    try:
        if isinstance(since, str) or isinstance(since, unicode):
            since = datetime.datetime.strptime(since, "%Y-%m-%d").date()

        if isinstance(to, str) or isinstance(to, unicode):
            to = datetime.datetime.strptime(to, "%Y-%m-%d").date()
    except ValueError:
        return abort(400)

    # Normalize the range so that `since` is always the earlier date.
    since, to = min(since, to), max(since, to)

    history = "daily"
    day_count = (to - since).days
    if day_count > 30:
        history = "weekly"
    if day_count > 360:
        history = "monthly"

    def date_filter(query):
        return query.filter(hist_field >= since).filter(hist_field < to)

    hist_table, hist_field = queries.get_history_target(history)
    total_query = queries.get_history_sum(db, history=history)
    total = date_filter(total_query).one()[0]

    release_data = []

    for release in queries.get_releases(db):
        release_sum = queries.get_history_sum(db, release.opsys.name, release.version, history=history)

        release_sum = date_filter(release_sum).one()[0]
        if not release_sum:
            continue

        percentage = int(release_sum * 100.0 / total)

        comps = queries.get_report_count_by_component(db, release.opsys.name, release.version, history=history)

        comp_data = []
        for comp, count in date_filter(comps).all():
            comp_percentage = int(count * 100.0 / release_sum)
            comp_data.append((comp, count, comp_percentage))

        release_data.append({"release": release, "sum": release_sum, "comps": comp_data, "percentage": percentage})

    data = {
        "since": since,
        "to": to,
        "total": total,
        "releases": sorted(release_data, key=lambda x: x["sum"], reverse=True),
    }

    if request_wants_json():
        return jsonify(data)

    return render_template("stats/by_date.html", **data)