Example 1
    def update_meta(self):
        # Refresh the cached record count by asking the dataproxy for a
        # datasummary of this scraper's datastore.
        dataproxy = DataStore(self.short_name)
        try:
            newcount = 0
            datasummary = dataproxy.request({"maincommand": "sqlitecommand", "command": "datasummary", "limit": -1})
            if "error" not in datasummary:
                if "total_rows" in datasummary:
                    self.record_count = datasummary["total_rows"]
                else:
                    for tabledata in datasummary.get("tables", {}).values():
                        newcount += tabledata["count"]

                    # Only update the record count when we have definitely not failed.
                    self.record_count = newcount
            else:
                print "logthis", datasummary
        except Exception, e:
            print "logthis", e
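For reference, the `datasummary` reply that `update_meta` consumes looks roughly like the sketch below. This is an assumed shape inferred from the branches above (the table names are hypothetical): newer datastores report an overall `total_rows`, while older ones only expose per-table counts, which the method sums instead.

# Assumed reply shape, inferred from how update_meta reads it above.
datasummary = {
    "tables": {
        "swdata": {"count": 120},   # hypothetical table names
        "swvariables": {"count": 3},
    },
    "total_rows": 123,              # absent on older datastores
}

# The same branch update_meta takes:
if "total_rows" in datasummary:
    record_count = datasummary["total_rows"]
else:
    record_count = sum(t["count"] for t in datasummary.get("tables", {}).values())
print record_count  # -> 123 (Python 2, matching the examples here)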
Example 2
def scraperinfo(scraper, history_start_date, quietfields, rev):
    # Build a JSON-serialisable summary of a scraper: metadata, permissions,
    # code, commit history and recent run events.  Any field named in
    # quietfields is omitted from the result.
    info = {}
    info["short_name"] = scraper.short_name
    info["language"] = scraper.language
    info["created"] = scraper.created_at.isoformat()

    info["title"] = scraper.title
    info["description"] = scraper.description_safepart()
    info["tags"] = [tag.name for tag in Tag.objects.get_for_object(scraper)]
    info["wiki_type"] = scraper.wiki_type
    info["privacy_status"] = scraper.privacy_status

    if scraper.wiki_type == "scraper":
        info["last_run"] = scraper.scraper.last_run.isoformat() if scraper.scraper.last_run else ""
        info["run_interval"] = scraper.scraper.run_interval

    attachables = []
    for cp in CodePermission.objects.filter(code=scraper).all():
        if cp.permitted_object.privacy_status != "deleted":
            attachables.append(cp.permitted_object.short_name)
    info["attachables"] = attachables

    # These have to filter out incoming private scraper names; the outgoing
    # attachables list doesn't, because those names are referred to in the code anyway.
    info["attachable_here"] = []
    for cp in CodePermission.objects.filter(permitted_object=scraper).all():
        if cp.code.privacy_status not in ["deleted", "private"]:
            info["attachable_here"].append(cp.code.short_name)

    if scraper.wiki_type == "scraper":
        info["records"] = scraper.scraper.record_count  # old style datastore

        if "datasummary" not in quietfields:
            dataproxy = DataStore(scraper.short_name)
            sqlitedata = dataproxy.request(
                {"maincommand": "sqlitecommand", "command": "datasummary", "val1": 0, "val2": None}
            )
            if sqlitedata and type(sqlitedata) not in [str, unicode]:
                info["datasummary"] = sqlitedata

    if "userroles" not in quietfields:
        info["userroles"] = {}
        for ucrole in scraper.usercoderole_set.all():
            if ucrole.role not in info["userroles"]:
                info["userroles"][ucrole.role] = []
            info["userroles"][ucrole.role].append(ucrole.user.username)

    status = scraper.get_vcs_status(rev)
    if "code" not in quietfields:
        info["code"] = status["code"]

    for committag in ["currcommit", "prevcommit", "nextcommit"]:
        if committag not in quietfields:
            if committag in status:
                info[committag] = convert_history(status[committag])

    if "currcommit" not in status and "prevcommit" in status and not status["ismodified"]:
        if "filemodifieddate" in status:
            info["modifiedcommitdifference"] = str(status["filemodifieddate"] - status["prevcommit"]["date"])
            info["filemodifieddate"] = status["filemodifieddate"].isoformat()

    if "history" not in quietfields:
        history = []
        commitentries = scraper.get_commit_log("code")
        for commitentry in commitentries:
            if history_start_date and commitentry["date"] < history_start_date:
                continue
            history.append(convert_history(commitentry))
        history.reverse()
        info["history"] = history

    if scraper.wiki_type == "scraper" and "runevents" not in quietfields:
        if history_start_date:
            runevents = scraper.scraper.scraperrunevent_set.filter(run_ended__gte=history_start_date).order_by(
                "-run_started"
            )
        else:
            runevents = scraper.scraper.scraperrunevent_set.all().order_by("-run_started")[:2]

        info["runevents"] = []
        for runevent in runevents:
            info["runevents"].append(convert_run_event(runevent))

    return info
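`quietfields` acts as a suppression list: any field named in it is skipped when building `info`, which keeps the expensive lookups (code, history, datasummary, run events) optional. A hypothetical call might look like this (the `scraper` object would come from the ORM in the real view, and `rev=None` is assumed to select the head revision):

info = scraperinfo(
    scraper,                      # hypothetical ORM object
    history_start_date=None,      # no lower bound on history/runevents
    quietfields=["code", "history", "datasummary", "runevents"],
    rev=None,                     # assumed: head revision
)
assert "code" not in info and "history" not in info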
Example 3
def sqlite_handler(request):
    # Django view: run a SQL query against a scraper's SQLite datastore and
    # return the result in the requested output format.
    short_name = request.GET.get("name")
    apikey = request.GET.get("apikey", None)

    scraper, err = getscraperorresponse(short_name)
    if err:
        result = json.dumps({"error": err, "short_name": short_name})
        if request.GET.get("callback"):
            result = "%s(%s)" % (request.GET.get("callback"), result)
        return HttpResponse(result)

    u, s, kd = None, None, ""
    if request.user.is_authenticated():
        u = request.user

    if scraper.privacy_status != "private":
        s = scraper  # XX why this only when not private? FAI
        kd = short_name
    else:
        # When private we MUST have an apikey and it should match
        if not scraper.api_actionauthorized(apikey):
            result = json.dumps({"error": "Invalid API Key", "short_name": short_name})
            if request.GET.get("callback"):
                result = "%s(%s)" % (request.GET.get("callback"), result)
            return HttpResponse(result)

    APIMetric.record("sqlite", key_data=kd, user=u, code_object=s)

    dataproxy = DataStore(short_name)
    lattachlist = request.GET.get("attach", "").split(";")
    attachlist = []
    for aattach in lattachlist:
        if aattach:
            aa = aattach.split(",")
            attachi = {"name": aa[0], "asname": (len(aa) == 2 and aa[1] or None)}
            attachlist.append(attachi)
            dataproxy.request(
                {
                    "maincommand": "sqlitecommand",
                    "command": "attach",
                    "name": attachi["name"],
                    "asname": attachi["asname"],
                }
            )

    sqlquery = request.GET.get("query", "")
    format = request.GET.get("format", "json")
    if format == "json":
        format = "jsondict"

    req = {"maincommand": "sqliteexecute", "sqlquery": sqlquery, "data": None, "attachlist": attachlist}
    if format == "csv":
        req["streamchunking"] = 1000

    # Inlined from dataproxy.request() so that receiveoneline can perform
    # multiple readlines here (stream chunking; the right interface for this
    # is not yet apparent).

    dataproxy.m_socket.sendall(json.dumps(req) + "\n")

    if format not in ["jsondict", "jsonlist", "csv", "htmltable", "rss2"]:
        dataproxy.close()
        return HttpResponse("Error: the format '%s' is not supported" % format)

    if format in ["csv", "htmltable"]:
        return out_csvhtml(dataproxy, scraper.short_name, format)
    if format == "rss2":
        return out_rss2(dataproxy, scraper)

    return out_json(dataproxy, request.GET.get("callback"), scraper.short_name, format)
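Note how the JSONP wrapping is spelled out inline twice in the error paths above. A small hypothetical helper would capture the same pattern:

def jsonp_response(request, payload):
    # Hypothetical helper mirroring the inline pattern above: serialise the
    # payload, and wrap it in callback(...) when a ?callback= parameter is set.
    result = json.dumps(payload)
    callback = request.GET.get("callback")
    if callback:
        result = "%s(%s)" % (callback, result)
    return HttpResponse(result)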
Example 4
def sqlite_handler(request):
    # Variant of the handler above with two extra output formats:
    # base64singleton and htmltable_unescaped.
    short_name = request.GET.get('name')
    apikey = request.GET.get('apikey', None)

    scraper, err = getscraperorresponse(short_name)
    if err:
        result = json.dumps({"error": err, "short_name": short_name})
        if request.GET.get("callback"):
            result = "%s(%s)" % (request.GET.get("callback"), result)
        return HttpResponse(result)

    u, s, kd = None, None, ""
    if request.user.is_authenticated():
        u = request.user

    if scraper.privacy_status != "private":
        s = scraper  # XX why this only when not private? FAI
        kd = short_name
    else:
        # When private we MUST have an apikey and it should match
        if not scraper.api_actionauthorized(apikey):
            result = json.dumps({"error": "Invalid API Key", "short_name": short_name})
            if request.GET.get("callback"):
                result = "%s(%s)" % (request.GET.get("callback"), result)
            return HttpResponse(result)

    APIMetric.record("sqlite", key_data=kd, user=u, code_object=s)

    dataproxy = DataStore(short_name)
    lattachlist = request.GET.get('attach', '').split(";")
    attachlist = []
    for aattach in lattachlist:
        if aattach:
            aa = aattach.split(",")
            attachi = {"name": aa[0], "asname": (len(aa) == 2 and aa[1] or None)}
            attachlist.append(attachi)
            dataproxy.request({
                "maincommand": "sqlitecommand",
                "command": "attach",
                "name": attachi["name"],
                "asname": attachi["asname"]
            })

    sqlquery = request.GET.get('query', "")
    format = request.GET.get("format", "json")
    if format == "json":
        format = "jsondict"

    req = {
        "maincommand": "sqliteexecute",
        "sqlquery": sqlquery,
        "data": None,
        "attachlist": attachlist
    }
    if format == "csv":
        req["streamchunking"] = 1000

    # Inlined from dataproxy.request() so that receiveoneline can perform
    # multiple readlines here (stream chunking; the right interface for this
    # is not yet apparent).

    dataproxy.m_socket.sendall(json.dumps(req) + '\n')

    if format not in ["jsondict", "jsonlist", "csv", "htmltable", "rss2",
                      "base64singleton", "htmltable_unescaped"]:
        dataproxy.close()
        return HttpResponse("Error: the format '%s' is not supported" % format)

    if format in ["csv", "htmltable", "htmltable_unescaped"]:
        return out_csvhtml(dataproxy, scraper.short_name, format)
    if format == "rss2":
        return out_rss2(dataproxy, scraper)
    if format == "base64singleton":
        return out_base64singleton(dataproxy, request.GET.get("mimetype", "text/plain"))

    return out_json(dataproxy, request.GET.get("callback"), scraper.short_name, format)
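The `attach` query parameter packs a list of datastores into one string: entries are separated by semicolons, and each entry is a name optionally followed by a comma and an alias (`name[,asname];...`). A standalone sketch of the same parsing, with hypothetical input:

def parse_attach(spec):
    # Mirrors the loop in sqlite_handler: each ";"-separated entry names a
    # datastore, optionally followed by ",alias".
    attachlist = []
    for aattach in spec.split(";"):
        if aattach:
            aa = aattach.split(",")
            attachlist.append({"name": aa[0], "asname": (len(aa) == 2 and aa[1] or None)})
    return attachlist

print parse_attach("pricing,prices;geocode")
# -> attaches "pricing" under the alias "prices", and "geocode" under its own name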