Beispiel #1
0
def writeSubscriptions(templateName, outputFile=None):
    """Render the subscriptions template and write the result to a file.

    templateName -- config key prefix: '<templateName>Template' names the
                    template, '<templateName>File' the default output path.
    outputFile -- destination path; when None it is read from the config.
    """
    subscriptions = subscriptionParser.readSubscriptions().values()
    template = get_template(get_config().get('subscriptions',
                                             templateName + 'Template'))
    # Use identity comparison with None (PEP 8) instead of '== None'.
    if outputFile is None:
        outputFile = get_config().get('subscriptions', templateName + 'File')
    template.stream({
        'subscriptions': subscriptions
    }).dump(outputFile, encoding='utf-8')
Beispiel #2
0
def checkSubscriptions():
    """Check every subscription URL and, for failing URLs, the whole site.

    Returns a list of {'name': ..., 'links': [...]} dicts, one per
    subscription (sorted by name), where each link carries the URL check
    result and, when the URL failed, whether the site itself seems down.
    """
    subscriptions = subscriptionParser.readSubscriptions().values()
    subscriptions.sort(key=lambda s: s.name.lower())

    # Metadata attributes that may hold a URL worth checking; hoisted so the
    # collection pass and the report pass cannot drift apart.
    linkKeys = ('homepage', 'forum', 'blog', 'faq', 'contact', 'changelog',
                'policy')

    urls = {}
    sites = {}
    for subscription in subscriptions:
        for key in linkKeys:
            url = getattr(subscription, key)
            # Identity comparison with None per PEP 8.
            if url is not None:
                urls[url] = True
        for (title, url, complete) in subscription.variants:
            urls[url] = True

    # Probe all URLs concurrently; only check a site as a whole when one of
    # its URLs failed, to distinguish "page gone" from "site down".
    pool = eventlet.GreenPool()
    for (url, result) in pool.imap(checkURL, urls.iterkeys()):
        urls[url] = result
        if result is False:
            sites[urlparse(url).netloc] = True
    for (site, result) in pool.imap(checkSite, sites.iterkeys()):
        sites[site] = result

    result = []
    for subscription in subscriptions:
        s = {'name': subscription.name, 'links': []}
        result.append(s)
        for key in linkKeys:
            url = getattr(subscription, key)
            if url is not None:
                site = urlparse(url).netloc
                s['links'].append({
                    'url': url,
                    'title': key[0].upper() + key[1:],
                    'result': urls[url],
                    # Only meaningful when the URL failed and its site was
                    # probed; otherwise evaluates to False.
                    'siteResult': site in sites and sites[site],
                })
        for (title, url, complete) in subscription.variants:
            site = urlparse(url).netloc
            s['links'].append({
                'url': url,
                'title': title,
                'result': urls[url],
                'siteResult': site in sites and sites[site],
            })
    return result
Beispiel #3
0
def checkSubscriptions():
    """Validate subscription link and variant URLs.

    Produces a list (one entry per subscription, sorted by name) of
    {'name', 'links'} dicts; every link records the check result for its
    URL plus a site-level result for URLs that failed.
    """
    subscriptions = subscriptionParser.readSubscriptions().values()
    subscriptions.sort(key=lambda s: s.name.lower())

    # Attributes that may reference an external page; shared by both passes.
    linkKeys = ('homepage', 'forum', 'blog', 'faq', 'contact', 'changelog', 'policy')

    urls = {}
    sites = {}
    for subscription in subscriptions:
        for key in linkKeys:
            url = getattr(subscription, key)
            # 'is not None' instead of '!= None' (PEP 8 identity check).
            if url is not None:
                urls[url] = True
        for (title, url, complete) in subscription.variants:
            urls[url] = True

    # Concurrent checks; a site is probed only if one of its URLs failed.
    pool = eventlet.GreenPool()
    for (url, result) in pool.imap(checkURL, urls.iterkeys()):
        urls[url] = result
        if result is False:
            sites[urlparse(url).netloc] = True
    for (site, result) in pool.imap(checkSite, sites.iterkeys()):
        sites[site] = result

    result = []
    for subscription in subscriptions:
        s = {'name': subscription.name, 'links': []}
        result.append(s)
        for key in linkKeys:
            url = getattr(subscription, key)
            if url is not None:
                site = urlparse(url).netloc
                s['links'].append({
                    'url': url,
                    'title': key[0].upper() + key[1:],
                    'result': urls[url],
                    # False unless the URL failed and its site was probed.
                    'siteResult': site in sites and sites[site],
                })
        for (title, url, complete) in subscription.variants:
            site = urlparse(url).netloc
            s['links'].append({
                'url': url,
                'title': title,
                'result': urls[url],
                'siteResult': site in sites and sites[site],
            })
    return result
Beispiel #4
0
def loadSubscriptions():
  """Select the subscriptions due in the current digest interval.

  Returns a tuple (url -> subscription mapping over all variants,
  list of selected subscriptions).
  """
  global interval, weekDay

  subscriptions = subscriptionParser.readSubscriptions()

  results = {}
  resultList = []
  for subscription in subscriptions.values():
    digest = subscription.digest
    # Skip subscriptions whose digest schedule does not match this run.
    skip = ((digest == 'daily' and interval == 'week') or
            (digest == 'weekly' and interval == 'day') or
            (interval == 'week' and subscription.digestDay != weekDay))
    if skip:
      continue

    for title, url, complete in subscription.variants:
      results[url] = subscription
    resultList.append(subscription)
  return (results, resultList)
Beispiel #5
0
def loadSubscriptions():
    """Return (url -> subscription map, list of subscriptions) for this run.

    A subscription is included only when its digest schedule matches the
    module-level interval / weekDay settings.
    """
    global interval, weekDay

    parsed = subscriptionParser.readSubscriptions()

    byUrl = {}
    selected = []
    for sub in parsed.values():
        # Daily digests are not sent on weekly runs and vice versa.
        if (sub.digest, interval) in (('daily', 'week'), ('weekly', 'day')):
            continue
        # Weekly digests only go out on the subscription's configured day.
        if interval == 'week' and sub.digestDay != weekDay:
            continue

        for title, url, complete in sub.variants:
            byUrl[url] = sub
        selected.append(sub)
    return (byUrl, selected)
Beispiel #6
0
def updateSubscriptionList():
    """Synchronize the #PFX#subscriptions table with the parsed subscriptions.

    Inserts rows for subscription variant URLs that are not yet in the
    database and deletes rows whose URL no longer appears in any variant.
    Commits the transaction at the end.
    """
    cursor = get_db().cursor(MySQLdb.cursors.DictCursor)
    executeQuery(cursor, 'SELECT id, url FROM #PFX#subscriptions')
    subids = {}
    for dbsub in cursor:
        subids[dbsub['url']] = dbsub['id']

    subscriptions = subscriptionParser.readSubscriptions()
    for subscription in subscriptions.values():
        for title, url, complete in subscription.variants:
            # Renamed from 'id' to avoid shadowing the builtin; use 'is None'
            # rather than '== None' (PEP 8).
            rowId = subids.get(url)
            if rowId is None:
                executeQuery(cursor, 'INSERT INTO #PFX#subscriptions (url) VALUES (%s)', url)
            else:
                # Known URL: drop it so only stale rows remain in subids.
                del subids[url]

    # Everything left in subids is no longer referenced by any subscription.
    for url in subids:
        executeQuery(cursor, 'DELETE FROM #PFX#subscriptions WHERE id = %s', subids[url])
    get_db().commit()
Beispiel #7
0
def loadSubscriptions(counts):
    """Classify requested subscription URLs from download statistics.

    counts -- mapping of URL -> request count.

    Returns (redirects, gone, unaccounted) where 'unaccounted' is a list
    of formatted lines for URLs requested at least 10 times that were not
    consumed by the redirect/gone fallback data; URLs not present in any
    known subscription variant are flagged with ' [?]'.
    """
    global interval

    subscriptions = subscriptionParser.readSubscriptions()

    knownURLs = {}
    for subscription in subscriptions.values():
        for title, url, complete in subscription.variants:
            knownURLs[url] = True

    (redirectData, goneData) = subscriptionParser.getFallbackData()
    redirects = processFile(redirectData, counts)
    gone = processFile(goneData, counts)

    # Build the list directly instead of filter(...).sort(): filter() returns
    # an iterator on Python 3, where calling .sort() on it would fail.
    unaccounted = sorted([url for url in counts.keys() if counts[url] >= 10],
                         key=lambda url: counts[url], reverse=True)
    for i, url in enumerate(unaccounted):
        mark = '' if url in knownURLs else ' [?]'
        unaccounted[i] = '%5i %s%s' % (counts[url], url, mark)

    return (redirects, gone, unaccounted)
Beispiel #8
0
def loadSubscriptions(counts):
  """Classify requested subscription URLs from download statistics.

  counts -- mapping of URL -> request count.

  Returns (redirects, gone, unaccounted); 'unaccounted' lists formatted
  lines for URLs requested >= 10 times that remain after processing the
  redirect/gone fallback data, marking unknown URLs with ' [?]'.
  """
  global interval

  subscriptions = subscriptionParser.readSubscriptions()

  knownURLs = {}
  for subscription in subscriptions.values():
    for title, url, complete in subscription.variants:
      knownURLs[url] = True

  (redirectData, goneData) = subscriptionParser.getFallbackData()
  redirects = processFile(redirectData, counts)
  gone = processFile(goneData, counts)

  # sorted() over a list comprehension instead of filter(...).sort():
  # filter() yields an iterator on Python 3, which has no .sort() method.
  unaccounted = sorted([url for url in counts.keys() if counts[url] >= 10],
                       key=lambda url: counts[url], reverse=True)
  for i, url in enumerate(unaccounted):
    mark = '' if url in knownURLs else ' [?]'
    unaccounted[i] = '%5i %s%s' % (counts[url], url, mark)

  return (redirects, gone, unaccounted)
Beispiel #9
0
def writeSubscriptions(templateName, outputFile=None):
    """Render the subscriptions template to outputFile.

    templateName -- config key prefix selecting both the template
                    ('<templateName>Template') and, when outputFile is
                    None, the default destination ('<templateName>File').
    """
    subscriptions = subscriptionParser.readSubscriptions().values()
    template = get_template(get_config().get('subscriptions', templateName + 'Template'))
    # PEP 8: compare to None by identity, not equality.
    if outputFile is None:
        outputFile = get_config().get('subscriptions', templateName + 'File')
    template.stream({'subscriptions': subscriptions}).dump(outputFile, encoding='utf-8')
def updateDigests(dir):
    """Write per-recipient HTML digest files for recent reports into *dir*.

    False positive / false negative reports are routed to the authors of
    the affected subscriptions; all other reports go to the configured
    default recipient. Digest files not regenerated in this run and older
    than two weeks are deleted.
    """
    global currentTime

    subs = subscriptionParser.readSubscriptions()
    defname, defemail = parseaddr(get_config().get(
        'reports', 'defaultSubscriptionRecipient'))

    # subscriptions: variant URL -> subscription object.
    # emails: recipient address -> list of reports to include in the digest.
    subscriptions = {}
    emails = {}
    emails[defemail] = []
    for subscription in subs.values():
        for title, url, complete in subscription.variants:
            subscriptions[url] = subscription
        name, email = parseaddr(subscription.email)
        if email != '':
            emails[email] = []

    # Only consider reports from the configured digest window (in days).
    startTime = currentTime - get_config().getint('reports',
                                                  'digestDays') * 24 * 60 * 60
    for dbreport in getReports(startTime):
        report = {
            'guid':
            dbreport['guid'],
            'status':
            dbreport['status'],
            'url':
            get_config().get('reports', 'urlRoot') + dbreport['guid'] +
            '#secret=' + calculateReportSecret(dbreport['guid']),
            'site':
            dbreport['site'],
            'comment':
            dbreport['comment'],
            'type':
            dbreport['type'],
            'subscriptions': [],
            'contact':
            dbreport['contact'],
            'score':
            getUserUsefulnessScore(dbreport['contact']),
            'hasscreenshot':
            dbreport['hasscreenshot'],
            'knownIssues':
            dbreport['knownissues'],
            'time':
            dbreport['ctime'],
        }

        # Track recipients already given this report to avoid duplicates
        # when several variants of one subscription match.
        recipients = set()
        reportSubscriptions = getReportSubscriptions(dbreport['guid'])

        if dbreport['type'] == 'false positive' or dbreport[
                'type'] == 'false negative':
            for subscription in reportSubscriptions:
                subscriptionID = subscription.get('url', 'unknown')
                # Send false negatives to all subscription authors, false positives
                # only to subscriptions with matching filters
                if subscriptionID in subscriptions and (
                        dbreport['type'] == 'false negative'
                        or subscription.get('hasmatches', 0) > 0):
                    name, email = parseaddr(
                        subscriptions[subscriptionID].email)
                    if email and not email in recipients:
                        recipients.add(email)
                        emails[email].append(report)
                    report['subscriptions'].append(
                        getSubscriptionInfo(subscriptions[subscriptionID]))
        else:
            # NOTE(review): unlike the branch above, subscriptionID is not
            # checked against 'subscriptions' here; an unknown URL would
            # raise KeyError — confirm this is intentional.
            for subscription in reportSubscriptions:
                subscriptionID = subscription.get('url', 'unknown')
                report['subscriptions'].append(
                    getSubscriptionInfo(subscriptions[subscriptionID]))
            recipients.add(defemail)
            emails[defemail].append(report)

    # Generate new digests
    digests = set()
    for email, reports in emails.iteritems():
        if len(reports) == 0:
            continue
        file = getDigestPath(dir, email)
        template = get_template(get_config().get('reports',
                                                 'htmlDigestTemplate'))
        template.stream({
            'email': email,
            'reports': reports
        }).dump(file, encoding='utf-8')
        digests.add(file)

    # Remove not updated digests which are more then 2 weeks old
    for filename in os.listdir(dir):
        file = os.path.join(dir, filename)
        if os.path.isfile(file) and file not in digests and re.match(
                r'^[\da-f]{32}\.html$', filename
        ) and os.stat(file).st_mtime < currentTime - 14 * 24 * 60 * 60:
            os.remove(file)
Beispiel #11
0
def updateDigests(dir):
  """Write per-recipient HTML digest files for recent reports into *dir*.

  False positive / false negative reports go to the matching subscription
  authors; everything else goes to the configured default recipient.
  Digests not regenerated here and older than two weeks are removed.
  """
  global currentTime
  
  subs = subscriptionParser.readSubscriptions()
  defname, defemail = parseaddr(get_config().get('reports', 'defaultSubscriptionRecipient'))

  # subscriptions: variant URL -> subscription object.
  # emails: recipient address -> reports destined for that digest.
  subscriptions = {}
  emails = {}
  emails[defemail] = []
  for subscription in subs.values():
    for title, url, complete in subscription.variants:
      subscriptions[url] = subscription
    name, email = parseaddr(subscription.email)
    if email != '':
      emails[email] = []
      
  # Reports are limited to the configured digest window (in days).
  startTime = currentTime - get_config().getint('reports', 'digestDays') * 24*60*60
  for dbreport in getReports(startTime):
    report = {
      'guid': dbreport['guid'],
      'status': dbreport['status'],
      'url': get_config().get('reports', 'urlRoot') + dbreport['guid'] + '#secret=' + calculateReportSecret(dbreport['guid']),
      'site': dbreport['site'],
      'comment': dbreport['comment'],
      'type': dbreport['type'],
      'subscriptions': [],
      'contact': dbreport['contact'],
      'score': getUserUsefulnessScore(dbreport['contact']),
      'hasscreenshot': dbreport['hasscreenshot'],
      'knownIssues': dbreport['knownissues'],
      'time': dbreport['ctime'],
    }

    # Avoid sending one report to the same address twice when multiple
    # variants of a subscription match.
    recipients = set()
    reportSubscriptions = getReportSubscriptions(dbreport['guid'])

    if dbreport['type'] == 'false positive' or dbreport['type'] == 'false negative':
      for subscription in reportSubscriptions:
        subscriptionID = subscription.get('url', 'unknown')
        # Send false negatives to all subscription authors, false positives
        # only to subscriptions with matching filters
        if subscriptionID in subscriptions and (dbreport['type'] == 'false negative' or subscription.get('hasmatches', 0) > 0):
          name, email = parseaddr(subscriptions[subscriptionID].email)
          if email and not email in recipients:
            recipients.add(email)
            emails[email].append(report)
          report['subscriptions'].append(getSubscriptionInfo(subscriptions[subscriptionID]))
    else:
      # NOTE(review): subscriptionID is not checked against 'subscriptions'
      # in this branch; an unknown URL would raise KeyError — confirm.
      for subscription in reportSubscriptions:
        subscriptionID = subscription.get('url', 'unknown')
        report['subscriptions'].append(getSubscriptionInfo(subscriptions[subscriptionID]))
      recipients.add(defemail)
      emails[defemail].append(report)
      
  # Generate new digests
  digests = set()
  for email, reports in emails.iteritems():
    if len(reports) == 0:
      continue
    file = getDigestPath(dir, email)
    template = get_template(get_config().get('reports', 'htmlDigestTemplate'))
    template.stream({'email': email, 'reports': reports}).dump(file, encoding='utf-8')
    digests.add(file)
  
  # Remove not updated digests which are more then 2 weeks old
  for filename in os.listdir(dir):
    file = os.path.join(dir, filename)
    if os.path.isfile(file) and file not in digests and re.match(r'^[\da-f]{32}\.html$', filename) and os.stat(file).st_mtime < currentTime - 14*24*60*60:
      os.remove(file)
def updateDigests(dir):
    """Write per-recipient HTML digest files for recent reports into *dir*.

    False positive / false negative reports are delivered to the authors
    of the affected subscriptions; all other reports to the configured
    default recipient. Stale digests (not regenerated and older than two
    weeks) are deleted.
    """
    global currentTime

    subs = subscriptionParser.readSubscriptions()
    defname, defemail = parseaddr(get_config().get("reports", "defaultSubscriptionRecipient"))

    # subscriptions: variant URL -> subscription object.
    # emails: recipient address -> reports collected for that digest.
    subscriptions = {}
    emails = {}
    emails[defemail] = []
    for subscription in subs.values():
        for title, url, complete in subscription.variants:
            subscriptions[url] = subscription
        name, email = parseaddr(subscription.email)
        if email != "":
            emails[email] = []

    # Reports are restricted to the configured digest window (in days).
    startTime = currentTime - get_config().getint("reports", "digestDays") * 24 * 60 * 60
    for dbreport in getReports(startTime):
        report = {
            "guid": dbreport["guid"],
            "status": dbreport["status"],
            "url": get_config().get("reports", "urlRoot")
            + dbreport["guid"]
            + "#secret="
            + calculateReportSecret(dbreport["guid"]),
            "site": dbreport["site"],
            "comment": dbreport["comment"],
            "type": dbreport["type"],
            "subscriptions": [],
            "contact": dbreport["contact"],
            "score": getUserUsefulnessScore(dbreport["contact"]),
            "hasscreenshot": dbreport["hasscreenshot"],
            "knownIssues": dbreport["knownissues"],
            "time": dbreport["ctime"],
        }

        # Prevents sending one report twice to the same address when
        # several variants of a subscription match.
        recipients = set()
        reportSubscriptions = getReportSubscriptions(dbreport["guid"])

        if dbreport["type"] == "false positive" or dbreport["type"] == "false negative":
            for subscription in reportSubscriptions:
                subscriptionID = subscription.get("url", "unknown")
                # Send false negatives to all subscription authors, false positives
                # only to subscriptions with matching filters
                if subscriptionID in subscriptions and (
                    dbreport["type"] == "false negative" or subscription.get("hasmatches", 0) > 0
                ):
                    name, email = parseaddr(subscriptions[subscriptionID].email)
                    if email and not email in recipients:
                        recipients.add(email)
                        emails[email].append(report)
                    report["subscriptions"].append(getSubscriptionInfo(subscriptions[subscriptionID]))
        else:
            # NOTE(review): no membership check against 'subscriptions' in
            # this branch; an unknown URL would raise KeyError — confirm.
            for subscription in reportSubscriptions:
                subscriptionID = subscription.get("url", "unknown")
                report["subscriptions"].append(getSubscriptionInfo(subscriptions[subscriptionID]))
            recipients.add(defemail)
            emails[defemail].append(report)

    # Generate new digests
    digests = set()
    for email, reports in emails.iteritems():
        if len(reports) == 0:
            continue
        file = getDigestPath(dir, email)
        template = get_template(get_config().get("reports", "htmlDigestTemplate"))
        template.stream({"email": email, "reports": reports}).dump(file, encoding="utf-8")
        digests.add(file)

    # Remove not updated digests which are more then 2 weeks old
    for filename in os.listdir(dir):
        file = os.path.join(dir, filename)
        if (
            os.path.isfile(file)
            and file not in digests
            and re.match(r"^[\da-f]{32}\.html$", filename)
            and os.stat(file).st_mtime < currentTime - 14 * 24 * 60 * 60
        ):
            os.remove(file)