Example No. 1
def main(args):
    arguments = args[1:]

    if len(arguments) != 1:
        sys.stderr.write("Usage: tool_slice.py file\n")
        sys.exit(1)

    # The output filename below is derived by replacing this suffix
    if not arguments[0].endswith(".sqlite3"):
        sys.stderr.write("error: Input file must end with .sqlite3 suffix\n")
        sys.exit(1)

    connection = sqlite3.connect(arguments[0])
    connection.row_factory = sqlite3.Row

    since = TIMES.popleft()
    while TIMES:
        until = TIMES.popleft()
        output = sqlite3.connect(arguments[0].replace(".sqlite3",
          "_%d-%02d.sqlite3" % (since.year, since.month)))
        table_speedtest.create(output)

        cursor = connection.cursor()
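        # NOTE strftime("%s") (seconds since the epoch) relies on a
        # platform-specific extension and is not guaranteed to be portable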
        cursor.execute("""SELECT * FROM speedtest WHERE timestamp >= ?
          AND timestamp < ?;""", (since.strftime("%s"), until.strftime("%s")))

        for row in cursor:
            table_speedtest.insert(output, dict(row), commit=False,
              override_timestamp=False)

        output.commit()
        since = until
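TIMES is defined elsewhere in the original script and is not part of this excerpt; the calls to popleft() and the use of since.year, since.month and strftime() suggest a deque of datetime boundaries. A minimal sketch of how such a deque could be built, assuming monthly boundaries (the helper name, starting month and count are illustrative assumptions, not part of the original):

import collections
import datetime

def month_boundaries(first_year, first_month, count):
    """ Hypothetical helper: yield 'count' consecutive month starts. """
    year, month = first_year, first_month
    for _ in range(count):
        yield datetime.datetime(year, month, 1)
        month += 1
        if month > 12:
            month = 1
            year += 1

# Thirteen boundaries give twelve monthly slices (illustrative range)
TIMES = collections.deque(month_boundaries(2011, 1, 13))

With boundaries like these, the while loop above writes one output database per calendar month.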
Example No. 2
    def speedtest_store(self, message):
        ''' Saves the results of a speedtest test '''
        DATABASE.connect()
        if DATABASE.readonly:
            logging.warning('backend_neubot: readonly database')
            return
        table_speedtest.insert(DATABASE.connection(), message)
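DATABASE is a module-level singleton provided elsewhere by the Neubot codebase. A minimal sketch of the interface that Examples 2, 6 and 7 rely on, assuming nothing beyond what the calls themselves reveal (connect(), the readonly flag and connection()); this is an illustrative stand-in, not Neubot's actual implementation:

import sqlite3

class DatabaseManager(object):
    """ Hypothetical stand-in for the DATABASE singleton. """

    def __init__(self, path=":memory:", readonly=False):
        self.path = path
        self.readonly = readonly
        self._connection = None

    def connect(self):
        """ Open the connection lazily, only once. """
        if self._connection is None:
            self._connection = sqlite3.connect(self.path)
            self._connection.row_factory = sqlite3.Row

    def connection(self):
        """ Return the underlying sqlite3 connection. """
        self.connect()
        return self._connection

DATABASE = DatabaseManager()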
Example No. 3
def main(args):
    arguments = args[1:]

    if len(arguments) != 1:
        sys.stderr.write("Usage: tool_goodset.py file\n")
        sys.exit(1)

    # The output filename below is derived by replacing this suffix
    if not arguments[0].endswith(".sqlite3"):
        sys.stderr.write("error: Input file must end with .sqlite3\n")
        sys.exit(1)

    outfile = arguments[0].replace(".sqlite3", "-goodset.sqlite3")
    sys.stderr.write("* Output database file: %s\n" % outfile)
    output = sqlite3.connect(outfile)
    table_speedtest.create(output)

    sys.stderr.write("* Processing file: %s... " %  arguments[0])
    input_dbm = sqlite3.connect(arguments[0])
    input_dbm.row_factory = sqlite3.Row

    # Get the number of rows in the original database
    cursor = input_dbm.cursor()
    cursor.execute("SELECT COUNT(*) FROM speedtest;")
    total = cursor.fetchone()[0]

    # Copy the goodset to the new database
    cursor = input_dbm.cursor()
    cursor.execute("""SELECT * FROM speedtest WHERE download_speed < ?
      AND latency < ?;""", (MAX_DOWNLOAD_SPEED, MAX_LATENCY))
    for row in cursor:
        table_speedtest.insert(output, dict(row), commit=False,
          override_timestamp=False)
    sys.stderr.write("done\n")

    sys.stderr.write("* Committing changes to: %s\n" % outfile)
    output.commit()

    # Get the number of rows in the new database
    cursor = output.cursor()
    cursor.execute("SELECT COUNT(*) FROM speedtest;")
    goodset = cursor.fetchone()[0]

    if total:
        sys.stdout.write("%d/%d (%.2f%%)\n" % (goodset, total,
          goodset * 100.0 / total))
    else:
        sys.stdout.write("0/0 (0.00%)\n")
Example No. 4
    def runTest(self):
        """Make sure speedtest table works as expected"""

        connection = sqlite3.connect(":memory:")
        connection.row_factory = sqlite3.Row
        table_speedtest.create(connection)
        table_speedtest.create(connection)  # creating the table twice must not fail

        v = list(ResultIterator())
        for d in v:
            table_speedtest.insert(connection, d, override_timestamp=False)

        v1 = table_speedtest.listify(connection)
        self.assertEqual(sorted(v), sorted(v1))

        since = utils.timestamp() - 7 * 24 * 60 * 60
        until = utils.timestamp() - 3 * 24 * 60 * 60
        v2 = table_speedtest.listify(connection, since=since, until=until)
        self.assertTrue(len(v2) < len(v))

        table_speedtest.prune(connection, until)
        self.assertTrue(len(table_speedtest.listify(connection)) < len(v1))
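ResultIterator is part of the test fixture used by Examples 4 and 5 and is not included in this listing. The test only needs it to yield dictionaries that table_speedtest.insert() accepts, with 'timestamp' values spread over enough days that the since/until window selects a strict subset. A sketch under those assumptions; the field names are guessed from the columns referenced in Examples 3 and 9:

import time
import uuid

class ResultIterator(object):
    """ Hypothetical fixture: yields one fake speedtest result per day. """

    def __init__(self, days=10):
        self.days = days

    def __iter__(self):
        now = int(time.time())
        for index in range(self.days):
            yield {
                "timestamp": now - index * 24 * 60 * 60,
                "uuid": str(uuid.uuid4()),
                "internal_address": "0.0.0.0",
                "real_address": "0.0.0.0",
                "download_speed": 1000000.0,
                "latency": 0.050,
                "privacy_informed": 1,
                "privacy_can_collect": 1,
                "privacy_can_share": 1,
            }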
Example No. 5
    def runTest(self):
        """Make sure speedtest table works as expected"""

        connection = sqlite3.connect(":memory:")
        connection.row_factory = sqlite3.Row
        table_speedtest.create(connection)
        table_speedtest.create(connection)  # creating the table twice must not fail

        v = list(ResultIterator())
        for d in v:
            table_speedtest.insert(connection, d, override_timestamp=False)

        v1 = table_speedtest.listify(connection)
        self.assertEqual(sorted(v), sorted(v1))

        since = utils.timestamp() - 7 * 24 * 60 * 60
        until = utils.timestamp() - 3 * 24 * 60 * 60
        v2 = table_speedtest.listify(connection, since=since, until=until)
        self.assertTrue(len(v2) < len(v))

        table_speedtest.prune(connection, until)
        self.assertTrue(len(table_speedtest.listify(connection)) < len(v1))
Example No. 6
    def collect_legacy(self, stream, request_body, request):
        ''' Invoked when we must save the result of a session '''
        ident = str(hash(stream))
        if ident not in self.clients:
            #
            # Before Neubot 0.4.2 we were using multiple connections
            # for speedtest, which were used both for testing and for
            # negotiating/collecting.  Sometimes the connection used
            # to collect is not the one used to negotiate: the code
            # uses the one that terminates the upload first.
            # When this happens we inspect the Authorization header
            # before deciding the collect request is an abuse.
            #
            authorization = request['Authorization']
            if authorization not in self.clients:
                raise RuntimeError('Not authorized to collect')
            else:
                LOG.warning('speedtest: working around multiple conns issue')
                ident = authorization

        # Note: no more than one collect per session
        self.clients.remove(ident)

        #
        # Backward compatibility: the variable name changed from
        # can_share to can_publish after Neubot 0.4.5
        #
        if 'privacy_can_share' in request_body:
            request_body['privacy_can_publish'] = request_body[
              'privacy_can_share']
            del request_body['privacy_can_share']

        if privacy.collect_allowed(request_body):
            table_speedtest.insert(DATABASE.connection(), request_body)
        else:
            LOG.warning('* bad privacy settings: %s' % str(stream))

        return {}
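privacy.collect_allowed() is not shown in these excerpts. Example 9 treats the triple (privacy_informed, privacy_can_collect, privacy_can_share) as the relevant permissions, so a plausible sketch is that collecting a result requires the informed and can_collect flags; this is an assumption about the policy, not Neubot's actual check:

def collect_allowed(message):
    """ Hypothetical policy: collecting requires informed consent plus
        the explicit permission to collect the result. """
    informed = int(message.get("privacy_informed", 0) or 0)
    can_collect = int(message.get("privacy_can_collect", 0) or 0)
    return bool(informed and can_collect)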
Example No. 7
    def speedtest_store(self, message):
        ''' Saves the results of a speedtest test '''
        table_speedtest.insert(DATABASE.connection(), message)
Example No. 8
def insertxxx(connection, obj, commit=True, override_timestamp=True):
    """ Hack to insert a result object into speedtest table,
        converting it into a dictionary. """
    table_speedtest.insert(connection, obj_to_dict(obj), commit, override_timestamp)
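obj_to_dict() is defined elsewhere; the docstring only says it converts a result object into the dictionary form that table_speedtest.insert() expects. A minimal sketch, assuming the result object exposes the table columns as plain attributes:

def obj_to_dict(obj):
    """ Hypothetical converter: copy the public attributes of a result
        object into a plain dictionary. """
    return dict((key, value) for key, value in vars(obj).items()
                if not key.startswith("_"))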
Example No. 9
def main(args):
    arguments = args[1:]

    if len(arguments) != 1:
        sys.stderr.write("Usage: tool_privacy.py file\n")
        sys.exit(1)

    # The output filename below is derived by replacing this suffix
    if not arguments[0].endswith(".sqlite3"):
        sys.stderr.write("error: Input file name must end with .sqlite3\n")
        sys.exit(1)

    connection = sqlite3.connect(arguments[0])
    connection.row_factory = sqlite3.Row

    #
    # Walk the database once and collect the most recent
    # permission for each unique identifier.  We will then
    # use it to decide whether we can publish or not.
    #
    cursor = connection.cursor()
    cursor.execute("SELECT * FROM speedtest;")
    for row in cursor:
        PERMS[row['uuid']] = (row['privacy_informed'],
                              row['privacy_can_collect'],
                              row['privacy_can_share'])

    #
    # Build another database.  Yes, from scratch.  I don't
    # want leakage of your personal data to be possible, by
    # design.
    #
    output = sqlite3.connect(arguments[0].replace(".sqlite3",
                             "-privacy.sqlite3"))
    table_speedtest.create(output)

    #
    # Walk again the original database and honour the
    # privacy permissions.  We replace your Internet address
    # with all zeros, which is quite a good measure to
    # hide who you are.
    #
    total, can_share = 0, 0
    cursor = connection.cursor()
    cursor.execute("SELECT * FROM speedtest;")
    for row in cursor:
        total = total + 1
        dictionary = dict(row)

        # Honour permissions
        if PERMS[dictionary['uuid']] != (1, 1, 1):
            #
            # TODO Here it would be nice to geolocate the
            # client and resolve the provider, to populate a
            # table with some information about the clients
            # that have not given permission.
            #
            dictionary['internal_address'] = "0.0.0.0"
            dictionary['real_address'] = "0.0.0.0"
        else:
            can_share = can_share + 1

        # Override permissions
        (dictionary['privacy_informed'],
         dictionary['privacy_can_collect'],
         dictionary['privacy_can_share']) = PERMS[dictionary['uuid']]

        # NOTE commit=False, otherwise every insert commits and the loop takes ages
        table_speedtest.insert(output, dictionary, commit=False,
          override_timestamp=False)

    output.execute("VACUUM;")
    output.commit()

    #
    # Spit out per row statistics so we see how many rows we
    # can publish out of the total number of rows we have been
    # able to collect.
    #
    if total:
        sys.stdout.write("rows: %d/%d (%.02f%%)\n" % (can_share, total,
          (100.0 * can_share)/total))
    else:
        sys.stdout.write("rows: 0/0 (0.0%)\n")

    #
    # Now tell the poor programmer what is the distribution
    # of privacy permissions one can find in the wild.
    #
    per_uuid = {}
    total_uuid = len(PERMS)
    for tpl in PERMS.values():
        if tpl not in per_uuid:
            per_uuid[tpl] = 0
        per_uuid[tpl] += 1

    if total_uuid:
        for perm in per_uuid:
            sys.stdout.write("perms: %s: %d/%d (%.02f%%)\n" % (perm,
              per_uuid[perm], total_uuid, (100.0 * per_uuid[perm])/total_uuid))
    else:
        sys.stdout.write("perms: N/A: 0/0 (0.0%)\n")

    sys.exit(0)
Example No. 10
    def insert(self, row):
        dictionary = dict(row)
        table_speedtest.insert(self.dbm.connection(), dictionary,
          commit=False, override_timestamp=False)
        self.since = int(dictionary["timestamp"])