Code example #1
0
File: measurement.py  Project: uoaerg/onionperf
    def __start_tor(self, name, control_port, socks_port, hs_port_mapping=None):
        """Launch a client-only Tor process and a control-port monitor thread.

        The torrc is piped to Tor on stdin ('-f -'); this call blocks until the
        watchdog signals that Tor logged 'Bootstrapped 100'. If hs_port_mapping
        is not None, ephemeral v2 and v3 onion services are created for those
        port mappings and their service ids / control port are stored on self.

        Returns (tor_writable, torctl_writable): the util.FileWritable log
        sinks for the Tor process output and the control-port monitor output.
        """
        logging.info("Starting Tor {0} process with ControlPort={1}, SocksPort={2}...".format(name, control_port, socks_port))

        # per-instance data directory, e.g. <datadir>/tor-<name>
        tor_datadir = "{0}/tor-{1}".format(self.datadir_path, name)
        if not os.path.exists(tor_datadir): os.makedirs(tor_datadir)

        # client-only torrc; ORPort/DirPort 0 means we never relay traffic
        tor_config_template = "ORPort 0\nDirPort 0\nControlPort {0}\nSocksPort {1}\nSocksListenAddress 127.0.0.1\nClientOnly 1\n\
WarnUnsafeSocks 0\nSafeLogging 0\nMaxCircuitDirtiness 60 seconds\nUseEntryGuards 0\nDataDirectory {2}\nLog INFO stdout\n"
        tor_config = tor_config_template.format(control_port, socks_port, tor_datadir)

        tor_logpath = "{0}/onionperf.tor.log".format(tor_datadir)
        tor_writable = util.FileWritable(tor_logpath)
        logging.info("Logging Tor {0} process output to {1}".format(name, tor_logpath))

        # from stem.process import launch_tor_with_config
        # tor_subp = launch_tor_with_config(tor_config, tor_cmd=self.tor_bin_path, completion_percent=100, init_msg_handler=None, timeout=None, take_ownership=False)
        tor_cmd = "{0} -f -".format(self.tor_bin_path)  # '-f -' reads the torrc from stdin
        tor_stdin_bytes = str_tools._to_bytes(tor_config)
        # the watchdog sets tor_ready_ev once this string appears in Tor's output
        tor_ready_str = "Bootstrapped 100"
        tor_ready_ev = threading.Event()
        tor_args = (tor_cmd, tor_datadir, tor_writable, self.done_event, tor_stdin_bytes, tor_ready_str, tor_ready_ev)
        tor_watchdog = threading.Thread(target=watchdog_thread_task, name="tor_{0}_watchdog".format(name), args=tor_args)
        tor_watchdog.start()
        self.threads.append(tor_watchdog)

        # wait until Tor finishes bootstrapping
        tor_ready_ev.wait()

        torctl_logpath = "{0}/onionperf.torctl.log".format(tor_datadir)
        torctl_writable = util.FileWritable(torctl_logpath)
        logging.info("Logging Tor {0} control port monitor output to {1}".format(name, torctl_logpath))

        # give a few seconds to make sure Tor had time to start listening on the control port
        time.sleep(3)

        # monitor every supported control event except the plain log-level ones
        torctl_events = [e for e in monitor.get_supported_torctl_events() if e not in ['DEBUG', 'INFO', 'NOTICE', 'WARN', 'ERR']]
        newnym_interval_seconds = 300  # presumably the NEWNYM signal period used by the monitor -- confirm in monitor.tor_monitor_run
        torctl_args = (control_port, torctl_writable, torctl_events, newnym_interval_seconds, self.done_event)
        torctl_helper = threading.Thread(target=monitor.tor_monitor_run, name="torctl_{0}_helper".format(name), args=torctl_args)
        torctl_helper.start()
        self.threads.append(torctl_helper)

        if hs_port_mapping is not None:
            logging.info("Creating ephemeral hidden service with v2 onions...")
            with Controller.from_port(port=control_port) as torctl:
                torctl.authenticate()
                # detached=True keeps the onion service alive after this controller disconnects
                response = torctl.create_ephemeral_hidden_service(hs_port_mapping, detached=True, await_publication=True)
                self.hs_service_id = response.service_id
                self.hs_control_port = control_port
                logging.info("Ephemeral hidden service is available at {0}.onion".format(response.service_id))

            logging.info("Creating ephemeral hidden service with v3 onions...")
            with Controller.from_port(port=control_port) as torctl:
                torctl.authenticate()
                # key_content='ED25519-V3' with stem's default key_type='NEW' requests a freshly generated v3 key
                response = torctl.create_ephemeral_hidden_service(hs_port_mapping, detached=True, await_publication=True, key_content='ED25519-V3')
                self.hs_v3_service_id = response.service_id
                self.hs_v3_control_port = control_port
                logging.info("Ephemeral hidden service is available at {0}.onion".format(response.service_id))
        return tor_writable, torctl_writable
Code example #2
0
    def __start_tgen(self, name, tgen_port, socks_port=None, server_urls=None):
        """Launch a TGen process (server or client) under a watchdog thread.

        With socks_port=None a listen (server) model is generated; otherwise a
        Torperf client model is generated that fetches from server_urls
        through the local SOCKS proxy on socks_port.

        Returns the util.FileWritable wrapping the TGen log file.
        """
        logging.info("Starting TGen {0} process on port {1}...".format(name, tgen_port))

        datadir = "{0}/tgen-{1}".format(self.datadir_path, name)
        if not os.path.exists(datadir):
            os.makedirs(datadir)

        # regenerate the graphml config from scratch on every start
        confpath = "{0}/tgen.graphml.xml".format(datadir)
        if os.path.exists(confpath):
            os.remove(confpath)

        if socks_port is not None:
            # client mode: download from server_urls via the local SOCKS proxy
            model.TorperfModel(tgen_port="{0}".format(tgen_port),
                               tgen_servers=server_urls,
                               socksproxy="127.0.0.1:{0}".format(socks_port)).dump_to_file(confpath)
        else:
            # server mode: just listen for incoming transfers
            model.ListenModel(tgen_port="{0}".format(tgen_port)).dump_to_file(confpath)
            logging.info("TGen server running at 0.0.0.0:{0}".format(tgen_port))

        logpath = "{0}/onionperf.tgen.log".format(datadir)
        writable = util.FileWritable(logpath)
        logging.info("Logging TGen {1} process output to {0}".format(logpath, name))

        # hand the TGen command line to a watchdog thread that owns the process
        watchdog_args = ("{0} {1}".format(self.tgen_bin_path, confpath),
                         datadir, writable, self.done_event, None, None, None)
        watchdog = threading.Thread(target=watchdog_thread_task,
                                    name="tgen_{0}_watchdog".format(name),
                                    args=watchdog_args)
        watchdog.start()
        self.threads.append(watchdog)

        return writable
Code example #3
0
    def __start_twistd(self, twistd_port):
        """Launch a Twistd static web server under a watchdog thread.

        Generates an index page into a docroot beneath the twistd data
        directory and serves it on twistd_port. Returns the util.FileWritable
        wrapping the Twisted log file.
        """
        logging.info("Starting Twistd server process on port {0}...".format(twistd_port))

        datadir = "{0}/twistd".format(self.datadir_path)
        docroot = "{0}/docroot".format(datadir)
        for directory in (datadir, docroot):
            if not os.path.exists(directory):
                os.makedirs(directory)

        logpath = "{0}/onionperf.twisted.log".format(datadir)
        writable = util.FileWritable(logpath)
        logging.info("Logging Twisted process output to {0}".format(logpath))

        # populate the docroot with a generated index and remember it
        generate_docroot_index(docroot)
        self.twisted_docroot = docroot

        cmd = "{0} -n -l - web --port {1} --path {2} --mime-type=None".format(
            self.twistd_bin_path, twistd_port, docroot)
        watchdog = threading.Thread(target=watchdog_thread_task,
                                    name="twistd_watchdog",
                                    args=(cmd, datadir, writable,
                                          self.done_event, None, None, None))
        watchdog.start()
        self.threads.append(watchdog)
        logging.info("Twistd web server running at 0.0.0.0:{0}".format(twistd_port))

        return writable
Code example #4
0
File: analysis.py  Project: mBr001/onionperf
    def save(self,
             filename=None,
             output_prefix=os.getcwd(),
             do_compress=True,
             version=1.0):
        """Serialize self.json_db as pretty-printed JSON to a file.

        When filename is None a default name is derived from self.date_filter.
        The file is written under output_prefix, which is created if missing.
        The version parameter is accepted for interface compatibility; it is
        not used by this method.
        """
        if filename is None:
            if self.date_filter is None:
                filename = "onionperf.analysis.json.xz"
            else:
                filename = "{}.onionperf.analysis.json.xz".format(util.date_to_string(self.date_filter))

        filepath = os.path.abspath(os.path.expanduser("{0}/{1}".format(output_prefix, filename)))
        if not os.path.exists(output_prefix):
            os.makedirs(output_prefix)

        logging.info("saving analysis results to {0}".format(filepath))

        # util.FileWritable handles optional xz compression transparently
        sink = util.FileWritable(filepath, do_compress=do_compress)
        json.dump(self.json_db,
                  sink,
                  sort_keys=True,
                  separators=(',', ': '),
                  indent=2)
        sink.close()

        logging.info("done!")
Code example #5
0
    def __start_tor(self, name, control_port, socks_port, hs_port_mapping=None):
        """Launch a Tor process and a control-port monitor thread.

        The torrc comes from self.create_tor_config() and is piped to Tor on
        stdin ('-f -'); this call blocks until the watchdog signals that Tor
        logged 'Bootstrapped 100'. If hs_port_mapping is given, onion services
        are started via self.start_onion_service() using key files kept under
        self.privatedir_path (a v2 key and a v3 key).

        Returns (tor_writable, torctl_writable): the util.FileWritable log
        sinks for the Tor process output and the control-port monitor output.
        """
        logging.info("Starting Tor {0} process with ControlPort={1}, SocksPort={2}...".format(name, control_port, socks_port))
        tor_datadir = "{0}/tor-{1}".format(self.datadir_path, name)
        # persistent onion-service key locations (shared across restarts)
        key_path_v2 = "{0}/os_key_v2".format(self.privatedir_path)
        key_path_v3 = "{0}/os_key_v3".format(self.privatedir_path)

        if not os.path.exists(tor_datadir): os.makedirs(tor_datadir)
        tor_config = self.create_tor_config(control_port,socks_port,tor_datadir,name)

        tor_logpath = "{0}/onionperf.tor.log".format(tor_datadir)
        tor_writable = util.FileWritable(tor_logpath)
        logging.info("Logging Tor {0} process output to {1}".format(name, tor_logpath))

        # from stem.process import launch_tor_with_config
        # tor_subp = launch_tor_with_config(tor_config, tor_cmd=self.tor_bin_path, completion_percent=100, init_msg_handler=None, timeout=None, take_ownership=False)
        tor_cmd = "{0} -f -".format(self.tor_bin_path)  # '-f -' reads the torrc from stdin
        tor_stdin_bytes = str_tools._to_bytes(tor_config)
        # the watchdog sets tor_ready_ev once this string appears in Tor's output
        tor_ready_str = "Bootstrapped 100"
        tor_ready_ev = threading.Event()
        tor_args = (tor_cmd, tor_datadir, tor_writable, self.done_event, tor_stdin_bytes, tor_ready_str, tor_ready_ev)
        tor_watchdog = threading.Thread(target=watchdog_thread_task, name="tor_{0}_watchdog".format(name), args=tor_args)
        tor_watchdog.start()
        self.threads.append(tor_watchdog)

        # wait until Tor finishes bootstrapping
        tor_ready_ev.wait()

        torctl_logpath = "{0}/onionperf.torctl.log".format(tor_datadir)
        torctl_writable = util.FileWritable(torctl_logpath)
        logging.info("Logging Tor {0} control port monitor output to {1}".format(name, torctl_logpath))

        # give a few seconds to make sure Tor had time to start listening on the control port
        time.sleep(3)

        # monitor every supported control event except the plain log-level ones
        torctl_events = [e for e in monitor.get_supported_torctl_events() if e not in ['DEBUG', 'INFO', 'NOTICE', 'WARN', 'ERR']]
        newnym_interval_seconds = 300  # presumably the NEWNYM signal period used by the monitor -- confirm in monitor.tor_monitor_run
        torctl_args = (control_port, torctl_writable, torctl_events, newnym_interval_seconds, self.done_event)
        torctl_helper = threading.Thread(target=monitor.tor_monitor_run, name="torctl_{0}_helper".format(name), args=torctl_args)
        torctl_helper.start()
        self.threads.append(torctl_helper)

        if hs_port_mapping is not None:
            # start both a v2 and a v3 onion service from the persistent keys
            self.start_onion_service(control_port, hs_port_mapping, key_path_v2)
            self.start_onion_service(control_port, hs_port_mapping, key_path_v3, v3=True)

        return tor_writable, torctl_writable
Code example #6
0
    def export_torperf_version_1_0(self, output_prefix=os.getcwd(), datetimestamp=None, do_compress=False):
        """Export analysis results in `@type torperf 1.0` format.

        Format reference: https://collector.torproject.org/#type-torperf

        Appends one .tpf file per (nickname, filesize) pair under
        output_prefix (created if missing), named with the given timestamp's
        date (current UTC date when datetimestamp is None). Transfers with
        missing keys are logged and skipped rather than aborting the export.
        """
        if not os.path.exists(output_prefix):
            os.makedirs(output_prefix)

        if datetimestamp is None:
            datetimestamp = datetime.datetime.utcnow()
        datestr = "{0:04d}-{1:02d}-{2:02d}".format(datetimestamp.year, datetimestamp.month, datetimestamp.day)

        for nickname in self.json_db['data']:
            if 'tgen' not in self.json_db['data'][nickname] or 'transfers' not in self.json_db['data'][nickname]['tgen']:
                continue

            # group transfers by file size: each size is exported to its own file
            xfers_by_filesize = {}
            for xfer_db in self.json_db['data'][nickname]['tgen']['transfers'].values():
                xfers_by_filesize.setdefault(xfer_db['filesize_bytes'], []).append(xfer_db)

            # index tor streams by local source port so each transfer can be
            # matched to the stream (and circuit) that carried it; guard the
            # 'tor' keys so a tgen-only analysis still exports instead of
            # raising KeyError (matches export_torperf_version_1_1)
            streams_by_srcport, circuits = {}, {}
            if 'tor' in self.json_db['data'][nickname]:
                if 'streams' in self.json_db['data'][nickname]['tor']:
                    for streams_db in self.json_db['data'][nickname]['tor']['streams'].values():
                        if 'source' in streams_db:
                            srcport = int(streams_db['source'].split(':')[1])
                            streams_by_srcport[srcport] = streams_db
                if 'circuits' in self.json_db['data'][nickname]['tor']:
                    circuits = self.json_db['data'][nickname]['tor']['circuits']

            for filesize in xfers_by_filesize:
                filepath = "{0}/{1}-{2}-{3}.tpf{4}".format(output_prefix, nickname, filesize, datestr, '.xz' if do_compress else '')

                logging.info("saving analysis results to {0}".format(filepath))

                # always append instead of truncating file
                output = util.FileWritable(filepath, do_compress=do_compress, do_truncate=False)
                output.open()

                for xfer_db in xfers_by_filesize[filesize]:
                    # if any keys are missing, log a warning and skip the transfer
                    try:
                        d = {}

                        d['SOURCE'] = nickname
                        d['ENDPOINTLOCAL'] = xfer_db['endpoint_local']
                        d['ENDPOINTPROXY'] = xfer_db['endpoint_proxy']
                        d['ENDPOINTREMOTE'] = xfer_db['endpoint_remote']
                        d['HOSTNAMELOCAL'] = xfer_db['hostname_local']
                        d['HOSTNAMEREMOTE'] = xfer_db['hostname_remote']

                        d['FILESIZE'] = xfer_db['filesize_bytes']
                        d['READBYTES'] = xfer_db['total_bytes_read']
                        d['WRITEBYTES'] = xfer_db['total_bytes_write']

                        def ts_to_str(ts): return "{0:.02f}".format(ts)

                        # initialize times to 0.0 so missing timings (e.g. due to read error) still export
                        time_keys = ['START', 'SOCKET', 'CONNECT', 'NEGOTIATE', 'REQUEST', 'RESPONSE', 'DATAREQUEST', 'DATARESPONSE', 'DATACOMPLETE', 'LAUNCH']
                        for i in range(1, 10):
                            time_keys.append('DATAPERC{}0'.format(i))
                        for key in time_keys:
                            d[key] = 0.0

                        # since these are initialized to 0, it's OK if we are missing some times, e.g. due to read error
                        if 'unix_ts_start' in xfer_db:
                            d['START'] = ts_to_str(xfer_db['unix_ts_start'])
                            if 'elapsed_seconds' in xfer_db:
                                if 'socket_create' in xfer_db['elapsed_seconds']:
                                    d['SOCKET'] = ts_to_str(xfer_db['unix_ts_start'] + xfer_db['elapsed_seconds']['socket_create'])
                                if 'socket_connect' in xfer_db['elapsed_seconds']:
                                    d['CONNECT'] = ts_to_str(xfer_db['unix_ts_start'] + xfer_db['elapsed_seconds']['socket_connect'])
                                if 'proxy_choice' in xfer_db['elapsed_seconds']:
                                    d['NEGOTIATE'] = ts_to_str(xfer_db['unix_ts_start'] + xfer_db['elapsed_seconds']['proxy_choice'])
                                if 'proxy_request' in xfer_db['elapsed_seconds']:
                                    d['REQUEST'] = ts_to_str(xfer_db['unix_ts_start'] + xfer_db['elapsed_seconds']['proxy_request'])
                                if 'proxy_response' in xfer_db['elapsed_seconds']:
                                    d['RESPONSE'] = ts_to_str(xfer_db['unix_ts_start'] + xfer_db['elapsed_seconds']['proxy_response'])
                                if 'command' in xfer_db['elapsed_seconds']:
                                    d['DATAREQUEST'] = ts_to_str(xfer_db['unix_ts_start'] + xfer_db['elapsed_seconds']['command'])
                                if 'response' in xfer_db['elapsed_seconds']:
                                    d['DATARESPONSE'] = ts_to_str(xfer_db['unix_ts_start'] + xfer_db['elapsed_seconds']['response'])

                                if 'payload_progress' in xfer_db['elapsed_seconds']:
                                    # set DATAPERC[10,20,...,90]; decile keys loaded from
                                    # JSON are strings, so convert through float before
                                    # scaling (bug fix: 'decile * 100' on a str repeats
                                    # the string; matches export_torperf_version_1_1)
                                    for decile in sorted(xfer_db['elapsed_seconds']['payload_progress'].keys()):
                                        if decile in xfer_db['elapsed_seconds']['payload_progress'] and xfer_db['elapsed_seconds']['payload_progress'][decile] is not None:
                                            d['DATAPERC{0}'.format(int(float(decile) * 100))] = ts_to_str(xfer_db['unix_ts_start'] + xfer_db['elapsed_seconds']['payload_progress'][decile])

                                if 'last_byte' in xfer_db['elapsed_seconds']:
                                    d['DATACOMPLETE'] = ts_to_str(xfer_db['unix_ts_start'] + xfer_db['elapsed_seconds']['last_byte'])

                        # could be ioerror or timeout or etc, but i dont think torperf distinguishes these
                        d['DIDTIMEOUT'] = 1 if xfer_db['is_error'] is True else 0

                        # now get the tor parts
                        srcport = int(xfer_db['endpoint_local'].split(':')[2])
                        if srcport in streams_by_srcport:
                            stream_db = streams_by_srcport[srcport]
                            circid = int(stream_db['circuit_id'])
                            if circid in circuits:
                                circuit_db = circuits[circid]

                                d['LAUNCH'] = circuit_db['unix_ts_start']
                                d['PATH'] = ','.join([item[0].split('~')[0] for item in circuit_db['path']])
                                d['BUILDTIMES'] = ','.join([str(item[1]) for item in circuit_db['path']])
                                d['TIMEOUT'] = circuit_db['build_timeout'] if 'build_timeout' in circuit_db else None
                                d['QUANTILE'] = circuit_db['build_quantile'] if 'build_quantile' in circuit_db else None
                                d['CIRC_ID'] = circid
                                d['USED_AT'] = stream_db['unix_ts_end']
                                d['USED_BY'] = int(stream_db['stream_id'])

                        output.write("@type torperf 1.0\r\n")
                        output_str = ' '.join("{0}={1}".format(k, d[k]) for k in sorted(d.keys()) if d[k] is not None).strip()
                        output.write("{0}\r\n".format(output_str))
                    except KeyError as e:  # 'as' form is valid on both Python 2.6+ and 3
                        logging.warning("KeyError while exporting torperf file, missing key '{0}', skipping transfer '{1}'".format(str(e), xfer_db['transfer_id']))
                        continue

                output.close()
                logging.info("done!")
Code example #7
0
File: analysis.py  Project: mBr001/onionperf
    def export_torperf_version_1_1(self,
                                   output_prefix=os.getcwd(),
                                   do_compress=False):
        """Export analysis results in `@type torperf 1.1` format.

        Format reference: https://collector.torproject.org/#type-torperf

        Appends one .tpf file per (nickname, filesize) pair under
        output_prefix (created if missing). When self.date_filter is set, it
        becomes part of the filename and downloads that did not start on that
        date are skipped. Transfers with missing keys are logged and skipped
        rather than aborting the export.
        """
        if not os.path.exists(output_prefix):
            os.makedirs(output_prefix)

        for nickname in self.json_db['data']:
            if 'tgen' not in self.json_db['data'][
                    nickname] or 'transfers' not in self.json_db['data'][
                        nickname]['tgen']:
                continue

            # group transfers by file size: each size is exported to its own file
            xfers_by_filesize = {}
            for xfer_db in self.json_db['data'][nickname]['tgen'][
                    'transfers'].values():
                xfers_by_filesize.setdefault(xfer_db['filesize_bytes'],
                                             []).append(xfer_db)

            # index tor streams by local source port so each transfer can be
            # matched to the stream (and circuit) that carried it; tor data
            # is optional, so a tgen-only analysis still exports
            streams_by_srcport, circuits = {}, []
            if 'tor' in self.json_db['data'][nickname]:
                if 'streams' in self.json_db['data'][nickname]['tor']:
                    for streams_db in self.json_db['data'][nickname]['tor'][
                            'streams'].values():
                        if 'source' in streams_db:
                            srcport = int(streams_db['source'].split(':')[1])
                            streams_by_srcport[srcport] = streams_db
                if 'circuits' in self.json_db['data'][nickname]['tor']:
                    circuits = self.json_db['data'][nickname]['tor'][
                        'circuits']

            for filesize in xfers_by_filesize:
                # build the filename
                filename_prefix = "{}-{}".format(nickname, filesize)
                filename_middle = "-{}".format(
                    util.date_to_string(self.date_filter)
                ) if self.date_filter is not None else ""
                filename_suffix = ".tpf.xz" if do_compress else ".tpf"
                filename = "{}{}{}".format(filename_prefix, filename_middle,
                                           filename_suffix)

                filepath = "{}/{}".format(output_prefix, filename)

                logging.info("saving analysis results to {0}".format(filepath))

                # always append instead of truncating file
                output = util.FileWritable(filepath,
                                           do_compress=do_compress,
                                           do_truncate=False)
                output.open()

                for xfer_db in xfers_by_filesize[filesize]:
                    # if any keys are missing, log a warning and skip the transfer
                    try:
                        d = {}

                        d['SOURCE'] = nickname
                        d['SOURCEADDRESS'] = self.json_db['data'][nickname][
                            'measurement_ip']
                        d['ENDPOINTLOCAL'] = xfer_db['endpoint_local']
                        d['ENDPOINTPROXY'] = xfer_db['endpoint_proxy']
                        d['ENDPOINTREMOTE'] = xfer_db['endpoint_remote']
                        d['HOSTNAMELOCAL'] = xfer_db['hostname_local']
                        d['HOSTNAMEREMOTE'] = xfer_db['hostname_remote']

                        d['FILESIZE'] = xfer_db['filesize_bytes']
                        d['READBYTES'] = xfer_db['total_bytes_read']
                        d['WRITEBYTES'] = xfer_db['total_bytes_write']
                        d['DIDTIMEOUT'] = 0

                        def ts_to_str(ts):
                            return "{0:.02f}".format(ts)

                        # initialize times to 0.0 so missing timings still export
                        time_keys = [
                            'START', 'SOCKET', 'CONNECT', 'NEGOTIATE',
                            'REQUEST', 'RESPONSE', 'DATAREQUEST',
                            'DATARESPONSE', 'DATACOMPLETE', 'LAUNCH'
                        ]
                        for i in range(1, 10):
                            time_keys.append('DATAPERC{}0'.format(i))
                        for key in time_keys:
                            d[key] = 0.0

                        # since these are initialized to 0, it's OK if we are missing some times, e.g. due to read error
                        if 'unix_ts_start' in xfer_db:

                            # if we need to filter by date and the download did not start on that date, skip it
                            if self.date_filter is not None:
                                start_datetime = datetime.datetime.utcfromtimestamp(
                                    xfer_db['unix_ts_start'])
                                if start_datetime is not None:
                                    if not util.do_dates_match(
                                            self.date_filter,
                                            start_datetime.date()):
                                        logging.info(
                                            "skipping download because start date {} does not match filter date {}"
                                            .format(
                                                util.date_to_string(
                                                    start_datetime.date()),
                                                util.date_to_string(
                                                    self.date_filter)))
                                        continue

                            d['START'] = ts_to_str(xfer_db['unix_ts_start'])
                            if 'elapsed_seconds' in xfer_db:
                                if 'socket_create' in xfer_db[
                                        'elapsed_seconds']:
                                    d['SOCKET'] = ts_to_str(
                                        xfer_db['unix_ts_start'] +
                                        xfer_db['elapsed_seconds']
                                        ['socket_create'])
                                if 'socket_connect' in xfer_db[
                                        'elapsed_seconds']:
                                    d['CONNECT'] = ts_to_str(
                                        xfer_db['unix_ts_start'] +
                                        xfer_db['elapsed_seconds']
                                        ['socket_connect'])
                                if 'proxy_choice' in xfer_db[
                                        'elapsed_seconds']:
                                    d['NEGOTIATE'] = ts_to_str(
                                        xfer_db['unix_ts_start'] +
                                        xfer_db['elapsed_seconds']
                                        ['proxy_choice'])
                                if 'proxy_request' in xfer_db[
                                        'elapsed_seconds']:
                                    d['REQUEST'] = ts_to_str(
                                        xfer_db['unix_ts_start'] +
                                        xfer_db['elapsed_seconds']
                                        ['proxy_request'])
                                if 'proxy_response' in xfer_db[
                                        'elapsed_seconds']:
                                    d['RESPONSE'] = ts_to_str(
                                        xfer_db['unix_ts_start'] +
                                        xfer_db['elapsed_seconds']
                                        ['proxy_response'])
                                if 'command' in xfer_db['elapsed_seconds']:
                                    d['DATAREQUEST'] = ts_to_str(
                                        xfer_db['unix_ts_start'] +
                                        xfer_db['elapsed_seconds']['command'])
                                if 'response' in xfer_db['elapsed_seconds']:
                                    d['DATARESPONSE'] = ts_to_str(
                                        xfer_db['unix_ts_start'] +
                                        xfer_db['elapsed_seconds']['response'])

                                if 'payload_progress' in xfer_db[
                                        'elapsed_seconds']:
                                    # set DATAPERC[10,20,...,90]
                                    for decile in sorted(
                                            xfer_db['elapsed_seconds']
                                        ['payload_progress'].keys()):
                                        if decile in xfer_db['elapsed_seconds'][
                                                'payload_progress'] and xfer_db[
                                                    'elapsed_seconds'][
                                                        'payload_progress'][
                                                            decile] is not None:
                                            decile_as_int = int(
                                                float(decile) * 100)
                                            d['DATAPERC{0}'.format(
                                                decile_as_int
                                            )] = ts_to_str(
                                                xfer_db['unix_ts_start'] +
                                                xfer_db['elapsed_seconds']
                                                ['payload_progress'][decile])
                                else:
                                    # no progress data at all: drop the
                                    # zero-initialized DATAPERC keys entirely
                                    for i in range(1, 10):
                                        d.pop('DATAPERC{}0'.format(i))

                                if 'last_byte' in xfer_db['elapsed_seconds']:
                                    d['DATACOMPLETE'] = ts_to_str(
                                        xfer_db['unix_ts_start'] +
                                        xfer_db['elapsed_seconds']['last_byte']
                                    )

                        if xfer_db['is_error']:
                            d['DIDTIMEOUT'] = 1  # could be ioerror or timeout or etc, torperf did not distinguish
                            d['ERRORCODE'] = xfer_db[
                                'error_code']  # this field is onionperf specific

                        # now get the tor parts
                        srcport = int(xfer_db['endpoint_local'].split(':')[2])
                        if srcport in streams_by_srcport:
                            stream_db = streams_by_srcport[srcport]
                            # NOTE(review): if the stream carries a failure reason but the
                            # transfer was not marked is_error, d['ERRORCODE'] was never set
                            # and the '+=' below raises KeyError, which the handler turns
                            # into skipping this transfer -- confirm this is intended.
                            # Also note the failure_reason_remote check is nested inside the
                            # failure_reason_local branch -- confirm it should not be a sibling.
                            if 'failure_reason_local' in stream_db.keys():
                                d['ERRORCODE'] += '_' + stream_db[
                                    'failure_reason_local']
                                if 'failure_reason_remote' in stream_db.keys():
                                    d['ERRORCODE'] += '_' + stream_db[
                                        'failure_reason_remote']
                            circid = int(stream_db['circuit_id'] or 0)
                            if circid in circuits:
                                circuit_db = circuits[circid]

                                d['LAUNCH'] = circuit_db['unix_ts_start']
                                d['PATH'] = ','.join([
                                    item[0].split('~')[0]
                                    for item in circuit_db['path']
                                ])
                                d['BUILDTIMES'] = ','.join([
                                    str(item[1]) for item in circuit_db['path']
                                ])
                                d['TIMEOUT'] = circuit_db[
                                    'build_timeout'] if 'build_timeout' in circuit_db else None
                                d['QUANTILE'] = circuit_db[
                                    'build_quantile'] if 'build_quantile' in circuit_db else None
                                d['CIRC_ID'] = circid
                                d['USED_AT'] = stream_db['unix_ts_end']
                                d['USED_BY'] = int(stream_db['stream_id'])
                        # map the accumulated code through the module-level ERRORS table
                        # (not shown here); an unknown code raises KeyError and the
                        # transfer is skipped by the handler below
                        if 'ERRORCODE' in d.keys():
                            d['ERRORCODE'] = ERRORS[d['ERRORCODE']]

                        output.write("@type torperf 1.1\r\n")
                        output_str = ' '.join("{0}={1}".format(k, d[k])
                                              for k in sorted(d.keys())
                                              if d[k] is not None).strip()
                        output.write("{0}\r\n".format(output_str))
                    except KeyError, e:
                        logging.warning(
                            "KeyError while exporting torperf file, missing key '{0}', skipping transfer '{1}'"
                            .format(str(e), xfer_db['transfer_id']))
                        continue

                output.close()
                logging.info("done!")