Example #1
def main():
    args = getargv()

    #quantify output
    matrix_map = map.map(args['fi'])

    matrix_inten_Top = TopInten(matrix_map, args['method'], args['n'],
                                args['rank'])

    tag = args['method'] + '_' + str(args['n']) + '_' + args['rank']

    writematrix(matrix_inten_Top, args, tag)

    #probe information

    probe_fo = (args['fopath'] + args['label'] + '_probe_information.txt'
                if 'label' in args else
                args['fopath'] + 'probe_information.txt')
    annot_dir = probe(matrix_map, probe_fo)

    annot_file = (args['fopath'] + args['label'] + '_annotation.txt'
                  if 'label' in args else
                  args['fopath'] + 'annotation.txt')
    label = args['label'] if 'label' in args else ''
    annotate(annot_dir, annot_file, label)

    print('Done!')
Example #2
def hash_insert(Table, key, scheme):
    print("Hash Insert algorithm: Inserting", key)

    hash_value = hash.hash_key(key, scheme)
    h_table = Table

    if scheme.collision_scheme == "linear" or scheme.collision_scheme == "quadratic":
        col, add, elem = probe.probe(h_table, key, hash_value, scheme, [])

    displayTable.print_table(h_table, scheme)
Example #3
    def probe_devices(self, devlist, nosave=False):
        devs = set(devlist) - set(self.devices)
        devs, failed = probe.probe(devs)

        for d in devs:
            try:
                d.init(self.dbusconn)
                d.nosave = nosave
                self.devices.append(d)
            except Exception:
                failed.append(str(d))
                d.destroy()

        return failed
Example #4
	def __init__(self):
		grc_wxgui.top_block_gui.__init__(self, title="Top Block")
		_icon_path = "/usr/share/icons/hicolor/32x32/apps/gnuradio-grc.png"
		self.SetIcon(wx.Icon(_icon_path, wx.BITMAP_TYPE_ANY))

		##################################################
		# Variables
		##################################################
		self.samp_rate = samp_rate = 100e3

		##################################################
		# Blocks
		##################################################
		self.wxgui_fftsink2_0 = fftsink2.fft_sink_c(
			self.GetWin(),
			baseband_freq=990e6,
			y_per_div=10,
			y_divs=10,
			ref_level=0,
			ref_scale=2.0,
			sample_rate=samp_rate,
			fft_size=1024,
			fft_rate=15,
			average=False,
			avg_alpha=None,
			title="FFT Plot",
			peak_hold=False,
		)
		self.Add(self.wxgui_fftsink2_0.win)
		self.uhd_usrp_source_0 = uhd.usrp_source(
			device_addr="addr=10.32.19.159",
			stream_args=uhd.stream_args(
				cpu_format="fc32",
				channels=range(1),
			),
		)
		self.uhd_usrp_source_0.set_samp_rate(samp_rate)
		self.uhd_usrp_source_0.set_center_freq(990e6, 0)
		self.uhd_usrp_source_0.set_gain(10, 0)
		self.uhd_usrp_source_0.set_antenna("RX2", 0)
		#self.analog_probe_avg_mag_sqrd_x_1 = analog.probe_avg_mag_sqrd_c(0, 0.5)
		self.probe_0 = probe.probe(0, 0.9)
		##################################################
		# Connections
		##################################################
		#self.connect((self.uhd_usrp_source_0, 0), (self.wxgui_fftsink2_0, 0))
		self.connect((self.uhd_usrp_source_0, 0), (self.probe_0, 0))
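A probe block on its own produces no visible output; GRC-generated flowgraphs usually read it from a separate polling thread. A minimal sketch of that pattern, assuming the custom probe.probe block exposes a level() accessor like the commented-out analog.probe_avg_mag_sqrd_c does (an assumption, since its interface is not shown here):

import threading
import time

def poll_probe(tb, rate=10):
    # Read the probe outside the flowgraph at a fixed rate; assumes the
    # block exposes level(), as analog.probe_avg_mag_sqrd_c does.
    while True:
        print("signal level:", tb.probe_0.level())
        time.sleep(1.0 / rate)

# usage, after tb.start():
# threading.Thread(target=poll_probe, args=(tb,), daemon=True).start()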
Example #5
	def __init__(self,options):
		grc_wxgui.top_block_gui.__init__(self, title="Top Block")
		_icon_path = "/usr/share/icons/hicolor/32x32/apps/gnuradio-grc.png"
		self.SetIcon(wx.Icon(_icon_path, wx.BITMAP_TYPE_ANY))
		#CHANGE ME
		self.cog_phy_0=phy.cog_phy(options.args)
		# dest_addr, source_addr, max_attempts, time_out
		self.probe_0 = probe.probe(0, 1)
		self.mac_0=aloha_mac.simple_arq(options.dest_addr,options.source_addr,options.max_attempts,options.time_out,self.probe_0)
		self.wake_up=heart_beat.heart_beat("check","wake_up",0.001)
		
		#CHANGE ME
		self.gr_file_source_0 = gr.file_source(gr.sizeof_char*1, options.input_file, True)
		
		#CHANGE ME
		self.gr_file_sink_0 = gr.file_sink(gr.sizeof_char*1, options.output_file)
		self.gr_file_sink_0.set_unbuffered(True)
		self.extras_stream_to_datagram_0 = grextras.Stream2Datagram(1, options.pkt_size)
		self.extras_datagram_to_stream_0 = grextras.Datagram2Stream(1)

		#self.tags_d_0=tags_demo.tags_demo()
		#self.extras_stream_to_datagram_1 = grextras.Stream2Datagram(1, 256)
		#self.extras_datagram_to_stream_1 = grextras.Datagram2Stream(1)
		
		
		##################################################
		# Connections
		##################################################
		self.connect((self.gr_file_source_0, 0), (self.extras_stream_to_datagram_0, 0))
		self.connect((self.extras_stream_to_datagram_0,0),(self.mac_0,1))
		self.connect((self.cog_phy_0,0),(self.mac_0,0))
		self.connect((self.mac_0,0),(self.cog_phy_0,0))
		self.connect((self.mac_0,1),(self.extras_datagram_to_stream_0,0))
		self.connect((self.extras_datagram_to_stream_0,0),(self.gr_file_sink_0,0))
		#self.connect((self.cog_phy_0,1),(self.wxgui_fftsink2_0,0))
		self.connect((self.wake_up,0),(self.mac_0,2))
		#self.connect((self.cog_phy_0,1),(self.probe_0,0))
		#self.connect((self.cog_phy_0,2),(self.wxgui_fftsink2_0,0))
		#self.connect((self.cog_phy_0,1),(self.mac_0,3))
		#self.connect((self.cog_phy_0,1),(self.tags_d_0,0))

		"""self.connect((self.gr_file_source_1, 0), (self.extras_stream_to_datagram_1, 0))
Example #6
 def _ffmpeg_convert(self, filename):
     print("post-processing " + filename, flush=True)
     with tempfile.TemporaryDirectory() as tmpdirname:
         filename = os.path.abspath(filename)
         tmpfilename = os.path.abspath(
             os.path.join(tmpdirname, os.path.basename(filename)))
         try:
             list(
                 run([
                     "/usr/bin/ffmpeg", "-i", filename, "-c", "copy",
                     tmpfilename
                 ]))
             shutil.move(tmpfilename, filename)
             list(
                 run([
                     "/usr/bin/ffmpeg", "-i", filename, "-vf",
                     "thumbnail,scale=640:360", "-frames:v", "1",
                     filename + ".png"
                 ]))
             return filename, probe(filename)
         except Exception as e:
             print("Exception: " + str(e), flush=True)
             return None, None
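Here and in the next two examples, every run([...]) call is wrapped in list(...), which suggests run is a generator that yields the process output line by line. A minimal sketch under that assumption (the real helper is not shown in these snippets):

import subprocess

def run(cmd):
    # Yield the command's output one decoded line at a time; wrapping the
    # call in list(...) then drains the process to completion.
    with subprocess.Popen(cmd, stdout=subprocess.PIPE,
                          stderr=subprocess.STDOUT) as proc:
        for line in proc.stdout:
            yield line.decode("utf-8", errors="replace").rstrip()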
Example #7
 def ffmpeg_convert(self, filename):
     with tempfile.TemporaryDirectory() as tmpdirname:
         filename = os.path.abspath(filename)
         tmpfilename = os.path.abspath(
             os.path.join(tmpdirname, os.path.basename(filename)))
         output = ""
         try:
             list(
                 run([
                     "/usr/bin/ffmpeg", "-i", filename, "-c", "copy",
                     tmpfilename
                 ]))
             shutil.move(tmpfilename, filename)
             list(
                 run([
                     "/usr/bin/ffmpeg", "-i", filename, "-vf",
                     "thumbnail,scale=640:360", "-frames:v", "1",
                     filename + ".png"
                 ]))
             return filename, probe(filename)
         except Exception as error:
             logger.error("Error converting mp4 with ffmpeg: %s %s" %
                          (error, error.output))
             raise
Example #8
    def _rec2db(self, office, sensor, timestamp, path):
        dt = datetime.datetime.fromtimestamp(timestamp / 1000)
        officestr = (str(office[0]) + "c" + str(office[1])).replace(
            "-", "n").replace(".", "d")
        mp4path = (self._storage + "/" + officestr + "/" + sensor + "/" +
                   str(dt.year) + "/" + str(dt.month) + "/" + str(dt.day))
        os.makedirs(mp4path, exist_ok=True)
        mp4file = mp4path + "/" + str(timestamp) + ".mp4"

        list(
            run([
                "/usr/bin/ffmpeg", "-f", "mp4", "-i", path, "-c", "copy",
                mp4file
            ]))
        list(
            run([
                "/usr/bin/ffmpeg", "-i", mp4file, "-vf", "scale=640:360",
                "-frames:v", "1", mp4file + ".png"
            ]))
        sinfo = probe(mp4file)

        sinfo.update({
            "sensor": sensor,
            "office": {
                "lat": office[0],
                "lon": office[1],
            },
            "time": timestamp,
            "path": mp4file[len(self._storage) + 1:],
        })

        if local_office:
            # calculate total bandwidth
            bandwidth = 0
            for stream1 in sinfo["streams"]:
                if "bit_rate" in stream1:
                    bandwidth = bandwidth + stream1["bit_rate"]
            if bandwidth:
                db_cam = DBQuery(host=dbhost, index="sensors", office=office)
                db_cam.update(sensor, {"bandwidth": bandwidth})

            # check disk usage and send alert
            disk_usage = psutil.disk_usage(self._storage)[3]
            if disk_usage > 75 and sensor_index:
                level = "fatal" if disk_uage > 85 else "warning"
                db_alt = DBIngest(host=dbhost, index="alerts", office=office)
                db_alt.ingest({
                    "time":
                    int(
                        time.mktime(datetime.datetime.now().timetuple()) *
                        1000),
                    "office": {
                        "lat": office[0],
                        "lon": office[1],
                    },
                    "location": {
                        "lat": office[0],
                        "lon": office[1],
                    },
                    level: [{
                        "message": "Disk usage: " + str(disk_usage) + "%",
                        "args": {
                            "disk": disk_usage,
                        }
                    }]
                })

            # ingest recording local
            db_rec = DBIngest(host=dbhost, index="recordings", office=office)
            db_rec.ingest(sinfo)
        else:
            # ingest recording cloud
            db_rec = DBIngest(host=dbhost, index="recordings_c", office="")
            db_rec.ingest(sinfo)
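The probe() helper used in this and the surrounding examples returns a dict of stream metadata (a "streams" list with bit_rate, a "resolution" block, and so on), but its body is never shown. A plausible sketch, assuming it wraps ffprobe's JSON output; the field names are inferred from how the callers use the result, not taken from the original module:

import json
import subprocess

def probe(uri):
    # Run ffprobe and reshape its JSON into the fields the callers read;
    # a hypothetical reconstruction, not the original implementation.
    r = subprocess.run(
        ["/usr/bin/ffprobe", "-v", "quiet", "-print_format", "json",
         "-show_format", "-show_streams", uri],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    info = json.loads(r.stdout) if r.returncode == 0 else {}
    streams = info.get("streams", [])
    video = next((s for s in streams if s.get("codec_type") == "video"), {})
    return {
        "streams": [{"bit_rate": int(s["bit_rate"])}
                    for s in streams if "bit_rate" in s],
        "resolution": {"width": int(video.get("width", 0)),
                       "height": int(video.get("height", 0))},
        "duration": float(info.get("format", {}).get("duration", 0)),
    }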
Example #9
        except Exception:
            print(traceback.format_exc(), flush=True)
            continue

        # check database to see if this camera is already registered
        r = None
        if dbhost:
            r = list(dbs.search("url='{}'".format(rtspuri), size=1))
            if r:
                if r[0]["_source"]["status"] != "disconnected":
                    print("Skipping {}:{}:{}".format(
                        ip, port, r[0]["_source"]["status"]),
                          flush=True)
                    continue

        sinfo = probe(rtspuri)
        if sinfo["resolution"]["width"] == 0 or sinfo["resolution"][
                "height"] == 0:
            print("Unknown width & height, skipping", flush=True)
            continue

        sinfo.update({
            'sensor': 'camera',
            'model': 'ip_camera',
            'url': rtspuri,
            'status': 'idle',
        })

        if not dbhost: continue

        try:
Example #10
# -- appengine/django setup
import django.conf
import django.template.loader

if not django.conf.settings.configured:
    django.conf.settings.configure(
        TEMPLATE_DIRS=('templates',),
    )

# -- the app --
import models
import probe

title = 'status of python.org services'
last_probe = probe.probe()

# render stats for last 7 measurements
probes = models.Sample.query().order(-models.Sample.time).fetch(limit=7)

print django.template.loader.render_to_string('status.html',
                      dict(title=title,
                           service=probe.SVC, url=probe.URL,
                           status=last_probe.status, details=last_probe.details,
                           latencyms=last_probe.latency*1000,
                           probes=probes))
Example #11
    def _rec2db(self, office, sensor, timestamp, path):
        disk_usage = psutil.disk_usage(self._storage)[3]
        if disk_usage < halt_rec_th:
            dt = datetime.datetime.fromtimestamp(timestamp / 1000)
            officestr = (str(office[0]) + "c" + str(office[1])).replace(
                "-", "n").replace(".", "d")
            mp4path = (self._storage + "/" + officestr + "/" + sensor + "/" +
                       str(dt.year) + "/" + str(dt.month) + "/" + str(dt.day))
            os.makedirs(mp4path, exist_ok=True)
            mp4file = mp4path + "/" + str(timestamp) + ".mp4"

            # perform a straight copy to fix negative timestamp for chrome
            list(
                run([
                    "/usr/local/bin/ffmpeg", "-f", "mp4", "-i", path, "-c",
                    "copy", mp4file
                ]))

            sinfo = probe(mp4file)
            sinfo.update({
                "sensor": sensor,
                "office": {
                    "lat": office[0],
                    "lon": office[1],
                },
                "time": timestamp,
                "path": mp4file[len(self._storage) + 1:],
            })
        else:
            print("Disk full: recording halted", flush=True)
            sinfo = None

        if local_office:
            if sinfo["bandwidth"]:
                db_cam = DBQuery(host=dbhost, index="sensors", office=office)
                db_cam.update(sensor, {"bandwidth": sinfo["bandwidth"]})

            # check disk usage and send alert
            disk_usage = psutil.disk_usage(self._storage).percent
            if disk_usage >= warn_disk_th:
                level = "fatal" if disk_usage >= fatal_disk_th else "warning"
                db_alt = DBIngest(host=dbhost, index="alerts", office=office)
                message = text["halt recording"].format(
                    disk_usage
                ) if disk_usage >= halt_rec_th else text["disk usage"].format(
                    disk_usage)
                db_alt.ingest({
                    "time":
                    int(time.time() * 1000),
                    "office": {
                        "lat": office[0],
                        "lon": office[1],
                    },
                    "location": {
                        "lat": office[0],
                        "lon": office[1],
                    },
                    level: [{
                        "message": message,
                        "args": {
                            "disk": disk_usage,
                        }
                    }]
                })

        # ingest recording local
        if sinfo:
            print("Ingest recording: {}".format(sinfo), flush=True)
            office1 = office if local_office else ""

            # denormalize sensor address to recordings
            dbs = DBQuery(host=dbhost, index="sensors", office=office1)
            r = list(dbs.search("_id='" + sinfo["sensor"] + "'", size=1))
            if r: sinfo["address"] = r[0]["_source"]["address"]

            db_rec = DBIngest(host=dbhost, index="recordings", office=office1)
            db_rec.ingest(sinfo)
Example #12
from probe import probe
from target import target
from crisprtree import preprocessing
from crisprtree import estimators
from crisprtree import evaluators
import pandas as pd

# File Paths to the input data
# Use argparse to dynamically input paths
paths = [
    'C:\\Users\\parth\\Desktop\\Python Workspace\\Senior Design\\Data\\hiv-1-700.fixed.fst',
    'C:\\Users\\parth\\Desktop\\Python Workspace\\Senior Design\\Data\\hiv-9086-9717.fixed.fst'
]

# Generate the probes/kmers for the Microarray & seqs that represent potential seqs for CRISPR
p = probe(paths).importSequences().generate()

# Print the probes to a csv file
# p.toCSV(p.kmers[0],'C:\Users\parth\Desktop\Python Workspace\Senior Design\Data\probes.csv')

# Generate the target sequences from the reference HXB2 cell line
t = target(p.proto, paths).generate()

# Filter out target seqs that have missing bases '-' and create all possible valid pairs between the protospacer and the target sequences.
#-------------------------------------------------------------------------------
inputSeqs = pd.DataFrame()
#p.proto = p.proto[0:1000]
for i, spacer in enumerate(p.proto[0]):
    if i % 1000 == 0:
        print(spacer, i)
    # Runs on the beginning of the LTR (t.kmers[0]), the end of the LTR (t.kmers[1])
Example #13
def Probe():
    PR = probe()
    ret = PR.ExecuteProbe(AP.target, AP.port, AP.protocol, AP.un, AP.pw, AP.filename)
    return ret
Example #14
dbs = DBQuery(index="sensors", office=office, host=dbhost)

# compete for a sensor connection
while True:
    try:
        for sensor in dbs.search(
                "type:'camera' and status:'disconnected' and office:[" +
                str(office[0]) + "," + str(office[1]) + "]"):
            try:
                if sensor["_source"]["url"].split(":")[0] != "rtmp": continue
                if "start_time" in sensor[
                        "_source"] and sensor["_source"]["start_time"] < 0:
                    continue
                rtmpuri = sensor["_source"]["url"]
                sinfo = probe(rtmpuri)
                if sinfo["resolution"]["width"] != 0 and sinfo["resolution"][
                        "height"] != 0:
                    print("RTMP status disconnected->idle:",
                          sensor["_id"],
                          sensor["_source"]["subtype"],
                          flush=True)
                    # ready for connecting
                    sinfo.update({"status": "idle"})
                r = dbs.update(sensor["_id"],
                               sinfo,
                               seq_no=sensor["_seq_no"],
                               primary_term=sensor["_primary_term"])
            except Exception as e:
                print("Exception: " + str(e), flush=True)
    except Exception as e:
Example #15
    def _rec2db(self, office, sensor, timestamp, path):
        disk_usage=psutil.disk_usage(self._storage)[3]
        if disk_usage<halt_rec_th:
            dt=datetime.datetime.fromtimestamp(timestamp/1000)
            officestr=(str(office[0])+"c"+str(office[1])).replace("-","n").replace(".","d")
            mp4path=self._storage+"/"+officestr+"/"+sensor+"/"+str(dt.year)+"/"+str(dt.month)+"/"+str(dt.day)
            os.makedirs(mp4path,exist_ok=True)
            mp4file=mp4path+"/"+str(timestamp)+".mp4"

            # perform a straight copy to fix negative timestamp for chrome
            list(run(["/usr/local/bin/ffmpeg","-f","mp4","-i",path,"-c","copy",mp4file]))

            sinfo=probe(mp4file)
            sinfo.update({
                "sensor": sensor,
                "office": {
                    "lat": office[0],
                    "lon": office[1],
                },
                "time": timestamp,
                "path": mp4file[len(self._storage)+1:],
            })
        else:
            print("Disk full: recording halted", flush=True)
            sinfo=None

        if local_office:
            if sinfo["bandwidth"]:
                db_cam=DBQuery(host=dbhost, index="sensors", office=office)
                db_cam.update(sensor, {"bandwidth": sinfo["bandwidth"]})

            # check disk usage and send alert
            disk_usage=psutil.disk_usage(self._storage).percent
            if disk_usage>=warn_disk_th:
                level="fatal" if disk_usage>=fatal_disk_th else "warning"
                db_alt=DBIngest(host=dbhost, index="alerts", office=office)
                message=text["halt recording"].format(disk_usage) if disk_usage>=halt_rec_th else text["disk usage"].format(disk_usage)
                db_alt.ingest({
                    "time": int(time.time()*1000),
                    "office": {
                        "lat": office[0],
                        "lon": office[1],
                    },
                    "location": {
                        "lat": office[0],
                        "lon": office[1],
                    },
                    level: [{
                        "message": message,
                        "args": {
                            "disk": disk_usage,
                        }
                    }]
                })

            # ingest recording local
            if sinfo:
                db_rec=DBIngest(host=dbhost, index="recordings", office=office)
                db_rec.ingest(sinfo)
        else:
            # ingest recording cloud
            if sinfo:
                db_s=DBQuery(host=dbhost, index="sensors", office=sinfo["office"])
                sensor=list(db_s.search("_id='"+sinfo["sensor"]+"'",size=1))
                if sensor:
                    # remove status
                    sensor[0]["_source"].pop("status",None)
                    # denormalize address
                    sinfo["address"]=sensor[0]["_source"]["address"]

                    # calculate hash code for the sensor
                    m=hashlib.md5()
                    m.update(json.dumps(sensor[0]["_source"],ensure_ascii=False).encode('utf-8'))
                    md5=m.hexdigest()

                    # locate the sensor record in cloud
                    db_sc=DBQuery(host=dbhost, index="sensors", office="")
                    sensor_c=list(db_sc.search("md5='"+md5+"'",size=1))
                    if not sensor_c:  # if not available, ingest a sensor record in cloud
                        sensor_c=[{ "_source": sensor[0]["_source"].copy() }]
                        sensor_c[0]["_source"]["md5"]=md5
                        db_sc=DBIngest(host=dbhost, index="sensors", office="")
                        print("Ingest sensor: {}".format(sensor_c[0]["_source"]), flush=True)
                        sensor_c[0]=db_sc.ingest(sensor_c[0]["_source"])

                    # replace cloud sensor id and ingest recording
                    sinfo["sensor"]=sensor_c[0]["_id"]

                    print("Ingest recording: {}".format(sinfo), flush=True)
                    db_rec=DBIngest(host=dbhost, index="recordings", office="")
                    db_rec.ingest(sinfo)

                    # copy local analytics to cloud
                    db_a=DBQuery(host=dbhost, index="analytics", office=sinfo["office"])
                    data=[]
                    for r in db_a.search('sensor="'+sensor[0]["_id"]+'" and office:['+str(office[0])+','+str(office[1])+'] and time>='+str(sinfo["time"])+' and time<='+str(sinfo["time"]+sinfo["duration"]*1000),size=10000):
                        r["_source"]["sensor"]=sinfo["sensor"]
                        data.append(r["_source"])
                    db_ac=DBIngest(host=dbhost, index="analytics", office="")
                    print("Ingest analytics: {}".format(len(data)), flush=True)
                    db_ac.ingest_bulk(data)
Example #16
def make_hash_table(input_list, scheme):

    t1 = time.time()

    class Table_Class:
        def __init__(self, key, next, prev):
            self.key = key
            self.next = next
            self.prev = prev

    hash_table = list()
    empty_stack = deque()

    for x in range(scheme.table_size):
        hash_table.append(
            Table_Class([None for y in range(scheme.bucket_size)], None, None))
        empty_stack.append(x)

    total_collision = 0
    address_accessed = 0
    elements_stored = 0
    failed = list()

    for key in input_list:
        hash_value = hash_key(key, scheme)

        if hash_value >= scheme.table_size:
            failed.append(key)
            continue

        if scheme.collision_scheme == "chaining":
            col, add, elem = chain.chain(hash_table, empty_stack, key,
                                         hash_value, failed)
            total_collision += col
            address_accessed += add
            elements_stored += elem

        elif scheme.collision_scheme == "linear" or scheme.collision_scheme == "quadratic":
            col, add, elem = probe.probe(hash_table, key, hash_value, scheme,
                                         failed)
            total_collision += col
            address_accessed += add
            elements_stored += elem

    class Stats_Class:
        def __init__(self, col, add, fail):
            self.collision = col  # Number of collisions
            self.accessed = add  # Number of addresses accessed
            self.failed_keys = fail  # All keys failed to be input into hash table

    hash_stats = Stats_Class(total_collision, address_accessed, failed)
    hash_stats.input_size = len(input_list)
    hash_stats.elements_stored = elements_stored
    load_factor = elements_stored / (scheme.table_size * scheme.bucket_size)
    hash_stats.load_factor = load_factor

    t2 = time.time()

    hash_stats.time_elapsed = (t2 - t1) * 1000000

    display.display_hash_table(hash_table, scheme, hash_stats)

    return hash_table
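Both this example and Example #2 call probe.probe(table, key, hash_value, scheme, failed) and unpack (collisions, addresses accessed, elements stored). A minimal open-addressing sketch with that shape, assuming the Table_Class bucket layout defined above (hypothetical; the real module is not shown):

def probe(table, key, hash_value, scheme, failed):
    # Linear or quadratic open addressing over Table_Class entries whose
    # .key attribute is a fixed-size bucket list.
    collisions, accessed = 0, 0
    for i in range(scheme.table_size):
        step = i if scheme.collision_scheme == "linear" else i * i
        addr = (hash_value + step) % scheme.table_size
        accessed += 1
        bucket = table[addr].key
        for slot in range(scheme.bucket_size):
            if bucket[slot] is None:
                bucket[slot] = key          # free slot: store and report
                return collisions, accessed, 1
        collisions += 1                     # bucket full: keep probing
    failed.append(key)                      # no free slot anywhere
    return collisions, accessed, 0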
Example #17
 def scan_units(self, units, rate):
     mlist = [[self.mode, self.tty, rate, u] for u in units]
     d = probe.probe(mlist, self.progress, 1, timeout=self.timeout)
     return d[0]
Example #18
#@+leo-ver=4-thin
#@+node:gcross.20090818114910.1235:@thin run.py
import sys
sys.path.append("lib")

from probe import probe, my_rank

n_particles = int(sys.argv[1])
frame_angular_velocity = float(sys.argv[2])

if my_rank == 0:
    for N_rotating_particles in xrange(n_particles+1):
        energy, denergy = probe(n_particles,frame_angular_velocity,N_rotating_particles)
        print N_rotating_particles, energy, denergy
else:
    for N_rotating_particles in xrange(n_particles+1):
        probe(n_particles,frame_angular_velocity,N_rotating_particles)
#@-node:gcross.20090818114910.1235:@thin run.py
#@-leo
Example #19
# -- appengine/django setup
import django.conf
import django.template.loader

if not django.conf.settings.configured:
    django.conf.settings.configure(TEMPLATE_DIRS=('templates', ), )

# -- the app --
import models
import probe

title = 'status of python.org services'
last_probe = probe.probe()

# render stats for last 7 measurements
probes = models.Sample.query().order(-models.Sample.time).fetch(limit=7)

print django.template.loader.render_to_string(
    'status.html',
    dict(title=title,
         service=probe.SVC,
         url=probe.URL,
         status=last_probe.status,
         details=last_probe.details,
         latencyms=last_probe.latency * 1000,
         probes=probes))
Example #20
 def scan(self):
     for net in self.nets:
         log.info('Scanning %s', net)
         hosts = filter(net.ip.__ne__, net.network.hosts())
         mlist = [[self.proto, str(h), self.port, self.unit] for h in hosts]
         probe.probe(mlist, self.progress, 4, timeout=self.timeout)
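Examples #17 and #20 share a batch-oriented probe.probe(jobs, progress, workers, timeout=...) that fans a list of [proto, host, port, unit] jobs out to a small worker pool and reports each result through the progress callback. A hedged sketch of that shape; try_connect and the exact job tuple are assumptions, since the module itself is not shown:

from concurrent.futures import ThreadPoolExecutor

def probe(jobs, progress, workers, timeout=None):
    # Fan the jobs out to `workers` threads, report each outcome through
    # the caller's progress callback, and return the jobs that answered.
    found = []

    def check(job):
        ok = try_connect(*job, timeout=timeout)  # try_connect is hypothetical
        progress(job, ok)
        if ok:
            found.append(job)

    with ThreadPoolExecutor(max_workers=workers) as pool:
        list(pool.map(check, jobs))
    return found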