def apply_config(self):
    global VALUES
    config_reload()
    self.cmd = config_get("misc.cmd", str)
    self.ip = config_get("misc.ip", str)
    self.port = config_get("misc.port", int)
    self.values = VALUES[self.cmd]
def work(self):
    camera = picamera.PiCamera()
    camera.hflip = config_get("raspberry_horizontal_flip")
    camera.vflip = config_get("raspberry_vertical_flip")
    camera.resolution = (config_get("raspberry_resolution_x"),
                         config_get("raspberry_resolution_y"))
    camera.start_preview()
    time.sleep(2)  # give the sensor time to warm up before capturing
    stream = io.BytesIO()
    released = True
    try:
        for notUsed in camera.capture_continuous(
                stream, format='jpeg', resize=None,
                quality=config_get("raspberry_base_quality")):
            self.frame_cd.acquire()
            released = False
            self.frame = stream.getvalue()
            self.frame_cd.notify_all()  # wake consumers waiting for a frame
            self.frame_cd.release()
            released = True
            stream.seek(0)
            stream.truncate()
            if not self.keep_working:
                break
    finally:
        camera.close()
        if not released:
            # don't leave the condition variable locked after an exception
            self.frame_cd.release()
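# A possible consumer for the frames produced by work() above (a minimal
# sketch, assuming frame_cd is a threading.Condition; this is the usual
# companion to this producer pattern, not necessarily this project's code).
def get_frame(self):
    with self.frame_cd:
        # block until work() publishes the next JPEG via notify_all()
        self.frame_cd.wait()
        return self.frame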
def getParentArgParse(args=None):
    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument('--binSize', '-bs',
                        help='Size of the bins in bp for the output '
                             'of the bigwig/bedgraph file',
                        metavar="INT bp",
                        type=int,
                        default=50)
    parser.add_argument('--region', '-r',
                        help='Region of the genome to limit the operation '
                             'to - this is useful when testing parameters to '
                             'reduce the computing time. The format is '
                             'chr:start:end, for example --region chr10 or '
                             '--region chr10:456700:891000',
                        metavar="CHR:START:END",
                        required=False,
                        type=genomicRegion)
    parser.add_argument('--numberOfProcessors', '-p',
                        help='Number of processors to use. Type "max/2" to '
                             'use half the maximum number of processors or '
                             '"max" to use all available processors.',
                        metavar="INT",
                        type=numberOfProcessors,
                        default=cfg.config_get('general',
                                               'default_proc_number'),
                        required=False)
    parser.add_argument('--verbose', '-v',
                        help='Set to see processing messages.',
                        action='store_true')
    return parser
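# A usage sketch for getParentArgParse(): add_help=False marks it as a parent
# parser, so tools can inherit the shared options via argparse's `parents`
# mechanism. The tool name and --bam option below are made up for illustration.
def exampleToolArgs():
    parser = argparse.ArgumentParser(
        description='example tool sharing the common options',
        parents=[getParentArgParse()])
    parser.add_argument('--bam', required=True, help='input BAM file')
    return parser

# e.g. exampleToolArgs().parse_args(['--bam', 'sample.bam', '--binSize', '25'])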
def test_config_get(self):
    """Test the most important function of the config module: config_get"""
    # generate and set data
    configuration = []
    for key, value in self.testconfig.iteritems():
        configuration += ["--" + key]
        if value:
            configuration += [str(value)]
    config.set_data(configuration)
    # check we can read back the data we set
    self.assertEqual(config.config_get(('server', 'port')), 6000,
                     "can't find argument values set by set_data")
    self.assertEqual(config.config_get("nonexisting", None), None,
                     "config_get doesn't return None on a nonexisting param")
def initialize_app(self, argv):
    self.LOG.debug('initialize_app')
    logging.getLogger("requests").setLevel(logging.WARNING)
    self._config = config_get(USER_CONFIG_PATH)
    self._session = OxCliSession(self._config['url'],
                                 self._config['user'],
                                 self._config['password'])
    self.LOG.debug('config and session ready')
def test_set_data_and_set_conf(self):
    """Test the set_data and set_conf functions"""
    # generate and set data
    configuration = []
    for key, value in self.testconfig.iteritems():
        configuration += ["--" + key]
        if value:
            configuration += [str(value)]
    config.set_data(configuration)
    # check we can read back the data we set
    self.assertEqual(config.config_get(("server", "port")), 6000,
                     "can't find argument values set by set_data")
    self.assertEqual(config.config_get(("logging", "verbose")), True,
                     "can't find argument values set by set_data")
    # check that the system exits when we give unrecognized arguments
    config.set_data("--some values --that --dont --exist".split())
    self.assertRaises(SystemExit, config._set_conf)
def test():
    from data_load.sql import HubbleBase
    from config import config_parser, config_get
    config = config_parser()
    uri = config_get(config, 'HubbleConnection', 'uri')
    session = start_session(uri, HubbleBase)
    qm = QueryManager(session,
                      datetime.now() - timedelta(days=7),
                      datetime.now())
    o = qm.query_parts()
    print('done')
def mask(self, stft):
    f1 = config_get("preprocess.mask_f1", float)
    f2 = config_get("preprocess.mask_f2", float)
    t1 = config_get("preprocess.mask_t1", float)
    t2 = config_get("preprocess.mask_t2", float)
    s = stft.shape
    mask = np.ones(s)
    # zero the first t1 and last t2 fraction of the time axis (rows)
    for i in xrange(s[0]):
        if i < float(t1) * s[0] or s[0] - i < float(t2) * s[0]:
            mask[i] = 0
    # zero a band around the centre frequency and the band edges (columns)
    for i in xrange(s[1]):
        if np.abs(i - (s[1] / 2)) < float(f1) * s[1]:
            mask[:, i] = 0
        if i < float(f2) * s[1] or s[1] - i < float(f2) * s[1]:
            mask[:, i] = 0
    return mask * stft
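# A standalone sketch of the masking logic above, runnable without the class
# or config module (the fractions are assumed example values, not the
# project's defaults): rows at both time borders and columns around the
# centre frequency and at the band edges are zeroed before multiplying.
import numpy as np

def demo_mask(shape, f1=0.1, f2=0.05, t1=0.1, t2=0.1):
    rows, cols = shape
    mask = np.ones(shape)
    for i in range(rows):
        if i < t1 * rows or rows - i < t2 * rows:
            mask[i] = 0  # time border
    for i in range(cols):
        if abs(i - cols // 2) < f1 * cols:
            mask[:, i] = 0  # band around the centre frequency
        if i < f2 * cols or cols - i < f2 * cols:
            mask[:, i] = 0  # band edges
    return mask

print(demo_mask((10, 8)))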
def preprocess(self, trace, debug=False): s = trace if self.stft: s = stft(s, self.fft_len, self.fft_step, log=self.stft_log) if config_get("preprocess.static_alignmet", bool): s = self.static_alignment_stft(s) if debug: plot(s, f0=cap.frequency, samp_rate=cap.samp_rate, fft_step=pre.fft_step, title="Aligned Trace", png="/tmp/aligned.png") if config_get("preprocess.mask", bool): s = self.mask(s) return s
def __init__(self):
    self.i = 0
    self.tb = None
    self.reference = None
    self.do_read = False
    self.config_reload()
    self.demod_decimation = 1
    self.trig_decimation = 100
    if self.offline:
        self.files = glob.glob(self.tracedir + "/*.cfile")
        shuffle(self.files)
    else:
        self.trig_fifo = "/tmp/trig.fifo"
        self.demod_fifo = "/tmp/demod.fifo"
        log.debug("creating fifo")
        self.bufflen = 1024
        try:
            os.unlink(self.trig_fifo)
            os.unlink(self.demod_fifo)
        except OSError:
            pass  # fifos did not exist yet
        os.mkfifo(self.trig_fifo)
        os.mkfifo(self.demod_fifo)
        # start recv_thread
        self.queue = Queue(maxsize=1024)
        self.recv_t = threading.Thread(target=self.read, args=(self.queue, ))
        self.recv_t.start()
        # create top_block
        top_block = imp.load_source("top_block", "grc/top_block.py")
        self.tb = top_block.top_block()
        self.tb.Start(True)
        print "started"
        self.tb_t = threading.Thread(target=self.tb.Wait)
        self.tb_t.start()
        log.debug("top_block created")
        self.tb_configure()
        log.debug("top_block configured")
        dut = imp.load_source("dut", config_get("dut"))
        self.dut = dut.dut()
    if self.dump:
        try:
            os.makedirs(self.tracedir)
        except OSError:
            pass  # directory already exists
def bedGraphToBigWig(chromSizes, bedGraphPath, bigWigPath, sort=True):
    """
    Takes a bedgraph file, sorts it and converts it to a bigwig file
    using command line tools. Will fail if the bedGraphToBigWig path
    changes.
    """
    from tempfile import NamedTemporaryFile
    from os import remove, system

    # bedGraphToBigWig requires the chromosome names and sizes to be
    # saved into a file
    _file2 = NamedTemporaryFile(delete=False)
    for chrom, size in chromSizes:
        _file2.write("{}\t{}\n".format(chrom, size))
    _file2.close()
    chrSizesFileName = _file2.name

    if sort:
        sort_cmd = cfg.config_get('external_tools', 'sort')
        # temporary file to store the sorted bedgraph file
        _file = NamedTemporaryFile(delete=False)
        tempFileName1 = _file.name
        system("{} -k1,1 -k2,2n {} > {}".format(sort_cmd, bedGraphPath,
                                                tempFileName1))
        bedGraphPath = tempFileName1

    bedgraph_to_bigwig = cfg.config_get('external_tools',
                                        'bedgraph_to_bigwig')
    system("{} {} {} {}".format(bedgraph_to_bigwig, bedGraphPath,
                                chrSizesFileName, bigWigPath))
    if sort:
        remove(tempFileName1)
    remove(chrSizesFileName)
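# A short usage sketch: chromSizes is an iterable of (name, size) tuples, as
# the loop above shows. The sizes and file names are made up for illustration.
chrom_sizes = [('chr1', 249250621), ('chr2', 243199373)]
bedGraphToBigWig(chrom_sizes, 'coverage.bg', 'coverage.bw', sort=True)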
def preprocess(self, trace, debug=False):
    s = trace
    if self.stft:
        s = stft(s, self.fft_len, self.fft_step, log=self.stft_log)
        if debug:
            plot(s, f0=cap.frequency, samp_rate=cap.samp_rate,
                 fft_step=pre.fft_step, title="Aligned Trace")
    if config_get("preprocess.mask", bool):
        s = self.mask(s)
    else:
        s = np.abs(s)
    return s
def getlogger():
    log_path = config_get("logdir")
    if not log_path:
        print "log path not found in config file"
        exit(1)
    if not os.path.exists(os.path.dirname(log_path)):
        os.makedirs(os.path.dirname(log_path))
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)
    if not logger.handlers:
        # rotate the log file daily
        handler = logging.handlers.TimedRotatingFileHandler(log_path, 'D')
        fmt = logging.Formatter(
            "%(asctime)s - %(pathname)s - %(filename)s - %(funcName)s "
            "- %(lineno)s - %(levelname)s - %(message)s",
            "%Y-%m-%d %H:%M:%S")
        handler.setFormatter(fmt)
        logger.addHandler(handler)
    return logger
def center(self):
    self.servo_write(self.panServoId, config_get("pan_center"))
    self.servo_write(self.tiltServoId, config_get("tilt_center"))
# this import must come first if we want the 'Main' section
# to appear at the top of the config file
from config import config_get

# config format: float, minutes
INTERVAL = int(float(config_get("Main", "break_interval", 15.0)) * 60)
# config format: int, seconds
PREPARING_TIME = int(config_get("Main", "preparing_time", 3))

if PREPARING_TIME > INTERVAL:
    exit("[ERROR] preparing_time can't be greater than break_interval")
if PREPARING_TIME < 0:
    exit("[ERROR] preparing_time can't be a negative number")

from time import sleep
from ui.gtk.gtk_break_screen import GtkBreakScreen
from core.core import UsefulB

ui = GtkBreakScreen()
cor = UsefulB(ui)

while True:
    sleep(INTERVAL - PREPARING_TIME)
    if PREPARING_TIME != 0:
        ui.sendNotify("Get ready for a break after "
                      + str(PREPARING_TIME) + " seconds", PREPARING_TIME)
        sleep(PREPARING_TIME)
    # it will wait until the problem is solved
    cor.start()
def check_auth(username, password):
    return (username == config_get("auth_login")
            and password == config_get("auth_password"))
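# A possible companion to check_auth() (a sketch assuming a Flask app; the
# decorator below is the conventional HTTP Basic auth pattern, not
# necessarily this project's code).
from functools import wraps
from flask import request, Response

def requires_auth(f):
    @wraps(f)
    def decorated(*args, **kwargs):
        auth = request.authorization
        if not auth or not check_auth(auth.username, auth.password):
            # ask the client for HTTP Basic credentials
            return Response(
                'Login required', 401,
                {'WWW-Authenticate': 'Basic realm="Login Required"'})
        return f(*args, **kwargs)
    return decorated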
def apply_config(self):
    self.outdir = config_get("misc.outdir", str)
    self.samp_rate = config_get("capture.samp_rate", int)
def left(self):
    self.servo_write(self.panServoId, "+" + config_get("pan_step"))
def writeBedGraph(
        bamOrBwFileList, outputFileName, fragmentLength,
        func, funcArgs, tileSize=25, region=None,
        numberOfProcessors=None, format="bedgraph",
        extendPairedEnds=True, zerosToNans=True, smoothLength=0,
        fixed_step=False):
    r"""
    Given a list of bamfiles, a function and the function arguments,
    this method writes a bedgraph (or bigwig) file for a partition of the
    genome into tiles of given size, with a value for each tile that
    corresponds to the given function and that is related to the coverage
    underlying the tile.

    >>> test = Tester()
    >>> outFile = tempfile.NamedTemporaryFile()
    >>> funcArgs = {'scaleFactor': 1.0}
    >>> writeBedGraph([(test.bamFile1, 'bam')], outFile.name,
    ... 0, scaleCoverage, funcArgs, region='3R:0:200')
    >>> open(outFile.name, 'r').readlines()
    ['3R\t100\t200\t1.0\n']
    >>> outFile.close()
    """
    bigwig_info = cfg.config_get('external_tools', 'bigwig_info')
    bamHandlers = [openBam(indexedFile)
                   for indexedFile, fileFormat in bamOrBwFileList
                   if fileFormat == 'bam']
    if len(bamHandlers):
        genomeChunkLength = getGenomeChunkLength(bamHandlers, tileSize)
        # check if both bam files correspond to the same species
        # by comparing the chromosome names:
        chromNamesAndSize = getCommonChrNames(bamHandlers, verbose=False)
    else:
        genomeChunkLength = int(10e6)
        bigwigs = [fileName for fileName, fileFormat in bamOrBwFileList
                   if fileFormat == 'bigwig']
        cCommon = []
        chromNamesAndSize = {}
        for bw in bigwigs:
            inBlock = False
            for line in os.popen(
                    "{} -chroms {}".format(bigwig_info, bw)).readlines():
                if line[0:10] == "chromCount":
                    inBlock = True
                    continue
                if line[0:5] == "bases":
                    break
                if inBlock:
                    chromName, id, size = line.strip().split(" ")
                    size = int(size)
                    if chromName in chromNamesAndSize:
                        cCommon.append(chromName)
                        if chromNamesAndSize[chromName] != size:
                            print "\nWARNING\n" \
                                "Chromosome {} length reported in the " \
                                "bigwig files differ.\n{} for {}\n" \
                                "{} for {}.\n\nThe smallest " \
                                "length will be used".format(
                                    chromName, chromNamesAndSize[chromName],
                                    bigwigs[0], size, bigwigs[1])
                            chromNamesAndSize[chromName] = min(
                                chromNamesAndSize[chromName], size)
                    else:
                        chromNamesAndSize[chromName] = size
        # get the list of common chromosome names and sizes
        chromNamesAndSize = [(k, v)
                             for k, v in chromNamesAndSize.iteritems()
                             if k in cCommon]

    if region:
        # in case a region is used, append the tilesize
        region += ":{}".format(tileSize)

    res = mapReduce.mapReduce((tileSize, fragmentLength, bamOrBwFileList,
                               func, funcArgs, extendPairedEnds,
                               smoothLength, zerosToNans, fixed_step),
                              writeBedGraph_wrapper,
                              chromNamesAndSize,
                              genomeChunkLength=genomeChunkLength,
                              region=region,
                              numberOfProcessors=numberOfProcessors)

    # concatenate intermediary bedgraph files
    outFile = open(outputFileName + ".bg", 'wb')
    for tempFileName in res:
        if tempFileName:
            # concatenate all intermediate tempfiles into one bedgraph file
            shutil.copyfileobj(open(tempFileName, 'rb'), outFile)
            os.remove(tempFileName)
    bedGraphFile = outFile.name
    outFile.close()
    if format == 'bedgraph':
        os.rename(bedGraphFile, outputFileName)
        if debug:
            print "output file: %s" % (outputFileName)
    else:
        bedGraphToBigWig(chromNamesAndSize, bedGraphFile,
                         outputFileName, False)
        if debug:
            print "output file: %s" % (outputFileName)
        os.remove(bedGraphFile)
from datetime import datetime
from config import config_get, CONFIG_FOLDER_PATH

# config format: path
STAT_PATH = config_get("Stats", "file_path",
                       str(CONFIG_FOLDER_PATH / "stats"))


class Stats:
    def __init__(self):
        time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        from os.path import isfile
        newline = "\n" if isfile(STAT_PATH) else ""
        self.write(newline + "--- Started at " + time + " ---", False)

    def log_solving(self, task):
        txt = self.construct_task_name(task)
        # tag 'processing'
        txt += "[~] "
        txt += task.getTextSolving()
        self.write(txt)

    def log_wrong(self, task, ans):
        txt = self.construct_task_name(task)
        # tag 'wrong'
        txt += "[-] "
        txt += task.getTextWrong(ans)
        self.write(txt)

    def log_good(self, task):
        txt = self.construct_task_name(task)
        # tag 'good'
def config_reload(self):
    config_reload()
    # config
    self.capture_frequency = config_get("capture.frequency", int)
    self.capture_samp_rate = config_get("capture.samp_rate", int)
    self.capture_gain = config_get("capture.gain", int)
    self.offline = False
    self.dump = False
    # trigger
    self.trigger_low_pass = config_get("trigger.low_pass", int)
    self.trigger_frequency = config_get("trigger.frequency", int)
    self.trigger_delay = config_get("trigger.delay", float)
    self.trigger_pre = config_get("trigger.pre", float)
    self.trigger_post = config_get("trigger.post", float)
    # demod
    self.demod_select = config_get("demod.select", list)
    self.demod_frequency = config_get("demod.frequency", int)
    self.demod_bandpass_low = config_get("demod.bandpass_low", int)
    self.demod_bandpass_high = config_get("demod.bandpass_high", int)
    self.demod_decimation = config_get("demod.decimation", int)
    # stft
    self.stft = config_get("preprocess.stft", bool)
    self.stft_log = config_get("preprocess.stft_log", bool)
    self.fft_len = config_get("preprocess.fft_len", int)
    self.fft_step = config_get("preprocess.fft_step", int)

    self.samp_rate = self.capture_samp_rate / self.demod_decimation
    self.demod_bandpass_high = min(self.samp_rate / 2,
                                   self.demod_bandpass_high)
    if self.demod_select[0] == 0:
        self.demod_decimation = 1
        self.frequency = self.capture_frequency
    elif self.demod_select[0] == 1:
        self.demod_decimation = 1
        self.frequency = self.trigger_frequency
    elif self.demod_select[0] == 2:
        self.frequency = self.trigger_frequency
    elif self.demod_select[0] == 3:
        self.frequency = 0
def apply_config(self):
    config_reload()
    self.port = config_get("misc.port", int)
def up(self):
    self.servo_write(self.tiltServoId, "-" + config_get("tilt_step"))
def down(self):
    self.servo_write(self.tiltServoId, "+" + config_get("tilt_step"))
def computeMatrixOptArgs(case=['scale-regions', 'reference-point'][0]):
    parser = argparse.ArgumentParser(add_help=False)
    optional = parser.add_argument_group('Optional arguments')
    if case == 'scale-regions':
        optional.add_argument('--regionBodyLength', '-m',
                              default=1000,
                              type=int,
                              help='Distance in bp to which all regions are '
                                   'going to be fitted.')
        optional.add_argument('--startLabel',
                              default='TSS',
                              help='Label shown in the plot for the start of '
                                   'the region. Default is TSS '
                                   '(transcription start site), but could be '
                                   'changed to anything, e.g. "peak start". '
                                   'Same for the --endLabel option. See '
                                   'below.')
        optional.add_argument('--endLabel',
                              default='TES',
                              help='Label shown in the plot for the region '
                                   'end. Default is TES (transcription end '
                                   'site).')
        optional.add_argument('--beforeRegionStartLength', '-b',
                              '--upstream',
                              default=0,
                              type=int,
                              help='Distance upstream of the start site of '
                                   'the regions defined in the region file. '
                                   'If the regions are genes, this would be '
                                   'the distance upstream of the '
                                   'transcription start site.')
        optional.add_argument('--afterRegionStartLength', '-a',
                              '--downstream',
                              default=0,
                              type=int,
                              help='Distance downstream of the end site of '
                                   'the given regions. If the regions are '
                                   'genes, this would be the distance '
                                   'downstream of the transcription end '
                                   'site.')
    elif case == 'reference-point':
        optional.add_argument('--referencePoint',
                              default='TSS',
                              choices=['TSS', 'TES', 'center'],
                              help='The reference point for the plotting '
                                   'could be either the region start (TSS), '
                                   'the region end (TES) or the center of '
                                   'the region.')
        # set region body length to zero for reference point mode
        optional.add_argument('--regionBodyLength', help=argparse.SUPPRESS,
                              default=0, type=int)
        optional.add_argument('--beforeRegionStartLength', '-b',
                              '--upstream',
                              default=500,
                              type=int,
                              metavar='INT bp',
                              help='Distance upstream of the '
                                   'reference-point selected.',
                              required=True)
        optional.add_argument('--afterRegionStartLength', '-a',
                              '--downstream',
                              default=1500,
                              metavar='INT bp',
                              type=int,
                              help='Distance downstream of the '
                                   'reference-point selected.',
                              required=True)
        optional.add_argument('--nanAfterEnd',
                              action='store_true',
                              help='If set, any values after the region end '
                                   'are discarded. This is useful to '
                                   'visualize the region end when not using '
                                   'the scale-regions mode and when the '
                                   'reference-point is set to the TSS.')

    optional.add_argument('--binSize', '-bs',
                          help='Length, in base pairs, of the '
                               'non-overlapping bin for averaging the score '
                               'over the regions length.',
                          type=int,
                          default=10)
    optional.add_argument('--sortRegions',
                          help='Whether the output file should present the '
                               'regions sorted. The default is to sort in '
                               'descending order based on the mean value '
                               'per region.',
                          choices=["descend", "ascend", "no"],
                          default='no')
    optional.add_argument('--sortUsing',
                          help='Indicate which method should be used for '
                               'sorting. The value is computed for each '
                               'row.',
                          choices=["mean", "median", "max", "min", "sum",
                                   "region_length"],
                          default='mean')
    optional.add_argument('--averageTypeBins',
                          default='mean',
                          choices=["mean", "median", "min", "max", "std",
                                   "sum"],
                          help='Define the type of statistic that should be '
                               'used over the bin size range. The options '
                               'are: "mean", "median", "min", "max" and '
                               '"std". The default is "mean".')
    optional.add_argument('--missingDataAsZero',
                          help='[only for bigwig input] Set to "yes", if '
                               'missing data should be indicated as zeros. '
                               'Default is to ignore such cases, which will '
                               'be depicted as black areas in the heatmap '
                               '(see the --missingDataColor argument of the '
                               'heatmapper for additional options).',
                          action='store_true')
    optional.add_argument('--skipZeros',
                          help='Whether regions with only scores of zero '
                               'should be included or not. Default is to '
                               'include them.',
                          action='store_true')
    optional.add_argument('--minThreshold',
                          default=None,
                          type=float,
                          help='Numeric value. Any region containing a '
                               'value that is equal or less than this '
                               'numeric value will be skipped. This is '
                               'useful to skip, for example, genes where '
                               'the read count is zero for any of the bins. '
                               'This could be the result of unmappable '
                               'areas and can bias the overall results.')
    optional.add_argument('--maxThreshold',
                          default=None,
                          type=float,
                          help='Numeric value. Any region containing a '
                               'value that is equal or higher than this '
                               'numeric value will be skipped. The '
                               'maxThreshold is useful to skip those few '
                               'regions with very high read counts (e.g. '
                               'major satellites) that may bias the average '
                               'values.')
    optional.add_argument('--verbose',
                          help='Set to see warning messages and other '
                               'information.',
                          action='store_true')
    optional.add_argument('--scale',
                          help='If set, all values are multiplied by this '
                               'number.',
                          type=float,
                          default=1)
    optional.add_argument('--numberOfProcessors', '-p',
                          help='Number of processors to use. Type "max/2" '
                               'to use half the maximum number of '
                               'processors or "max" to use all available '
                               'processors.',
                          metavar="INT",
                          type=numberOfProcessors,
                          default=cfg.config_get('general',
                                                 'default_proc_number'),
                          required=False)
    return parser
def right(self):
    self.servo_write(self.panServoId, "-" + config_get("pan_step"))
def config_reload(self):
    config_reload()
    # config
    self.center_frequency = config_get("capture.center_frequency", int)
    self.capture_samp_rate = config_get("capture.samp_rate", int)
    self.capture_gain = config_get("capture.gain", int)
    self.offline = False
    self.dump = False
    # trigger
    self.trigger_frequency = config_get("capture.trigger_frequency", int)
    self.trigger_delay = config_get("capture.delay", float)
    self.trigger_execution_time = config_get("capture.execution_time", float)
    # demod
    self.demod_select = config_get("capture.demod", int)
    self.demod_frequency = config_get("capture.demod_frequency", int)
    self.demod_lowpass = config_get("capture.demod_lowpass", int)
    self.demod_bandpass_low = config_get("capture.demod_bandpass_low", int)
    self.demod_bandpass_high = config_get("capture.demod_bandpass_high", int)
    # stft
    self.stft = config_get("preprocess.stft", bool)
    self.stft_log = config_get("preprocess.stft_log", bool)
    self.fft_len = config_get("preprocess.fft_len", int)
    self.fft_step = config_get("preprocess.fft_step", int)
def apply_config(self):
    config_reload()
    self.cmd = config_get("misc.cmd", str)
    self.ip = config_get("misc.ip", str)
    self.port = config_get("misc.port", int)
    self.test_value = n2hex(2**2048 - 1)
from pathlib import Path

BUILTIN_TASKS_FOLDER_PATH = str(
    (Path(__file__).parent / "../../tasks").resolve())

from config import config_get
from ast import literal_eval

# config format: list of paths to folders with .py files that contain
# a 'Task' class
TASKS_FOLDERS_PATHS = literal_eval(
    config_get("Core", "tasks_folders_paths", [BUILTIN_TASKS_FOLDER_PATH]))

# check that all elements in the list are str
if not all(isinstance(x, str) for x in TASKS_FOLDERS_PATHS):
    exit("'tasks_folders_paths' must be a list of strings only\n"
         "(try wrapping all elements in quotes)")

from glob import glob

py_files = []
for p in TASKS_FOLDERS_PATHS:
    py_files += glob(p + "/*.py")

module_by_name_dict = {}

from importlib import util as import_util

for m in py_files:
    # this is the module file name (as file.py), not the task name
    m_name = m.split("/")[-1]
    spec = import_util.spec_from_file_location(m_name, m)
    m_exe = import_util.module_from_spec(spec)
    spec.loader.exec_module(m_exe)
    task_name = m_exe.TASK_NAME
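# What a loadable task file might look like, inferred from the loader above:
# each .py file in a tasks folder exposes a module-level TASK_NAME and a Task
# class. The method names follow the Stats logger snippet elsewhere in this
# collection; the task itself is an assumed example.
TASK_NAME = "example_sum"


class Task:
    def getTextSolving(self):
        return "2 + 2 = ?"

    def getTextWrong(self, ans):
        return "wrong answer: " + str(ans)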
import discord
import sqlalchemy
import re

from config import config_get
from db import Session, reset_db, Message

reset_db()
s = Session()
config = config_get()


def all(iterable, func):
    # note: shadows the built-in all(); applies func to every element
    for x in iterable:
        if not func(x):
            return False
    return True


def strip(iterable):
    for x in iterable:
        yield x.strip()


class R9kBot(discord.Client):
    async def on_ready(self):
        print('Logged on as', self.user)

    async def on_message(self, message):
        if message.author == self.user: