def load_avro_schema_string(self, topic_name):
    if topic_name not in self.config_avro_location:
        raise ErrorHandler("Error. Application does not have avro schema for requested topic")
    try:
        with open(self.directory_avro_schemas + "/" + self.config_avro_location.get(topic_name),
                  'r') as schema_file:
            return schema_file.read().replace('\n', '')
    except Exception as e:
        raise ErrorHandler("Error. Unable to load schema: " + str(e))
Example #2
def replace_docx(path, old_str, new_str):
    doc = open_docx(path)
    # validate parameters: both must be strings (this also rejects None)
    if not isinstance(old_str, str) or not isinstance(new_str, str):
        e = "the old_str and new_str parameters of replace_docx must be strings and not None"
        return ErrorHandler(path, e)
    # replace the string
    try:
        # paragraph processing
        for para in doc.paragraphs:
            # find the index of every match in this paragraph
            if old_str in para.text:
                matchedIndices = findMatchedIndices(para.text, 0, old_str)
                for i_matchIndex, matchIndex in enumerate(matchedIndices):
                    # shift the match index to compensate for earlier
                    # replacements when new_str and old_str differ in length
                    diff = len(new_str) - len(old_str)
                    updatedMatchedIndex = i_matchIndex * diff + matchIndex
                    matchedObj = MatchedRunObj(para, updatedMatchedIndex,
                                               old_str, new_str)
                    matchedObj.findRelatedRunIndex()
                    print("indexOfFirstMatchedChar: ",
                          matchedObj.indexOfFirstMatchedChar)
                    print("indexOfLastMatchedChar: ",
                          matchedObj.indexOfLastMatchedChar)
                    matchedObj.replaceString()

        # table processing
        for table in doc.tables:
            for r in table.rows:
                for cell in r.cells:
                    for para in cell.paragraphs:
                        if old_str in para.text:
                            matchedIndices = findMatchedIndices(
                                para.text, 0, old_str)
                            for i_matchIndex, matchIndex in enumerate(
                                    matchedIndices):
                                # shift the match index to compensate for
                                # earlier replacements of different length
                                diff = len(new_str) - len(old_str)
                                updatedMatchedIndex = i_matchIndex * diff + matchIndex
                                matchedObj = MatchedRunObj(
                                    para, updatedMatchedIndex, old_str,
                                    new_str)
                                matchedObj.findRelatedRunIndex()
                                print("indexOfFirstMatchedChar: ",
                                      matchedObj.indexOfFirstMatchedChar)
                                print("indexOfLastMatchedChar: ",
                                      matchedObj.indexOfLastMatchedChar)
                                matchedObj.replaceString()
        # heading processing could be added here

        # save the modified document (raw string avoids backslash escapes)
        doc.save(r'.\sample\ex2.docx')
    except AttributeError as e:
        errorObj = ErrorHandler(path, e)
        errorObj.showError()
        return errorObj
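replace_docx depends on a findMatchedIndices helper that is not shown here. A plausible sketch, assuming it returns the start index of every occurrence of old_str in text beginning at a given offset:

def findMatchedIndices(text, start, old_str):
    # collect the start index of every occurrence of old_str in text
    indices = []
    i = text.find(old_str, start)
    while i != -1:
        indices.append(i)
        i = text.find(old_str, i + len(old_str))
    return indices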
Example #3
def __retrieve_partition_data(self, topic):
    """
    Returns partition data for given topic.
    :param topic: string, name of topic being searched
    :return: confluent_kafka.TopicPartition()
    """
    if topic not in self.consumer_topic_list:
        raise ErrorHandler(
            "Application does not have access to requested topic: " +
            topic)
    try:
        return self.consumer_topic_list.get(topic).partitions
    except Exception as e:
        raise ErrorHandler("Error retrieving partition data: " + str(e))
Example #4
def convert_avro_msg(self, msg):
    try:
        return self.deserializer(msg.value(), self.serial_context)
    except Exception as e:
        raise ErrorHandler(
            "Error deserializing avro message. Check registry settings: " +
            str(e))
Example #5
def __init__(self, file_name):
    self.file_name = file_name
    self.stack = Stack()
    self.stack.push("EOF")
    self.stack.push("Source")
    self.parser_table = PARSER_TABLE
    self.symbol_table = OOPSymbolTable()
    self.semantic_stack = Stack()
    self.memory_manager = MemoryManager(1000, 2000)
    self.scanner = Scanner(file_name, self.symbol_table)
    self.next_token = self.scanner.get_next_token()
    self.top_stack = self.stack.top()
    self.rule_number = None
    self.rule = ""
    self.grammar = GRAMMAR
    self.error_handler = ErrorHandler(self.scanner)
    self.symbol_table.set_error_handler(self.error_handler)
    self.semantic_analyzer = SemanticAnalyzer(self.symbol_table,
                                              self.memory_manager,
                                              self.semantic_stack,
                                              self.error_handler)
    self.code_generator = CodeGenerator(self.symbol_table,
                                        self.semantic_stack,
                                        self.memory_manager)
    self.current_identifier = None
    self.follow = FOLLOW
    self.non_terminal = 0
    self.must_get = False
Example #6
def __init__(self, environment):
    self.avro_deserializer = None
    try:
        self.consumer = ConsumerConnectionManager.initialize_kafka_consumer(
            environment)
        try:
            # Retrieve list of topics from KafkaConsumer(). timeout is set as float(10).
            # Timeout errors typically signify a connection error to kafka-broker.
            self.consumer_topic_list = self.consumer.list_topics(
                timeout=float(10)).topics
        except Exception as e:
            raise ErrorHandler(
                "Error retrieving list of available topics. Check Kafka connection settings. Error: "
                + str(e))
    except Exception as e:
        raise ErrorHandler("Error initializing KafkaReader(): " + str(e))
Example #7
    def search(cls, params):
        response = {}
        # Start connection to queue manager
        try:
            queue_manager = QueueManager()
        except Exception as e:
            raise ErrorHandler(
                "Unable to connect to QueueManager, please try again. Error code: "
                + str(e))

        # Iterate through the search queue list and browse all messages,
        # returning all messages that match the search param
        for name in params.get('queue_list'):
            queue_suffix = params.get('queue_suffix')
            try:
                browser = MQReader(queue_manager, name,
                                   params.get('base_queues'), queue_suffix)
                found_msgs = browser.search_for_msgs(
                    params.get('search_string'), params.get('delimiter'),
                    params.get('not_before'))
                browser.close_browser_connection()
            except Exception as e:
                response["QUEUE___" + name + '.' +
                         queue_suffix] = "queue was unresponsive " + str(e)
                continue
            response["QUEUE___" + name + '.' + queue_suffix] = found_msgs

        # Close connection to queue manager
        try:
            queue_manager.close_manager_connection()
        except Exception as e:
            response[
                'ERROR'] = "ISSUE CLOSING QUEUE MANAGER error code: " + str(e)

        return response
Example #8
    def __parse_incoming_request(cls, request):
        """
        Parse the incoming request and convert it to a dict.
        :return: parsed_request
        :rtype: dict
        """
        # Merges form(UI calls), args(Postman form-data) params
        parsed_request = {**request.form.to_dict(), **request.args.to_dict()}

        # Extract json_topics from splash.html if present
        json_topics = request.form.getlist(
            constants.REQUEST_UI_FORM_JSON_TOPICS_KEY)
        if len(json_topics) > 0:
            parsed_request[cls.request_json_topics_key] = json_topics
            parsed_request.pop(constants.REQUEST_UI_FORM_JSON_TOPICS_KEY)

        # Extract avro_topics from splash.html if present
        avro_topics = request.form.getlist(
            constants.REQUEST_UI_FORM_AVRO_TOPICS_KEY)
        if len(avro_topics) > 0:
            parsed_request[cls.request_avro_topics_key] = avro_topics
            parsed_request.pop(constants.REQUEST_UI_FORM_AVRO_TOPICS_KEY)

        # Support for JAVA JSON body requests
        if request.json:
            parsed_request = {**parsed_request, **request.json}

        # Support for POSTMAN raw JSON body requests
        if len(parsed_request) == 0:
            parsed_request = json.loads(request.data.decode())

        if len(parsed_request) == 0:
            raise ErrorHandler("Error parsing params")

        return parsed_request
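The first merge line relies on the {**a, **b} idiom: keys in the right-hand dict win on collision, so query-string args override form fields. A small illustration:

# {**a, **b} merges dicts; later keys win on collision,
# so request.args overrides request.form in the snippet above.
form = {'searchParam': 'from-form', 'limit': '10'}
args = {'searchParam': 'from-args'}
merged = {**form, **args}
assert merged == {'searchParam': 'from-args', 'limit': '10'}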
Example #9
    def __init__(self, error_callback=None):
        self.error_handler = ErrorHandler(error_callback)

        storage_name = Core.storage.abs_path(
            Core.storage.join_path(Core.bundle_path, 'Contents',
                                   'music.storage'))
        self.music_queue = PlexStorage(storage_name)
Example #10
    def parse_request(cls, request):
        params = {}
        # Merges form(UI calls), args(Postman form-data) params
        arguments = {**request.form.to_dict(), **request.args.to_dict()}

        # Extract queue_list[] from splash.html if present
        queue_list = request.form.getlist("queue_list[]")
        if len(queue_list) > 0:
            arguments['queue_list'] = queue_list
            arguments.pop("queue_list[]")

        # Support for JAVA JSON body requests
        if request.json:
            arguments = {**arguments, **request.json}

        # Support for POSTMAN raw JSON body requests
        if len(arguments) == 0:
            arguments = json.loads(request.data.decode())

        if len(arguments) == 0:
            raise ErrorHandler("Error parsing params")

        # Extract params from request
        search_string = arguments.get('searchParam', None)
        if not search_string:
            raise ErrorHandler(
                "Required param 'searchParam' not found. If you are receiving this error in Postman "
                "and have included the searchParam, "
                "please uncheck the Content-Type header option "
                "application/x-www-form-urlencoded and try again")

        params['search_string'] = search_string.strip().lower()
        params['queue_suffix'] = arguments.get('queueSuffix',
                                               'DEFAULT.QUEUE.SUFFIX').strip()
        params['delimiter'] = arguments.get('includeDelimiter',
                                            'false').strip().lower()
        not_before = arguments.get('notBefore', 'false').strip().lower()
        if not_before != 'false' and not_before != '':
            not_before = cls.convert_not_before(not_before)

        params['not_before'] = not_before
        params['base_queues'] = arguments.get('BASE.QUEUE.PREFIX',
                                              'false').strip().lower()

        params['queue_list'] = arguments.get('queue_list', [])

        return params
Example #11
def __init__(self, environment):
    try:
        self.registry_client = RegistryClient(environment).registry_client
        self.msg_field = MessageField()
        self.deserializer = None
        self.serial_context = None
    except Exception as e:
        raise ErrorHandler("Unable to initialize AvroClient(): " + str(e))
Example #12
def __init__(self, file_name, symbol_table):
    self.symbolTable = symbol_table
    self.currentIndex = 0
    self.startTokenIndex = 0
    self.error_handler = ErrorHandler(self)
    # read the whole source file; a context manager ensures it is closed
    with open(file_name) as input_file:
        self.inputCode = input_file.read()
    self.lastToken = None
Example #13
def __init__(self, filebase='todos'):
    self.error_handler = ErrorHandler()
    self.io_controller = IoController(filebase + '.csv')
    try:
        self.todos = self.io_controller.load()
    except FileNotFoundError:
        self.todos = []
        self.save()
Example #14
def convert_not_before(cls, not_before):
    try:
        # convert notBefore param into 'aware' datetime object
        return pytz.UTC.localize(datetime.fromisoformat(not_before))
    except Exception as e:
        raise ErrorHandler(
            "Error parsing DateTime 'notBefore' -> " + not_before +
            ". Value must be of format "
            "DateTimeFormatter.ofPattern('yyyy-MM-dd HH:mm:ss') " + str(e))
Example #15
def load_deserializer(self, topic_name):
    try:
        self.deserializer = Deserializer(
            self.registry_client).create_avro_deserializer(topic_name)
        self.serial_context = SerializationContext(topic_name,
                                                   self.msg_field)
    except Exception as e:
        raise ErrorHandler(
            "Unable to load deserializer for topic: " + topic_name +
            ". Please check avro schema provided in avro schemas directory: "
            + str(e))
Example #16
    def __stream_comments(self):
        try:
            stream = self.reddit.subreddit(self.settings['subreddit']).stream.comments(skip_existing=True)

            for comment in stream:
                self.logger.info("Parsing comment {}".format(comment.id))
                if self.__parent_is_approvable(comment):
                    self.__approve_parent_submission(comment)

        except Exception as exception:
            ErrorHandler(self, self.logger).handle(exception)
            self.__stream_comments()
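Note that restarting the stream by calling __stream_comments() inside its own exception handler deepens the call stack on every failure (Example #19 below uses the same pattern for submissions). A loop-based sketch with the same retry behavior:

    def __stream_comments(self):
        while True:
            try:
                stream = self.reddit.subreddit(
                    self.settings['subreddit']).stream.comments(skip_existing=True)
                for comment in stream:
                    self.logger.info("Parsing comment {}".format(comment.id))
                    if self.__parent_is_approvable(comment):
                        self.__approve_parent_submission(comment)
            except Exception as exception:
                # handle the error, then loop around and re-open the stream
                ErrorHandler(self, self.logger).handle(exception)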
Example #17
    def validateMetadataFile(self):
        # check if metadata exists
        if not self.checkMetadata():
            return

        # check metadata standard
        standard = MetaInfoStandard.tryDetermineStandard(self.metaProvider)
        if standard != MetaInfoStandard.FGDC:
            QMessageBox.critical(
                self.iface.mainWindow(),
                QCoreApplication.translate("Metatools", "Metatools"),
                QCoreApplication.translate(
                    "Metatools",
                    "Unsupported metadata standard! Only FGDC supported now!"))
            return

        from PyQt4.QtXmlPatterns import QXmlSchema, QXmlSchemaValidator
        # TODO: validate metadata file

        # setup xml schema
        schema = QXmlSchema()

        # setup handler
        self.handler = ErrorHandler(
            QCoreApplication.translate("Metatools", "Metadata is invalid"))
        schema.setMessageHandler(self.handler)

        # load schema from file
        xsdFilePath = self.pluginPath + '/xsd/fgdc/fgdc-std-001-1998.xsd'
        #if standard != MetaInfoStandard.FGDC:
        #    xsdFilePath = 'c:/xsd/gml/basicTypes.xsd' #   gmd/gmd.xsd'
        schemaUrl = QUrl(xsdFilePath)
        loadResult = schema.load(schemaUrl)
        if not loadResult or self.handler.errorOccured:
            QMessageBox.critical(
                self.iface.mainWindow(),
                QCoreApplication.translate("Metatools", "Metatools"),
                QCoreApplication.translate("Metatools",
                                           "Validation schema could not be loaded!"))
            return

        # setup validator
        validator = QXmlSchemaValidator(schema)
        validator.setMessageHandler(self.handler)

        # validate
        metadata = self.metaProvider.getMetadata().encode('utf-8')
        if validator.validate(metadata):
            QMessageBox.information(
                self.iface.mainWindow(),
                QCoreApplication.translate("Metatools", "Metatools"),
                QCoreApplication.translate("Metatools", "Metadata is valid!"))
Example #18
    def __validate_params(cls, params):
        """
        Catch and throw all invalid request exceptions here.
        Current Validations:
        - no search_string included in request
        - no valid topics included in request
        """
        # Validate search_string was included in request
        if not params.get(cls.param_search_string_key):
            raise ErrorHandler(
                "Invalid request. Required param: " +
                cls.request_search_string_key +
                " not found. If you are receiving this error in Postman "
                "and have included the required key, "
                "please uncheck the Content-Type header option "
                "application/x-www-form-urlencoded and try again")

        # Validate that at least one valid topic was included in the request
        if len(params[cls.param_json_topics_key]) == 0 and len(
                params.get(cls.param_avro_topics_key)) == 0:
            raise ErrorHandler(
                "Invalid request. No valid topics selected for search")
Example #19
0
    def __stream_submissions(self):
        try:
            stream = self.reddit.subreddit(
                self.settings['subreddit']).stream.submissions(
                    skip_existing=True)

            for submission in stream:
                self.logger.info("Parsing submission {}".format(submission.id))
                if self.__should_remove_submission(submission):
                    self.logger.info(
                        "Removing submission {} [{}] by /u/{}...".format(
                            submission.id, submission.url, submission.author))
                    self.__handle_removal(submission)

        except Exception as exception:
            ErrorHandler(self, self.logger).handle(exception)
            self.__stream_submissions()
Example #20
0
    def file_split(self, input_file, output_dir, lines_per_file, header):
        smallfile = None
        try:
            input_path, input_f = os.path.split(input_file)
            new_file_name = input_f.split(".")[0]
            new_file_ext = input_f.split(".")[1]
            file_types = self.property_reader.file_types()
            file_type_extn = file_types.split(",")
            if new_file_ext in file_type_extn:
                new_output_dir = output_dir + "/" + new_file_name
                success_dir = self.property_reader.success_dir()
                success_file = success_dir + "/" + input_f
                self.futil.create_dir(new_output_dir)
                self.futil.create_dir(success_dir)
                filecount = 0
                with open(input_file) as bigfile:
                    for line_number, line in enumerate(bigfile):
                        if line_number % lines_per_file == 0:
                            if smallfile:
                                smallfile.close()
                                filecount = filecount + 1
                            small_filename = (new_file_name + '_{}.' +
                                              new_file_ext).format(filecount)
                            smallfile = open(
                                new_output_dir + "/" + small_filename, "w")
                        if line_number % lines_per_file == 0:
                            smallfile.write(header)
                        if line_number > 0:
                            smallfile.write(line)
                    if smallfile:
                        smallfile.close()
                self.futil.move_file(input_file, success_file)
            else:
                print("file extension not supported")

        except Exception:
            failure_dir = self.property_reader.failure_dir()
            failure_file = failure_dir + "/" + input_f
            eh = ErrorHandler()
            eh.error_handle(new_output_dir, failure_file, input_file)
Example #21
0
    def __init__(self, file_name):
        self.symbol_table = Symbol_table()
        self.scanner = Scanner(file_name, self.symbol_table)
        self.error_handler = ErrorHandler(self.scanner)
        self.semantic_stack = Stack()
        self.last_token = None
        self.memory_manager = MemoryManager(start=1000)
        self.semantic_analyzer = SemanticAnalyzer(semantic_stack=self.semantic_stack,
                                                  memory_manager=self.memory_manager,
                                                  symbol_table=self.symbol_table,
                                                  error_handler=self.error_handler)
        self.code_generator = Code_generator(symbol_table=self.symbol_table,
                                             semantic_stack=self.semantic_stack,
                                             memory_manager=self.memory_manager)

        self.stack = [0]
        with open('parse_table.csv', 'r') as f:
            self.parse_table = [dict(row)
                                for row in csv.DictReader(f, skipinitialspace=True)]
        self.next_token = None
        self.token_history = []
Example #22
def __build_consumer_dict(cls, environment):
    try:
        config = ConnectionConfig.connection_details
        return {
            'bootstrap.servers': config['bootstrap.servers'][environment],
            'group.id': config.get('group.id'),
            'client.id': config.get('client.id'),
            'enable.auto.commit': config.get('enable.auto.commit').lower() == 'true',
            'session.timeout.ms': int(config.get('session.timeout.ms')),
            'default.topic.config': {
                'auto.offset.reset': config['default.topic.config']['auto.offset.reset']},
            'security.protocol': config.get('security.protocol'),
            'ssl.key.location': config['ssl'][environment]['ssl.key.location'],
            'ssl.key.password': config['ssl'][environment]['ssl.key.password'],
            'ssl.certificate.location': config['ssl'][environment]['ssl.certificate.location'],
            'ssl.ca.location': config['ssl.ca.location'],
            'enable.partition.eof': config.get('enable.partition.eof').lower() != 'false',
            'api.version.request': config.get('api.version.request').lower() != 'false'
        }
    except KeyError as e:
        raise ErrorHandler("Missing required key from main config file. Missing key: " + str(e))
Example #23
    def setContent(self, metaProvider, xsltFilePath):
        # load data
        xsltFile = QFile(xsltFilePath)
        xsltFile.open(QIODevice.ReadOnly)
        xslt = unicode(xsltFile.readAll())
        xsltFile.close()

        src = metaProvider.getMetadata()

        # translate
        qry = QXmlQuery(QXmlQuery.XSLT20)

        self.handler = ErrorHandler(self.tr("Translation error"))
        qry.setMessageHandler(self.handler)

        qry.setFocus(src)
        qry.setQuery(xslt)

        result = qry.evaluateToString()

        #workaround, for PyQt < 4.8
        #array = ""
        #buf = QBuffer(array)
        #buf.open(QIODevice.WriteOnly)
        #qry.evaluateTo(buf)
        #result = unicode(array)

        if result:
            # QXmlPatterns does not support CDATA sections
            result = result.replace('&amp;', '&')
            result = result.replace('&gt;', '>')
            result = result.replace('&lt;', '<')

            self.webView.setHtml(result)  # QString.fromUtf8(result))
            return True
        else:
            return False
Example #24
from error_handler import ErrorHandler
from input_reader import InputReader
from output_writer import OutputWriter
from trace_manager import TraceManager
from trace_parser import TraceParser
from threading import Thread

error_handler = ErrorHandler()
writer = OutputWriter(error_handler)
trace_manager = TraceManager(writer)
parser = TraceParser(error_handler, trace_manager)
reader = InputReader(parser)

print("Start parsing")
clean_thread = Thread(target=trace_manager.clean_traces)
clean_thread.start()

reader.read()
Example #25
def validate_params(cls, params):
    if len(params['queue_list']) == 0:
        raise ErrorHandler(
            "No valid queues selected for search, ensure queue names are in ALL CAPS"
        )
Example #26
def main():

    #keep the following lines close to the beginning of main because the laser is switched on until pin_out.value(1)
    pin_out1 = pyb.Pin('Y1', pyb.Pin.OUT_PP, pull=pyb.Pin.PULL_UP)
    pin_out1.value(not cfg.on_value_out_channel1)
    pin_out2 = pyb.Pin('Y3', pyb.Pin.OUT_PP, pull=pyb.Pin.PULL_UP)
    pin_out2.value(not cfg.on_value_out_channel2)

    #Open drain pins
    pin_out3 = pyb.Pin('Y12', pyb.Pin.OUT_OD)
    pin_out3.value(not cfg.on_value_out_channel3)
    pin_out4 = pyb.Pin('X2', pyb.Pin.OUT_OD)
    pin_out4.value(not cfg.on_value_out_channel4)

    #Amplitude modulation pins
    pin_out5 = pyb.Pin('X5', pyb.Pin.OUT_PP, pull=pyb.Pin.PULL_DOWN)
    pin_out5.value(not cfg.on_value_out_channel5)
    dac5 = pyb.DAC(1)
    dac5.write(not cfg.on_value_out_channel5)
    pin_out6 = pyb.Pin('X6', pyb.Pin.OUT_PP, pull=pyb.Pin.PULL_DOWN)
    pin_out6.value(not cfg.on_value_out_channel6)
    dac6 = pyb.DAC(2)
    dac6.write(not cfg.on_value_out_channel6)

    pin_outLED = pyb.LED(4)

    use_wo_server = False
    global trigger_received

    serial_port = USB_Port()
    pkt = Packet(serial_port)

    armedLED = pyb.LED(3)  #indicates when the system is waiting for a trigger
    triggerLED = pyb.LED(2)  #indicates when the system is delivering a sequence

    try:
        file_paths = [
            cfg.library_path + '/' + s
            for s in ospath.listdir_nohidden(cfg.library_path)
        ]
    except OSError:  #if path does not exist listdir raises an OSError
        try:
            file_paths = [
                '/sd/sequence_library/' + s
                for s in ospath.listdir_nohidden('/sd/sequence_library/')
            ]
        except OSError:
            try:
                file_paths = [
                    '/flash/sequence_library/' + s for s in
                    ospath.listdir_nohidden('/flash/sequence_library/')
                ]
            except OSError:
                file_paths = []

    sw = pyb.Switch()

    pyb.LED(2).on()
    pyb.LED(3).on()
    pyb.LED(4).on()

    #try to connect to server
    answer = None
    active = 0
    debounce_time = 20
    double_click_time = 400
    send_repetition = 1000  #Generally 1sec should be enough for the computer to answer.
    reps = 0
    first_push_time = 0
    while not use_wo_server or pyb.elapsed_millis(
            first_push_time) < double_click_time:
        reps += 1
        if reps % send_repetition == 0:
            pkt.send(pkt.INS_check_for_sequences_on_server)
        answer = pkt.receive(time_out=1)
        if answer is not None:
            break
        if sw() == True:
            active += 1
        else:
            active = 0
            continue
        if active == debounce_time:
            if first_push_time == 0:
                first_push_time = pyb.millis()
                use_wo_server = True
            elif pyb.elapsed_millis(first_push_time) < double_click_time:
                open('/flash/bootincopymode', 'a').close()
                pyb.hard_reset()

    pyb.LED(2).off()
    pyb.LED(3).off()
    pyb.LED(4).off()

    if answer == pkt.ANS_yes:
        answer = pkt.ANS_no
        if len(file_paths) >= 1:
            pkt.send(pkt.INS_ask_user)
            answer = pkt.receive()
        if answer == pkt.ANS_yes or len(file_paths) == 0:
            pkt.send(pkt.INS_send_sequences)
            path = ''
            if ospath.exists('/sd'):
                path = '/sd/sequence_library'
            elif ospath.exists('1:'):
                path = '1:/sequence_library'  #older versions of the pyboard use 0:/ and 1:/ instead of /flash and /sd
            elif ospath.exists('/flash'):
                path = '/flash/sequence_library'
            elif ospath.exists('0:'):
                path = '0:/sequence_library'
            if not ospath.exists(path):
                uos.mkdir(path)
            for s in uos.listdir(path):
                uos.remove(path + '/' + s)
            sequence_idx = 0
            rcvd_pkt = pkt.receive()
            while type(rcvd_pkt) == list:
                with open(path + '/sequence' + str(sequence_idx) + '.tsv',
                          'w+') as fp:
                    fp.write(tsv.dumps(rcvd_pkt))
                rcvd_pkt = pkt.receive()
                sequence_idx += 1
            file_paths = [path + '/' + s for s in uos.listdir(path)]
    elif len(file_paths) == 0:
        pkt.send(
            'Error: No sequences found! You can generate sequences using COSgen.'
        )
        raise ValueError(
            'No sequences found on pyboard or server. Copy sequences to the sd card and specify the path in "config.py".'
        )

    storage_path = ''  #path to folder where delivered sequences are stored if not connected to client software on server
    delivered_sequence_idx = 0  #index for naming the sequence files in storage_path
    if use_wo_server:
        if ospath.exists('/sd'):
            if not ospath.exists('/sd/delivered_sequences'):
                uos.mkdir('/sd/delivered_sequences')
            path = '/sd/delivered_sequences/sequences'
        elif ospath.exists('1:'):
            if not ospath.exists('1:/delivered_sequences'):
                uos.mkdir('1:/delivered_sequences')
            path = '1:/delivered_sequences/sequences'
        elif ospath.exists('/flash'):
            if not ospath.exists('/flash/delivered_sequences'):
                uos.mkdir('/flash/delivered_sequences')
            path = '/flash/delivered_sequences/sequences'
        elif ospath.exists('0:'):
            if not ospath.exists('0:/delivered_sequences'):
                uos.mkdir('0:/delivered_sequences')
            path = '0:/delivered_sequences/sequences'

        idx = 0
        while ospath.exists(path + str(idx)):
            idx += 1
        path = path + str(idx)
        uos.mkdir(path)
        storage_path = path

    eh = ErrorHandler(use_wo_server, pkt, storage_path)

    if cfg.accuracy == 'us':
        ticks = utime.ticks_us  #function used for time measurement
        sleep = utime.sleep_us  #Corresponding sleep function for ticks
        conversion_factor = 1000000  #converts seconds to the unit specified in cfg.accuracy
    elif cfg.accuracy == 'ms':
        ticks = utime.ticks_ms
        sleep = utime.sleep_ms
        conversion_factor = 1000
    tmax = int(utime.ticks_add(0, -1) / 2)

    extint = pyb.ExtInt('X1', pyb.ExtInt.IRQ_FALLING, pyb.Pin.PULL_DOWN,
                        callback_trigger)
    extint.disable()

    num_seq = len(file_paths)
    pkt.send('Size of sequence library: {0}'.format(num_seq))

    while True:
        seq_index = random.randrange(num_seq)
        with open(file_paths[seq_index]) as f:
            seq = tsv.load(f)
        pkt.send('Current sequence:\n' + tsv.dumps(seq))
        num_of_events = len(seq) - 1
        range_of_events = range(num_of_events - 1)
        onset_column = seq[0].index('onset')
        frequency_column = seq[0].index('frequency')
        duration_column = seq[0].index('duration')
        pulse_width_column = seq[0].index('pulse_width')
        out_channel_column = seq[0].index('out_channel')
        amplitude_column = seq[0].index('amplitude')
        T = [int(1. / seq[1][frequency_column] * conversion_factor)]
        onset = [int(seq[1][onset_column] * conversion_factor)]
        onset_sleep = [onset[0] - onset[0] % tmax]
        num_pulses = [
            round(seq[1][duration_column] * conversion_factor / T[0])
        ]
        pulse_width = [int(seq[1][pulse_width_column] * conversion_factor)]
        pulse_sleep = [pulse_width[0] - pulse_width[0] % tmax]
        if seq[1][out_channel_column] == 1:
            pin_out_func = [pin_out1.value]
            amplitude = [1]
            on_value = [cfg.on_value_out_channel1]
        elif seq[1][out_channel_column] == 2:
            pin_out_func = [pin_out2.value]
            amplitude = [1]
            on_value = [cfg.on_value_out_channel2]
        elif seq[1][out_channel_column] == 3:
            pin_out_func = [pin_out3.value]
            amplitude = [1]
            on_value = [cfg.on_value_out_channel3]
        elif seq[1][out_channel_column] == 4:
            pin_out_func = [pin_out4.value]
            amplitude = [1]
            on_value = [cfg.on_value_out_channel4]
        elif seq[1][out_channel_column] == 5:
            pin_out_func = [dac5.write]
            amplitude = [int(seq[1][amplitude_column] * 255)]
            on_value = [cfg.on_value_out_channel5]
        elif seq[1][out_channel_column] == 6:
            pin_out_func = [dac6.write]
            amplitude = [int(seq[1][amplitude_column] * 255)]
            on_value = [cfg.on_value_out_channel6]
        else:
            raise SequenceError(
                'Invalid sequence {0}. Unrecognized out channel {1}.\n'.
                format(file_paths[seq_index], seq[1][out_channel_column]))

        if T[0] < seq[1][pulse_width_column] * conversion_factor:
            raise SequenceError(
                "Invalid sequence {0}. Period is smaller than pulse width.\n".
                format(file_paths[seq_index]))
        for i in range(2, num_of_events):
            T.append(int(1. / seq[i][frequency_column] * conversion_factor))
            onset.append(int(seq[i][onset_column] * conversion_factor))
            onset_sleep.append(onset[i - 1] - onset[i - 2] -
                               (onset[i - 1] - onset[i - 2]) % tmax)
            num_pulses.append(
                round(seq[i][duration_column] * conversion_factor / T[i - 1]))
            pulse_width.append(
                int(seq[i][pulse_width_column] * conversion_factor))
            pulse_sleep.append(pulse_width[i - 1] - pulse_width[i - 1] % tmax)
            if seq[i][out_channel_column] == 1:
                pin_out_func.append(pin_out1.value)
                amplitude.append(1)
                on_value.append(cfg.on_value_out_channel1)
            elif seq[i][out_channel_column] == 2:
                pin_out_func.append(pin_out2.value)
                amplitude.append(1)
                on_value.append(cfg.on_value_out_channel2)
            elif seq[i][out_channel_column] == 3:
                pin_out_func.append(pin_out3.value)
                amplitude.append(1)
                on_value.append(cfg.on_value_out_channel3)
            elif seq[i][out_channel_column] == 4:
                pin_out_func.append(pin_out4.value)
                amplitude.append(1)
                on_value.append(cfg.on_value_out_channel4)
            elif seq[i][out_channel_column] == 5:
                pin_out_func.append(dac5.write)
                amplitude.append(int(seq[i][amplitude_column] * 255))
                on_value.append(cfg.on_value_out_channel5)
            elif seq[i][out_channel_column] == 6:
                pin_out_func.append(dac6.write)
                amplitude.append(int(seq[i][amplitude_column] * 255))
                on_value.append(cfg.on_value_out_channel6)
            else:
                raise SequenceError(
                    'Invalid sequence {0}. Unrecognized out channel {1}.\n'.
                    format(file_paths[seq_index], seq[i][out_channel_column]))
            if T[i - 1] < seq[i][pulse_width_column] * conversion_factor:
                raise SequenceError(
                    "Invalid sequence {0}. Period is smaller than pulse width.\n"
                    .format(file_paths[seq_index]))

        pkt.send('Ready to be armed!')
        trigger_received = False
        sw.callback(None)
        active = 0
        while active < 20:
            if sw() == True:
                active += 1
            else:
                active = 0
            pyb.delay(1)
        pyb.delay(200)
        armedLED.on()
        pkt.send('System armed!')

        sw.callback(callback_trigger2)  #for test purposes the switch can be used to trigger
        extint.enable()
        while not trigger_received:
            utime.sleep_us(1)

        start_ticks = ticks()
        triggerLED.on()
        armedLED.off()
        extint.disable()
        pkt.send('Trigger received!')
        for i in range_of_events:
            sleep(onset_sleep[i])
            scheduled_time = utime.ticks_add(start_ticks, onset[i])
            pulse = 0
            while pulse < num_pulses[i]:
                if utime.ticks_diff(ticks(), scheduled_time) < 0:
                    sleep(utime.ticks_diff(scheduled_time, ticks()))
                    deliver_pulse(pin_out_func[i], amplitude[i],
                                  pulse_width[i], pulse_sleep[i], pin_outLED,
                                  eh, ticks, sleep, on_value[i])
                elif utime.ticks_diff(ticks(), scheduled_time) == 0:
                    deliver_pulse(pin_out_func[i], amplitude[i],
                                  pulse_width[i], pulse_sleep[i], pin_outLED,
                                  eh, ticks, sleep, on_value[i])
                elif utime.ticks_diff(ticks(), scheduled_time) > 0:
                    now = ticks()
                    deliver_pulse(pin_out_func[i], amplitude[i],
                                  pulse_width[i], pulse_sleep[i], pin_outLED,
                                  eh, ticks, sleep, on_value[i])
                    eh.send(
                        "Missed scheduled onset time of pulse in event {0} by {1} {2} "
                        .format(i, utime.ticks_diff(now, scheduled_time),
                                cfg.accuracy))
                scheduled_time = utime.ticks_add(scheduled_time, T[i])
                pulse += 1
        if not use_wo_server:
            pkt.send(seq)
        else:
            eh.save()
            with open(
                    storage_path + '/sequence' + str(delivered_sequence_idx) +
                    '.tsv', 'w+') as fp:
                fp.write(tsv.dumps(seq))
            delivered_sequence_idx += 1
        triggerLED.off()
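main() calls a deliver_pulse helper that is not shown on this page. A sketch consistent with its call sites, assuming utime is already imported and that on_value gives the logic level that switches the channel on (this is a reconstruction, not the original):

def deliver_pulse(pin_out_func, amplitude, pulse_width, pulse_sleep,
                  pin_outLED, eh, ticks, sleep, on_value):
    #drive the channel to its 'on' level, hold for pulse_width, then release
    start = ticks()
    pin_outLED.on()
    pin_out_func(amplitude if on_value else 0)  #active-high vs active-low
    sleep(pulse_sleep)  #coarse sleep, pre-chunked below tmax by the caller
    while utime.ticks_diff(ticks(), start) < pulse_width:
        pass  #busy-wait the remainder for accuracy
    pin_out_func(0 if on_value else amplitude)
    pin_outLED.off()
    #eh is available for reporting timing problems; unused in this sketch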
Example #27
    def search_for_msgs(self, request_params, topic, message_type):
        """
        Handles browsing of the requested topic's partitions and passes messages to the given message type's parser.
        Generic parsers (json and avro) are provided below. You may add additional parsers to fit your needs.
        :param request_params: dict()
        :param topic: string, name of topic being searched
        :param message_type: string, type of messages being polled and parsed
        :return: list() of all messages in given topic that include request search_string
        """
        messages = []
        partitions = self.__retrieve_partition_data(topic)
        for k, partition in partitions.items():
            self.consumer.assign([TopicPartition(topic, partition.id)])
            while True:
                try:
                    msg = self.consumer.poll(.5)
                    if msg is None:
                        continue
                    elif not msg.error():
                        parsed_msg = None
                        # Add more message types here if desired; out of the box only json and avro types are provided
                        if message_type == 'json':
                            parsed_msg = self.__parse_json_msg(
                                request_params, msg)
                        elif message_type == 'avro':
                            parsed_msg = self.__parse_avro_msg(
                                request_params, msg)
                        if parsed_msg:
                            messages.insert(0, parsed_msg)
                            if request_params.get(
                                    self.param_include_delimiter_key
                            ) == 'true':
                                messages.insert(
                                    1,
                                    "##############################################################"
                                )
                                messages.insert(
                                    2,
                                    "####################  MESSAGE SEPARATOR  #####################"
                                )
                                messages.insert(
                                    3,
                                    "##############################################################"
                                )

                    elif msg.error():
                        # If Kafka end of partition error received,
                        # unassign consumer from partition and continue iteration.
                        if msg.error().code() == KafkaError._PARTITION_EOF:
                            self.consumer.unassign()
                            break
                        else:
                            # currently not processing message errors, other than KafkaError.PARTITION_EOF.
                            # If desired, add this logic here.
                            error_msg_received = msg.error()
                            continue
                except ErrorHandler as e:
                    raise ErrorHandler("Error parsing message. " + str(e))

                # Ignore generic exceptions, as there may be malformed messages in topic.
                # If you would like an exception thrown please add here.
                except Exception as e:
                    continue

        return messages
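The docstring invites adding parsers beyond json and avro. A sketch of what an additional plain-text parser might look like, mirroring the shape of the calls above (the method name and the 'search_string' key are illustrative, not from the original project):

    def __parse_text_msg(self, request_params, msg):
        # decode the raw payload and return it only if it contains the search string
        text = msg.value().decode('utf-8', errors='replace')
        if request_params.get('search_string', '').lower() in text.lower():
            return text
        return None

Wiring it in would just be another elif branch on message_type inside search_for_msgs.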
Example #28
def __init__(self, kp=7.0, Ti=155.0, Td=0.001):
    self._kp = kp
    self._Ti = Ti
    self._Td = Td
    self.error_handler_inst = ErrorHandler()
    self._control_signal = 0.0
Example #29
def initialize_kafka_consumer(cls, environment):
    try:
        settings = cls.__build_consumer_dict(environment)
        return Consumer(settings)
    except Exception as e:
        raise ErrorHandler("Error initializing Kafka Consumer: " + str(e))
Example #30
def _error_handler_default(self):
    eh = ErrorHandler()
    eh.logger = self
    return eh