Exemple #1
0
    def Preprocess(self,
                   artifacts_registry_object,
                   source_path_specs,
                   resolver_context=None):
        """Determine the operating system for each file-system source and run
        the preprocessing plugins against it.

        Detected operating systems are recorded in the knowledge base under
        the 'operating_system' key.
        """
        operating_systems = []
        for path_spec in source_path_specs:
            # Only file-system sources can be searched for OS artifacts.
            if not path_spec.IsFileSystem():
                continue

            try:
                file_system, mount_point = self._GetSourceFileSystem(
                    path_spec, resolver_context=resolver_context)
            except (RuntimeError, dfvfs_errors.BackEndError) as exception:
                logger.error(exception)
                continue

            try:
                searcher = file_system_searcher.FileSystemSearcher(
                    file_system, mount_point)
                operating_system = self._DetermineOperatingSystem(searcher)
                # Plugins only run when the OS family could be identified.
                if operating_system != definitions.OPERATING_SYSTEM_FAMILY_UNKNOWN:
                    preprocess_manager.PreprocessPluginsManager.RunPlugins(
                        artifacts_registry_object, file_system, mount_point,
                        self.knowledge_base)
                operating_systems.append(operating_system)
            finally:
                # Release the file system even if plugin execution raised.
                file_system.Close()

        if operating_systems:
            logger.info(
                'Preprocessing detected operating systems: {0:s}'.format(
                    ', '.join(operating_systems)))
            self.knowledge_base.SetValue('operating_system', operating_systems)
Exemple #2
0
    def accepted(self):
        """Apply the settings dialog: assign sides, (re)create engines, refresh UI."""
        logger.info("setting accepted....")

        # Assign each side to a human player or an AI engine.
        # (translated from: 设置棋手或AI)
        self.engine_side = []
        self.human_side = []

        # Red side: combo index 0 selects the human player.
        if self.settings.redside.currentIndex() == 0:
            self.human_side.append(Chess.RED)
        else:
            self.engine_side.append(Chess.RED)

        # NOTE(review): the black side is inverted relative to red -- here
        # index 0 selects the ENGINE, not the human.  Confirm the combo-box
        # item order really differs between the two widgets; otherwise this
        # is a copy-paste bug.
        if self.settings.blackside.currentIndex() == 0:
            self.engine_side.append(Chess.BLACK)
        else:
            self.human_side.append(Chess.BLACK)

        # Re-create the red engine when its type no longer matches the selection.
        idx = self.settings.red_engine.currentIndex()
        engine = self.engines[Chess.RED]
        if not isinstance(engine, UCCI_ENGINES[idx]):
            self.init_engines(Chess.RED)

        # Re-create the black engine when its type no longer matches the selection.
        idx = self.settings.black_engine.currentIndex()
        engine = self.engines[Chess.BLACK]
        if not isinstance(engine, UCCI_ENGINES[idx]):
            self.init_engines(Chess.BLACK)

        # With two engines playing each other the move-method list is fixed.
        if len(self.engine_side) == 2:
            self.method.list.setEnabled(False)
        else:
            self.method.list.setEnabled(True)
        self.update_action_state()

        self.try_engine_move()
Exemple #3
0
 def stop(self):
     """Disconnect the streaming client if its worker thread is still alive.

     Best effort: any disconnect error is logged rather than propagated,
     since stop() is typically called during shutdown.
     """
     try:
         if self.__thread is not None and self.__thread.is_alive():
             logger.info("Shutting down client.")
             self.__stream.disconnect()
     # 'except E as e' replaces the Python-2-only 'except E, e' syntax;
     # the broad catch is deliberate (shutdown must not raise).
     except Exception as e:
         logger.error("Error disconnecting stream: %s." % (str(e)))
    def start(self):
        """
        Start the connections to the message queue
        """
        logger.info("Starting the checker service ...")
        # Delegate to the subscriber handle; run() establishes the
        # message-queue connection and begins consuming.
        self.hs.run()
Exemple #5
0
 def __threadMain(self):
     """Worker-thread entry point: run the (blocking) stream filter."""
     try:
         logger.info("Initializing client.")
         self.__stream.filter(
             track=self.__track, follow=self.__follow,
             languages=self.__languages)
     finally:
         # Always flip the running flag, even when filter() raises.
         logger.info("Client finished.")
         self.__running = False
    def online_data_callback(self, channel, msg):
        """
        Online Data Callback

        An empty (abstract) function that needs to be overridden if you want
        to get the online real-time data to do some further processing, like
        monitoring the real-time data.
        """
        # Lazy %-style arguments: formatting is deferred until the log record
        # is actually emitted, instead of eagerly building the string.
        logger.info("[Online Data] channel: %s, msg: %s", channel, msg)
Exemple #7
0
def build_feed(instruments,
               fromYear,
               toYear,
               storage,
               frequency=bar.Frequency.DAY,
               timezone=None,
               skipErrors=False):
    """Build and load a :class:`engine.barfeed.yahoofeed.Feed` using CSV files downloaded from Yahoo! Finance.
    CSV files are downloaded if they haven't been downloaded before.

    :param instruments: Instrument identifiers.
    :type instruments: list.
    :param fromYear: The first year.
    :type fromYear: int.
    :param toYear: The last year.
    :type toYear: int.
    :param storage: The path where the files will be loaded from, or downloaded to.
    :type storage: string.
    :param frequency: The frequency of the bars. Only **engine.bar.Frequency.DAY** or **engine.bar.Frequency.WEEK**
        are supported.
    :param timezone: The default timezone to use to localize bars. Check :mod:`engine.marketsession`.
    :type timezone: A pytz timezone.
    :param skipErrors: True to keep on loading/downloading files in case of errors.
    :type skipErrors: boolean.
    :rtype: :class:`engine.barfeed.yahoofeed.Feed`.
    """

    logger = engine.logger.getLogger("yahoofinance")
    ret = yahoofeed.Feed(frequency, timezone)

    if not os.path.exists(storage):
        logger.info("Creating %s directory" % (storage))
        os.mkdir(storage)

    for year in range(fromYear, toYear + 1):
        for instrument in instruments:
            fileName = os.path.join(
                storage, "%s-%d-yahoofinance.csv" % (instrument, year))
            # Only download files that are not already cached locally.
            if not os.path.exists(fileName):
                logger.info("Downloading %s %d to %s" %
                            (instrument, year, fileName))
                try:
                    if frequency == bar.Frequency.DAY:
                        download_daily_bars(instrument, year, fileName)
                    elif frequency == bar.Frequency.WEEK:
                        download_weekly_bars(instrument, year, fileName)
                    else:
                        raise Exception("Invalid frequency")
                # 'as e' replaces the Python-2-only 'except E, e' syntax.
                except Exception as e:
                    if skipErrors:
                        logger.error(str(e))
                        continue
                    else:
                        # Bare raise preserves the original traceback
                        # ('raise e' would discard it).
                        raise
            ret.addBarsFromCSV(instrument, fileName)
    # The feed was built but never returned, contradicting the documented
    # :rtype: -- return it so callers actually receive the feed.
    return ret
    def check(self, payload_type, payload, lib_info):
        """
        Check Rules: Overriding of the check in class Checker

        Parses lib_info to obtain the rule type, the comparison operator and
        the target object, resolves the target value, then compares the
        payload value against it.

        Returns (True, response_dict) when the rule fires, otherwise
        (False, None).
        """
        # Lazy %-style logging args defer formatting until emission.
        logger.info("Received payload: %s,\tpayload type: %s",
                    payload, payload_type)
        logger.info("Rule's type: %s,\toperator: %s,\ttarget value:%s",
                    self.rule_type_enum_map[lib_info["rule_type"]],
                    self.rule_op_enum_map[lib_info["rule_op"]],
                    lib_info["rule_obj"])

        # Get payload value
        payload_value = self.get_payload_value(payload, payload_type)

        # [Rule type]: "value", "sensor", "trd_party"
        #    <value>     compares the payload with an indicated constant.
        #    <sensor>    compares the payload with the real-time payload of
        #                another indicated sensor.
        #    <trd_party> compares the payload with a third-party data source.
        rule_type = self.rule_type_enum_map[lib_info["rule_type"]]
        if rule_type == "value":
            self.target_val = float(lib_info["rule_obj"])
        elif rule_type == "sensor":
            # NOTE(review): only the sensor id is recorded here; target_val is
            # not refreshed for this branch, so the comparison below uses a
            # previously stored value -- confirm this is intended.
            self.target_sensorid = str(lib_info["rule_obj"])
        elif rule_type == "trd_party":
            self.target_3rd = lib_info["rule_obj"]
            self.target_val = self.get_3rd_value(self.target_3rd)
        else:
            raise Exception("Invalid rule type: %s." % lib_info["rule_type"])

        # [Rule Operator]: "gt", "lt", "ge", "le", "eq"
        # 'is not None' replaces '!= None' (identity test for None, PEP 8);
        # parentheses replace backslash line continuations.
        operator = self.rule_op_enum_map[lib_info["rule_op"]]
        if self.target_val is not None and (
                (operator == "gt" and payload_value > self.target_val) or
                (operator == "lt" and payload_value < self.target_val) or
                (operator == "ge" and payload_value >= self.target_val) or
                (operator == "le" and payload_value <= self.target_val) or
                (operator == "eq" and payload_value == self.target_val)):
            response = {
                "lib_info": lib_info,
                "payload_val": payload_value,
                "target_val": self.target_val,
                "timestamp": str(arrow.now())
            }
            return True, response
        return False, None
    def stop(self):
        """
        Stop the connections to the message queue
        """

        logger.info("Stopping the checker service ...")
        # Stop the connections to the message queue
        # TODO: currently, it is an invalid method to stop the connection. I have to ask Zhiyi how
        # do this job correctly.
        # NOTE(review): calling __del__ directly does not guarantee cleanup and
        # risks double-finalization when the GC later collects these objects;
        # prefer an explicit close()/stop() API on hs/hp once one exists.
        self.hs.__del__()
        self.hp.__del__()
    def check(self, payload_type, payload, lib_info):
        """
        Check

        An empty (abstract) function that needs to be overridden to check if
        the payload conforms to the constraints in the library information
        (lib_info). If it does, assert it to be true and return a response
        that would be published to the notification channel.

        Returns (False, None) in this base implementation.
        """
        # Lazy %-style args replace eager string interpolation; PEP 8 call
        # spacing (no space before the parenthesis).
        logger.info("Please override this function. Payload: %s, payload type: %s, library info: %s",
                    payload, payload_type, lib_info)
        return False, None
Exemple #11
0
 def save(self):
     """Prompt for a target path and save the current position as a FEN file."""
     file_dialog = QtWidgets.QFileDialog(self)
     file_dialog.setFileMode(QtWidgets.QFileDialog.AnyFile)
     target = file_dialog.getSaveFileName(
         self, "保存中国象棋文件 Fen", ".", "文件 (*.fen)")[0]
     # User cancelled the dialog.
     if not target:
         return
     fen_string = self.engine.sit.format_fen()
     with open(target, 'w', encoding='utf8') as out:
         out.write('fen ')
         out.write(fen_string)
     logger.info("save file %s - fen %s", target, fen_string)
    def get_payload_value(self, payload, payload_type):
        """
        Get Payload Value

        Parse the payload to extract the value of the sensor data.

        Returns the numeric value for "number" payloads; other payload types
        are not supported yet and yield None.
        """
        # Look the type up once and reuse it: the original code performed a
        # second lookup with str(payload_type) in the log branch, which used
        # an inconsistent key and could raise KeyError.
        mapped_type = self.payload_enum_map[payload_type]
        if mapped_type == "number":
            return float(payload)
        # TODO: add parsing process for "gps", "diag" and "log"
        logger.info("Unsupported payload type: %s", mapped_type)
        return None
Exemple #13
0
    def pushJobResults(self, jobId, result, parameters, workerName):
        """Record a worker's job result, track the best result, and forward it to the result sink."""
        # SECURITY(review): pickle.loads on data received from workers
        # executes arbitrary code if a worker is untrusted -- confirm the
        # transport is trusted/authenticated.
        jobId = pickle.loads(jobId)
        result = pickle.loads(result)
        parameters = pickle.loads(parameters)

        # Remove the job mapping.
        with self.__activeJobsLock:
            try:
                del self.__activeJobs[jobId]
            except KeyError:
                # The job's results were already submitted.
                return

        # NOTE(review): `result is None` looks suspicious -- it lets a None
        # result overwrite the current best.  This probably meant
        # `self.__bestResult is None` (first result seen).  Also, comparing a
        # number against a None __bestResult raises TypeError on Python 3.
        if result is None or result > self.__bestResult:
            logger.info("Best result so far %s with parameters %s" % (result, parameters))
            self.__bestResult = result

        self.__resultSinc.push(result, base.Parameters(*parameters))
Exemple #14
0
    def pushJobResults(self, jobId, result, parameters, workerName):
        """Unpickle a worker's submission, update the running best, and push to the sink."""
        # SECURITY(review): unpickling worker-supplied bytes can execute
        # arbitrary code -- verify workers are trusted.
        jobId = pickle.loads(jobId)
        result = pickle.loads(result)
        parameters = pickle.loads(parameters)

        # Remove the job mapping.
        with self.__activeJobsLock:
            try:
                del self.__activeJobs[jobId]
            except KeyError:
                # The job's results were already submitted.
                return

        # NOTE(review): the first operand likely should test __bestResult for
        # None rather than result; as written, a None result replaces the
        # best one.  On Python 3 a numeric comparison against None also
        # raises TypeError -- confirm intended semantics before changing.
        if result is None or result > self.__bestResult:
            logger.info("Best result so far %s with parameters %s" %
                        (result, parameters))
            self.__bestResult = result

        self.__resultSinc.push(result, base.Parameters(*parameters))
Exemple #15
0
    def serve(self):
        """Load every bar from the feed, then serve workers until stopped."""
        try:
            # Materialize the whole feed once so it can be shipped to workers.
            logger.info("Loading bars")
            loaded = [bars for _, bars in self.__barFeed]
            instruments = self.__barFeed.getRegisteredInstruments()
            self.__instrumentsAndBars = pickle.dumps((instruments, loaded))
            self.__barsFreq = self.__barFeed.getFrequency()

            if self.__autoStopThread:
                self.__autoStopThread.start()

            logger.info("Waiting for workers")
            self.serve_forever()

            if self.__autoStopThread:
                self.__autoStopThread.join()
        finally:
            # Signal a forced stop regardless of how serving ended.
            self.__forcedStop = True
Exemple #16
0
    def serve(self):
        """Preload the bar feed, run the worker server, and flag shutdown on exit."""
        try:
            logger.info("Loading bars")
            collected_bars = []
            for _unused_dt, bars in self.__barFeed:
                collected_bars.append(bars)
            registered = self.__barFeed.getRegisteredInstruments()
            # Serialized once up front; workers receive this payload verbatim.
            self.__instrumentsAndBars = pickle.dumps((registered, collected_bars))
            self.__barsFreq = self.__barFeed.getFrequency()

            if self.__autoStopThread:
                self.__autoStopThread.start()

            logger.info("Waiting for workers")
            self.serve_forever()

            if self.__autoStopThread:
                self.__autoStopThread.join()
        finally:
            self.__forcedStop = True
Exemple #17
0
def serve(barFeed, strategyParameters, address, port):
    """Executes a server that will provide bars and strategy parameters for workers to use.

    :param barFeed: The bar feed that each worker will use to backtest the strategy.
    :type barFeed: :class:`engine.barfeed.BarFeed`.
    :param strategyParameters: The set of parameters to use for backtesting. An iterable object where **each element is a tuple that holds parameter values**.
    :param address: The address to listen for incoming worker connections.
    :type address: string.
    :param port: The port to listen for incoming worker connections.
    :type port: int.
    :rtype: A :class:`Results` instance with the best results found or None if no results were obtained.
    """
    param_source = base.ParameterSource(strategyParameters)
    result_sinc = base.ResultSinc()
    server = xmlrpcserver.Server(param_source, result_sinc, barFeed, address, port)
    logger.info("Starting server")
    server.serve()
    logger.info("Server finished")

    best_result, best_parameters = result_sinc.getBest()
    # Early return keeps the success path unindented.
    if best_result is None:
        logger.error("No results. All jobs failed or no jobs were processed.")
        return None
    logger.info("Best final result %s with parameters %s" % (best_result, best_parameters.args))
    return Results(best_parameters.args, best_result)
Exemple #18
0
def serve(barFeed, strategyParameters, address, port):
    """Executes a server that will provide bars and strategy parameters for workers to use.

    :param barFeed: The bar feed that each worker will use to backtest the strategy.
    :type barFeed: :class:`engine.barfeed.BarFeed`.
    :param strategyParameters: The set of parameters to use for backtesting. An iterable object where **each element is a tuple that holds parameter values**.
    :param address: The address to listen for incoming worker connections.
    :type address: string.
    :param port: The port to listen for incoming worker connections.
    :type port: int.
    :rtype: A :class:`Results` instance with the best results found or None if no results were obtained.
    """
    parameter_source = base.ParameterSource(strategyParameters)
    sink = base.ResultSinc()
    rpc_server = xmlrpcserver.Server(parameter_source, sink, barFeed, address, port)
    logger.info("Starting server")
    rpc_server.serve()
    logger.info("Server finished")

    outcome = None
    winning_result, winning_parameters = sink.getBest()
    if winning_result is not None:
        logger.info("Best final result %s with parameters %s" %
                    (winning_result, winning_parameters.args))
        outcome = Results(winning_parameters.args, winning_result)
    else:
        logger.error("No results. All jobs failed or no jobs were processed.")
    return outcome
    def sub_callback(self, channel, msg):
        """
        Overriding of the callback function of interfaces.Subscriber

        Receives real-time data from the data channel and retrieves the
        corresponding library item by the indicated sensor id. The abstract
        'check' method validates the payload of the current real-time data
        against the constraints (rules) in the library item, and a
        notification is published to the notification channel when any
        constraint has been triggered and push delivery is requested.
        """
        # Lazy %-style logging args defer formatting until emission.
        logger.info(msg)
        logger.info("\nReceived data from sensor: %s", msg["sensor_id"])

        # TODO: Only check one specific user's rules which would be indicated
        # by the passing email and password

        payload_type = msg["data_type"]  # Payload type
        payload = msg["payload"]  # Payload field of real-time data stream
        lib_info = self.dao[msg["sensor_id"]]  # Rules/Feature Library item

        # Nothing to do when no rule library exists for this sensor.
        # 'not lib_info' replaces the redundant 'not bool(lib_info)'.
        if not lib_info:
            logger.info("There is no library item for sensor Id: %s",
                        msg["sensor_id"])
            return

        # Check the data against its corresponding library information.
        assertion, response_msg = self.check(payload_type, payload, lib_info)
        if assertion:
            # TODO: Save the notification message to database
            if lib_info["is_push"]:
                # Push the notification only when the rule check fired.
                self.push_notification(response_msg)

        # Online data callback for further processing of the real-time stream.
        self.online_data_callback(channel, msg)
Exemple #20
0
 def on_connect(self):
     """Stream callback invoked once the connection is established; logs it."""
     logger.info("Connected.")
Exemple #21
0
def build_feed(sourceCode, tableCodes, fromYear, toYear, storage, frequency=bar.Frequency.DAY, timezone=None,
               skipErrors=False, noAdjClose=False, authToken=None, columnNames=None, forceDownload=False
               ):
    """Build and load a :class:`engine.barfeed.quandlfeed.Feed` using CSV files downloaded from Quandl.
    CSV files are downloaded if they haven't been downloaded before.

    :param sourceCode: The dataset source code.
    :type sourceCode: string.
    :param tableCodes: The dataset table codes.
    :type tableCodes: list.
    :param fromYear: The first year.
    :type fromYear: int.
    :param toYear: The last year.
    :type toYear: int.
    :param storage: The path where the files will be loaded from, or downloaded to.
    :type storage: string.
    :param frequency: The frequency of the bars. Only **engine.bar.Frequency.DAY** or **engine.bar.Frequency.WEEK**
        are supported.
    :param timezone: The default timezone to use to localize bars. Check :mod:`engine.marketsession`.
    :type timezone: A pytz timezone.
    :param skipErrors: True to keep on loading/downloading files in case of errors.
    :type skipErrors: boolean.
    :param noAdjClose: True if the instruments don't have adjusted close values.
    :type noAdjClose: boolean.
    :param authToken: Optional. An authentication token needed if you're doing more than 50 calls per day.
    :type authToken: string.
    :param columnNames: Optional. A dictionary to map column names. Valid key values are:

        * datetime
        * open
        * high
        * low
        * close
        * volume
        * adj_close

    :type columnNames: dict.

    :rtype: :class:`engine.barfeed.quandlfeed.Feed`.
    """

    logger = engine.logger.getLogger("quandl")
    ret = quandlfeed.Feed(frequency, timezone)
    if noAdjClose:
        ret.setNoAdjClose()

    # Additional column names.  The None default replaces the shared mutable
    # default argument ({}); passing an explicit dict still works, and
    # .items() replaces the Python-2-only .iteritems().
    for col, name in (columnNames or {}).items():
        ret.setColumnName(col, name)

    if not os.path.exists(storage):
        logger.info("Creating %s directory" % (storage))
        os.mkdir(storage)

    for year in range(fromYear, toYear+1):
        for tableCode in tableCodes:
            fileName = os.path.join(storage, "%s-%s-%d-quandl.csv" % (sourceCode, tableCode, year))
            if not os.path.exists(fileName) or forceDownload:
                logger.info("Downloading %s %d to %s" % (tableCode, year, fileName))
                try:
                    if frequency == bar.Frequency.DAY:
                        download_daily_bars(sourceCode, tableCode, year, fileName, authToken)
                    elif frequency == bar.Frequency.WEEK:
                        download_weekly_bars(sourceCode, tableCode, year, fileName, authToken)
                    else:
                        raise Exception("Invalid frequency")
                # 'as e' replaces the Python-2-only 'except E, e' syntax.
                except Exception as e:
                    if skipErrors:
                        logger.error(str(e))
                        continue
                    else:
                        # Bare raise preserves the original traceback.
                        raise
            ret.addBarsFromCSV(tableCode, fileName)
    # The feed was built but never returned, contradicting the documented
    # :rtype: -- return it so callers actually receive the feed.
    return ret
Exemple #22
0
def build_feed(sourceCode,
               tableCodes,
               fromYear,
               toYear,
               storage,
               frequency=bar.Frequency.DAY,
               timezone=None,
               skipErrors=False,
               noAdjClose=False,
               authToken=None,
               columnNames=None,
               forceDownload=False):
    """Build and load a :class:`engine.barfeed.quandlfeed.Feed` using CSV files downloaded from Quandl.
    CSV files are downloaded if they haven't been downloaded before.

    :param sourceCode: The dataset source code.
    :type sourceCode: string.
    :param tableCodes: The dataset table codes.
    :type tableCodes: list.
    :param fromYear: The first year.
    :type fromYear: int.
    :param toYear: The last year.
    :type toYear: int.
    :param storage: The path where the files will be loaded from, or downloaded to.
    :type storage: string.
    :param frequency: The frequency of the bars. Only **engine.bar.Frequency.DAY** or **engine.bar.Frequency.WEEK**
        are supported.
    :param timezone: The default timezone to use to localize bars. Check :mod:`engine.marketsession`.
    :type timezone: A pytz timezone.
    :param skipErrors: True to keep on loading/downloading files in case of errors.
    :type skipErrors: boolean.
    :param noAdjClose: True if the instruments don't have adjusted close values.
    :type noAdjClose: boolean.
    :param authToken: Optional. An authentication token needed if you're doing more than 50 calls per day.
    :type authToken: string.
    :param columnNames: Optional. A dictionary to map column names. Valid key values are:

        * datetime
        * open
        * high
        * low
        * close
        * volume
        * adj_close

    :type columnNames: dict.

    :rtype: :class:`engine.barfeed.quandlfeed.Feed`.
    """

    logger = engine.logger.getLogger("quandl")
    ret = quandlfeed.Feed(frequency, timezone)
    if noAdjClose:
        ret.setNoAdjClose()

    # Additional column names.  None default avoids the mutable-default
    # pitfall; .items() replaces the Python-2-only .iteritems().
    for col, name in (columnNames or {}).items():
        ret.setColumnName(col, name)

    if not os.path.exists(storage):
        logger.info("Creating %s directory" % (storage))
        os.mkdir(storage)

    for year in range(fromYear, toYear + 1):
        for tableCode in tableCodes:
            fileName = os.path.join(
                storage, "%s-%s-%d-quandl.csv" % (sourceCode, tableCode, year))
            if not os.path.exists(fileName) or forceDownload:
                logger.info("Downloading %s %d to %s" %
                            (tableCode, year, fileName))
                try:
                    if frequency == bar.Frequency.DAY:
                        download_daily_bars(sourceCode, tableCode, year,
                                            fileName, authToken)
                    elif frequency == bar.Frequency.WEEK:
                        download_weekly_bars(sourceCode, tableCode, year,
                                             fileName, authToken)
                    else:
                        raise Exception("Invalid frequency")
                # 'as e' replaces the Python-2-only 'except E, e' syntax.
                except Exception as e:
                    if skipErrors:
                        logger.error(str(e))
                        continue
                    else:
                        # Bare raise preserves the original traceback.
                        raise
            ret.addBarsFromCSV(tableCode, fileName)
    # Return the built feed, matching the documented :rtype:.
    return ret