Example #1
    def __init__(self):

        self._tl = Timeloop()

        self._jobs = dict()

        self.api = TinaAPI(self)
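Every example on this page builds on the same small Timeloop surface. For reference, a minimal, self-contained sketch of that surface (the job body and interval are placeholders):

from datetime import timedelta
import time

from timeloop import Timeloop

tl = Timeloop()

@tl.job(interval=timedelta(seconds=2))
def heartbeat():
    # Runs every 2 seconds on a background thread.
    print("heartbeat at {}".format(time.ctime()))

tl.start(block=False)  # returns immediately; block=True would park the caller
time.sleep(10)
tl.stop()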
Example #2
def main(hparams):

    metagraph = Metagraph(hparams)
    neuron = Neuron(hparams, metagraph)

    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    bittensor.proto.bittensor_pb2_grpc.add_BittensorServicer_to_server(neuron, server)
    server.add_insecure_port(hparams.bind_address + ":" + hparams.port)
    server.start()

    tl = Timeloop()
    set_timed_loops(tl, hparams, neuron, metagraph)
    tl.start(block=False)
    logger.info('Started Timers.')

    try:
        logger.info('Begin wait on main...')
        while True:
            logger.debug('heartbeat')
            time.sleep(100)

    except KeyboardInterrupt:
        logger.debug('Neuron stopped with keyboard interrupt.')
        server.stop(0)  # grpc's Server.stop() requires a grace-period argument
        del neuron
        del metagraph

    except Exception as e:
        logger.error('Neuron stopped with interrupt on error: ' + str(e))
        server.stop(0)  # grpc's Server.stop() requires a grace-period argument
        del neuron
        del metagraph
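The handlers above stop the gRPC server but never stop the Timeloop, so the timers keep firing until the process exits. A sketch of a symmetric teardown (the 2-second grace period is an arbitrary choice):

def tear_down(server, tl):
    tl.stop()             # stop the timers first so no job fires mid-shutdown
    server.stop(grace=2)  # then drain in-flight RPCs for up to 2 seconds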
Example #3
    def __init__(self, t1, t2, init_val=None):
        Timeloop.__init__(self)
        self.jobs = [None, None]  # Timeloop.jobs is a list
        self.c = 0
        self.set_main_job(self.fun1, t1)
        self.set_selfcheck_job(self.fun2, t2, 'werld')
        if init_val:
            # do routine
            self.fun2(init_val)
Example #4
class Server(object):
    def __init__(self):

        self._tl = Timeloop()

        self._jobs = dict()

        self.api = TinaAPI(self)

    def run_api(self):
        self.api.start()

    @property
    def jobs(self):
        return self._jobs

    def register_job(self, job: Job) -> None:
        """
        Raises:
              ValueError: If job has already been registered
        """

        if self.job_exists(job):
            raise ValueError("The job {} has already been registered.".format(
                job.name))
        else:
            self._jobs[job.name] = JobContainer(
                job=job,
                job_start_timestamp=time.ctime(),
                job_interval=job.interval)

        interval = job.interval

        @self._tl.job(interval=interval)
        def wrapper() -> None:
            job.last_execution = time.ctime()
            job.run()

    def run(self) -> None:
        self._tl.start()

    def stop(self) -> None:
        self._tl.stop()

    def job_exists(self, job: Job) -> bool:
        # register_job keys the registry by job.name, so check the same key.
        return job.name in self._jobs

    def manual_trigger(self, job_name: str) -> bool:
        # Blocking: runs the job synchronously on the caller's thread.
        if job_name not in self.jobs:
            return False
        self.jobs[job_name].job.run()
        return True
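Assuming a Job type with name, interval, and run() (its definition is not shown here), driving this registry might look like the following sketch; my_job is hypothetical:

server = Server()
server.register_job(my_job)         # schedules my_job.run() every my_job.interval
server.run()                        # Timeloop.start() defaults to block=False
server.manual_trigger(my_job.name)  # runs the job once, synchronously
server.stop()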
Example #5
    def _run_eviction(self):
        tl = Timeloop()

        @tl.job(interval=self.eviction_interval)
        def eviction():
            for shard in self.shards:
                shard.run_eviction()

        tl.start()
        self.tl = tl
Example #6
    def __init__(self, config: Config, signals: np.ndarray, output = None, interval = 1000, new_reference_signal_each_k = 5, max_iterations = 1200, error_on = False, error_start = 0, error_duration = 10, network_id = 2):
        """
            Creates a Server object using:
            str:host\t host address
            int:port\t port where server runs
            int:id\t ID of the Server (used in numpy to locate position in matrix)
            np.array: adjacency\t Adjacency matrix of the whole/sub system
            int:signal\t Initial value for the node to start
            dict:out_neighbors\t contains all outneighbor host addresses in key property
            bool: instant_start\t whether the server should start immediately

            Returns: Server(object).
        """

        logger = logging.getLogger(name='Server.__init__')

        manager = Manager() # used to synchronize data between processes and threads
        self._host = config.host
        self._port = config.port
        self.__address = (config.host, config.port)
        self._adjacency = config.adjacency
        self._id = config.id
        self._laplacian = utility.calculate_laplacian(self._adjacency)
        if not utility.check_laplacian(self._laplacian):
            raise BaseException("No valid laplacian")
        # self._beta = 1/np.max(np.linalg.eigvals(self._laplacian)) # calculates beta, moved to utility
        self._beta = utility.calculate_beta(self._laplacian)
        self._server_socket = socket(AF_INET, SOCK_STREAM)
        self._server_socket.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
        self._server_socket.bind(self.__address)
        self._j = J(0, signals[0], config.out_neighbors, manager)
        self._neighbor_states = manager.dict()
        self._interval = interval
        self._interval_sec = float(interval / 1000.0)
        self.__signals = signals
        self._new_reference_signal_each_k = new_reference_signal_each_k
        self.__output = output
        self.__max_iterations = max_iterations
        self.__running = False
        self.__neighbor_out_connections = manager.dict()
        self.__error_on = error_on
        self.__error_start = error_start
        self.__error_duration = error_duration
        self.__API_URL = 'http://10.0.2.2:7071'
        self.__NETWORK_ID = network_id
        self.__API_QUEUE = manager.list()
        self.__API_QUEUE_LOCK = Lock()
        
        self.__tl = Timeloop() # allows to start recurring threads in a given time interval
        if config.instant_start:
            self.start()
        # logger.debug(self)
        logger.warning(f"Using beta {self._beta:24.20f}")
Example #7
def register_virtual_device(socketio):
    """
    Register the virtual device callback to get invoked periodically.
    :param socketio:
    :return: the started Timeloop instance
    """
    tl = Timeloop()
    # Pass socketio as a keyword so each tick calls send_event(socketio=socketio),
    # matching the intent of the commented decorator below.
    tl._add_job(send_event, interval=timedelta(seconds=1), socketio=socketio)
    # @tl.job(interval=timedelta(seconds=1))
    # def on_e_bike_event_received():
    #     return send_event(socketio)
    tl.start()
    return tl
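Since _add_job is a private Timeloop method, an equivalent that sticks to the public decorator API would be (a sketch):

def register_virtual_device(socketio):
    tl = Timeloop()

    @tl.job(interval=timedelta(seconds=1))
    def on_e_bike_event_received():
        send_event(socketio)  # socketio captured by the closure

    tl.start()
    return tl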
Example #8
    def __init__(self, tree, interval=60):
        """sets up the async event loop
        Parameters:
        tree - the same merkle tree instance being anchored
        interval - the optional parameter for setting the time interval of the anchor.

        Returns: an object that tracks the current phase, spawns subtasks
        """
        self.tree = tree
        self.loop = Timeloop()
        self.chain = Chain()
        self.interval = interval
        logger.info("The reactor was started at time: {time}",
                    time=datetime.timestamp(datetime.now()))
Example #9
    def __init__(self, in_q, out_q, terminating, avg_len=5):
        self.in_q = in_q
        self.out_q = out_q
        self.avg_len = avg_len
        self.terminating = terminating
        self.process_thd = threading.Thread(target=self.thd_etnry,
                                            name='fps_thd')
        self.fps = FPS()
        self.fps.start()
        self.fps.stop()
        self.last_fps = []
        self.tl = Timeloop()
        _deco = self.tl.job(interval=timedelta(seconds=1))
        _deco(self.fps_job)
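The last two lines apply the decorator programmatically: tl.job(...) returns the decorating function, so calling it on fps_job registers the method as a 1-second job. The same registration in one line (a sketch using Timeloop's private helper):

        self.tl._add_job(self.fps_job, timedelta(seconds=1))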
Example #10
def main(config):
    address = "[::]:8888"
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    peerstore = Peerstore(config)
    pstore_grpc.add_PeerstoreServicer_to_server(peerstore, server)
    server.add_insecure_port(address)

    tl = Timeloop()
    set_timed_loops(tl, config, peerstore)
    tl.start(block=False)
    logger.info('started timers')

    server.start()
    logger.info('peerstore server {} ...', address)
    server.wait_for_termination()
Example #11
class ExtractorJob():
    def __init__(self, periodicity, text_filter, source):
        self.periodicity = timedelta(seconds=periodicity)
        self.source = source
        self.text_filter = [text_filter]
        self.t1 = Timeloop()
        self.setup_logger()

    def test_function(self):
        self.logger.info('Begin {source_name} test'.format(
            source_name=self.source.__name__))
        print('In test function')

    def target_function(self):
        print('Begin target function')
        # Begin first test
        self.logger.info('Begin {source_name} test'.format(
            source_name=self.source.__name__))
        extractor = self.source()
        # Get the urls
        extractor.get_news_urls(datetime.today())
        # Extract text from news
        extractor.extract_text_from_news()
        # Filter by keywords
        extractor.filter_news_by_keywords(self.text_filter)
        # Close the extractor
        del extractor

    def setup_logger(self):
        # Configure logger: oddcrawler needs to be the top logger
        self.logger = getLogger('oddcrawler')
        self.logger.setLevel(DEBUG)
        # create file handler
        fh = FileHandler('extractor_test.log')
        fh.setLevel(DEBUG)
        # create console handler
        ch = StreamHandler()
        ch.setLevel(ERROR)
        # create formatter and add it to handlers
        formatter = Formatter('%(levelname)s %(asctime)-15s %(message)s')
        fh.setFormatter(formatter)
        ch.setFormatter(formatter)
        self.logger.addHandler(fh)
        self.logger.addHandler(ch)

    def run(self):
        self.t1._add_job(self.target_function, interval=self.periodicity)
        self.t1.start(block=True)
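Because start(block=True) parks the calling thread until SIGINT/SIGTERM, run() never returns on its own. A non-blocking variant would hand control back to the caller (a sketch; run_async and stop_async are hypothetical additions to ExtractorJob):

    def run_async(self):
        self.t1._add_job(self.target_function, interval=self.periodicity)
        self.t1.start(block=False)  # returns immediately

    def stop_async(self):
        self.t1.stop()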
Example #12
def visualize(pcoll, dynamic_plotting_interval=None):
    """Visualizes the data of a given PCollection. Optionally enables dynamic
  plotting with interval in seconds if the PCollection is being produced by a
  running pipeline or the pipeline is streaming indefinitely. The function
  always returns immediately and is asynchronous when dynamic plotting is on.

  If dynamic plotting enabled, the visualization is updated continuously until
  the pipeline producing the PCollection is in an end state. The visualization
  would be anchored to the notebook cell output area. The function
  asynchronously returns a handle to the visualization job immediately. The user
  could manually do::

    # In one notebook cell, enable dynamic plotting every 1 second:
    handle = visualize(pcoll, dynamic_plotting_interval=1)
    # Visualization anchored to the cell's output area.
    # In a different cell:
    handle.stop()
    # Will stop the dynamic plotting of the above visualization manually.
    # Otherwise, dynamic plotting ends when pipeline is not running anymore.

  If dynamic_plotting is not enabled (by default), None is returned.

  The function is experimental. For internal use only; no
  backwards-compatibility guarantees.
  """
    if not _pcoll_visualization_ready:
        return None
    pv = PCollectionVisualization(pcoll)
    if ie.current_env().is_in_notebook:
        pv.display_facets()
    else:
        pv.display_plain_text()
        # We don't want to do dynamic plotting if there is no notebook frontend.
        return None

    if dynamic_plotting_interval:
        # Disables the verbose logging from timeloop.
        logging.getLogger('timeloop').disabled = True
        tl = Timeloop()

        def dynamic_plotting(pcoll, pv, tl):
            @tl.job(interval=timedelta(seconds=dynamic_plotting_interval))
            def continuous_update_display():  # pylint: disable=unused-variable
                # Always creates a new PCollVisualization instance when the
                # PCollection materialization is being updated and dynamic
                # plotting is in-process.
                updated_pv = PCollectionVisualization(pcoll)
                updated_pv.display_facets(updating_pv=pv)
                if ie.current_env().is_terminated(pcoll.pipeline):
                    try:
                        tl.stop()
                    except RuntimeError:
                        # The job can only be stopped once. Ignore excessive stops.
                        pass

            tl.start()
            return tl

        return dynamic_plotting(pcoll, pv, tl)
    return None
Example #13
class FPSCounter:
    def __init__(self, in_q, out_q, terminating, avg_len=5):
        self.in_q = in_q
        self.out_q = out_q
        self.avg_len = avg_len
        self.terminating = terminating
        self.process_thd = threading.Thread(target=self.thd_etnry,
                                            name='fps_thd')
        self.fps = FPS()
        self.fps.start()
        self.fps.stop()
        self.last_fps = []
        self.tl = Timeloop()
        _deco = self.tl.job(interval=timedelta(seconds=1))
        _deco(self.fps_job)

    def start_thread(self):
        self.tl.start(block=False)
        self.process_thd.start()
        return self.process_thd

    def thd_etnry(self):
        print("fps thread started")
        while not self.terminating.is_set():
            ret = self.in_q.get()
            self.fps.update()
            try:
                self.out_q.put_nowait(ret)
            except queue.Full:
                pass

        self.tl.stop()
        print("fps thread terminated")

    def get_last_fps(self):
        if not self.last_fps:
            return 0.0  # no samples yet
        return sum(self.last_fps) / len(self.last_fps)

    def fps_job(self):
        self.fps.stop()
        self.last_fps.append(self.fps.fps())
        self.fps.start()
        if len(self.last_fps) > self.avg_len:
            self.last_fps.pop(0)
        print(
            f"fps: curr={self.last_fps[-1]:3.2f}, min={min(self.last_fps):3.2f}, avg={self.get_last_fps():3.2f}, max={max(self.last_fps):3.2f}"
        )
Example #14
    def __init__(self, game_object=None):
        super().__init__()
        if not game_object:
            print("No game object found, probably debug mode")
        self.game_object = game_object
        self.layout = QVBoxLayout()
        # TODO: set widget size dynamically, depending on image size
        button_icon = Image.open("./src/button_icon.jpg")
        image_height = button_icon.height
        image_width = button_icon.width
        button_stylesheet = """
            QWidget{
                color: white;
                height: %d;
                width: %d;
                background-image: url("./src/button_icon.jpg");
                background-repeat: no-repeat;
                background-position: center;
                border: none;
            }
        """ % (image_height, image_width)
        self.credit_button = QPushButton('Click')
        self.credit_button.setStyleSheet(button_stylesheet)
        self.layout.addWidget(self.credit_button)
        self.credit_button.clicked.connect(self.on_credit_click)
        self.score_label = QLabel(str(self.game_object.credits_ects))
        self.score_label.setAlignment(QtCore.Qt.AlignCenter)
        self.professor_label = QLabel(
            str(self.game_object.professor) + " profs!")
        self.professor_label.setAlignment(QtCore.Qt.AlignCenter)
        self.buy_professor_button = QPushButton("Adopte un prof!")
        self.buy_professor_button.clicked.connect(self.on_buy_professor_click)
        self.layout.addWidget(self.buy_professor_button)
        self.layout.addWidget(self.score_label)
        self.layout.addWidget(self.professor_label)
        self.setLayout(self.layout)
        tl = Timeloop()

        @tl.job(interval=timedelta(seconds=10))
        def sample_job_every_10s():
            self.earn_upgrade_credits()

        tl.start()

        self.show()
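Since tl is a local here, the widget keeps no handle to stop the timer when it closes. One option is to store the reference on self and stop it in Qt's closeEvent hook (a sketch; assumes this widget overrides closeEvent):

        self._tl = tl  # keep a handle so the timer dies with the widget
        self.show()

    def closeEvent(self, event):
        self._tl.stop()  # stop the 10-second credit job on window close
        event.accept()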
Example #15
    def start(self):
        ''' Starting server, accept process and broadcast threads. '''
        if self.__running:
            return
        try:
            self.__running = True
            self._server_socket.listen(500)
            self.accept_process = Process(target=self.accept_connections)
            self.accept_process.daemon = True
            self.accept_process.start()
            self.__tl = Timeloop()
            self.__tl._add_job(self.broadcast,
                               interval=timedelta(milliseconds=self._interval))
            self.__tl._add_job(self.callApi, interval=timedelta(seconds=2))
            self.__tl.start()
        except BaseException as e:
            logging.getLogger(name='start').error(str(e))
            self.stop()
Example #16
    def on_init_complete(self):
        logger.info("App Init - completed")

        # Run all scoring methods
        if self._auto_update_scoring:
            self.async_scoring(None)

        # Run Cleanup Jobs
        def cleanup_sessions(instance):
            instance.cleanup_sessions()

        cleanup_sessions(self)
        time_loop = Timeloop()
        schedule.every(5).minutes.do(cleanup_sessions, self)

        @time_loop.job(interval=timedelta(seconds=30))
        def run_scheduler():
            schedule.run_pending()

        time_loop.start(block=False)
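The schedule bridge above gives the 5-minute cleanup a 30-second polling resolution. Timeloop alone can express the same cleanup without the schedule dependency; a sketch of what the job inside on_init_complete could look like:

        @time_loop.job(interval=timedelta(minutes=5))
        def run_cleanup():
            cleanup_sessions(self)

        time_loop.start(block=False)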
Example #17
def main():

    config = Config()

    metagraph = Metagraph(config)

    dendrite = Dendrite(config, metagraph)

    nucleus = Nucleus(config)

    neuron = Neuron(config, dendrite, nucleus, metagraph)

    neuron.serve()

    # Start timed calls.
    tl = Timeloop()
    set_timed_loops(tl, config, neuron, metagraph)
    tl.start(block=False)
    logger.info('Started Timers.')

    def tear_down(_config, _neuron, _dendrite, _nucleus, _metagraph):
        logger.debug('tear down.')
        del _neuron
        del _dendrite
        del _nucleus
        del _metagraph
        del _config

    try:
        logger.info('Begin wait on main...')
        while True:
            logger.debug('heartbeat')
            time.sleep(100)

    except KeyboardInterrupt:
        logger.debug('Neuron stopped with keyboard interrupt.')
        tear_down(config, neuron, dendrite, nucleus, metagraph)

    except Exception as e:
        logger.error('Neuron stopped with interrupt on error: ' + str(e))
        tear_down(config, neuron, dendrite, nucleus, metagraph)
Example #18
def wait_until():
    tl = Timeloop()

    thread = MyThread()

    @tl.job(interval=timedelta(seconds=2))
    def sample_job_every_2s():
        fibonacci(20)
        print("2s job current time : {}".format(time.ctime()))

    @tl.job(interval=timedelta(seconds=5))
    def sample_job_every_5s():
        fibonacci(1)
        print("5s job current time : {}".format(time.ctime()))

    @tl.job(interval=timedelta(seconds=10))
    def sample_job_every_10s():
        fibonacci(20)
        print("10s job current time : {}".format(time.ctime()))

    tl.start()
    while thread.is_alive():
        print('+++do something')
    tl.stop()
    return True
Example #19
def main(hparams):

    logger.info("Establishing Metagraph Component...")
    metagraph = Metagraph(hparams)

    logger.info("Building Transformer Components...")

    logger.info("Transforming Dataset...")
    lines = download_and_read_file(hparams.dataset)

    logger.info("Building Transformer...")
    nucleus = Transformer(hparams, lines)

    neuron = Neuron(hparams, nucleus, metagraph)
    neuron.serve()

    tl = Timeloop()
    set_timed_loops(tl, hparams, neuron, metagraph)
    tl.start(block=False)
    logger.info("Started timers...")

    def tear_down(_hparams, _neuron, _nucleus, _metagraph):
        logger.debug("Tear down...")
        del _neuron
        del _nucleus
        del _metagraph
        del _hparams

    try:
        logger.info("Begin wait on main...")
        while True:
            logger.debug('heartbeat')
            time.sleep(100)
    except KeyboardInterrupt:
        logger.debug("Neuron stopped with keyboard interrupt.")
        tear_down(hparams, neuron, nucleus, metagraph)

    except Exception as e:
        logger.error("Neuron stopped with interrupt on error: {}".format(e))
        tear_down(hparams, neuron, nucleus, metagraph)
Example #20
    def __init__(self, price_f_config, config_net, connection_net):

        self.options = price_f_config
        self.config_network = config_net
        self.connection_network = connection_net

        # connection network is the brownie connection network
        # config network is the environment we want to connect to
        network_manager.connect(connection_network=self.connection_network,
                                config_network=self.config_network)

        address_medianizer = self.options['networks'][self.config_network]['addresses']['MoCMedianizer']
        address_pricefeed = self.options['networks'][self.config_network]['addresses']['PriceFeed']

        log.info("Starting with MoCMedianizer: {}".format(address_medianizer))
        log.info("Starting with PriceFeed: {}".format(address_pricefeed))

        self.app_mode = self.options['networks'][self.config_network]['app_mode']

        # simulations don't write to the blockchain
        self.is_simulation = False
        if 'is_simulation' in self.options:
            self.is_simulation = self.options['is_simulation']

        # Min prices source
        self.min_prices_source = 1
        if 'min_prices_source' in self.options:
            self.min_prices_source = self.options['min_prices_source']

        # backup writes
        self.backup_writes = 0

        self.tl = Timeloop()
        self.last_price = 0.0
        self.last_price_timestamp = datetime.datetime.now() - datetime.timedelta(seconds=300)

        self.price_source = PriceEngines(self.options['networks'][self.config_network]['price_engines'],
                                         log=log,
                                         app_mode=self.app_mode,
                                         min_prices=self.min_prices_source)
Example #21
    @classmethod
    def driver(cls):

        cnx = sqlite3.connect('./api/api.db')
        df = pd.read_sql_query("SELECT * FROM binance_data", cnx)

        df['Open_time'] = pd.to_datetime(df['Open_time'])
        df['Close_time'] = pd.to_datetime(df['Close_time'])
        df.rename(columns={'Open_time': 'Date'}, inplace=True)
        df = df.drop(columns=['Quote_asset_volume', 'Buy_base_asset', 'Buy_quote_asset', 'Ignore'])
        Trading.counter = 0

        start_time = df['Date'].iloc[0]
        last_index = df.shape[0] - 1
        end_time = df['Date'].iloc[last_index]

        tl = Timeloop()

        @tl.job(interval=timedelta(seconds=2))
        def paper_trade():
            sliced_df = Trading.get_next_db(df)
            last_date = sliced_df['Date'].iloc[sliced_df.shape[0] - 1]
            temp = MAOMA.maomasig(sliced_df, start_time, end_time, 'Close', 5, 15)
            net = Portfolio.pf_manage(temp, 'Close')
            print("portfolio value: ", net)
            print("last date is : ", last_date)

        tl.start()
        while True:
            try:
                time.sleep(1)
            except KeyboardInterrupt:
                tl.stop()
                break
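The manual sleep/KeyboardInterrupt loop at the end mirrors what Timeloop itself does when started blocking, so the tail of driver() could collapse to a single call (a sketch):

        tl.start(block=True)  # sleeps internally, catches SIGINT/SIGTERM, then stops all jobs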
Example #22
def main(hparams):
    metagraph = Metagraph(hparams)
    modelfn = Modelfn(hparams)
    nucleus = Nucleus(hparams, modelfn)
    dendrite = Dendrite(hparams, metagraph)
    dataset = Dataset(hparams)
    neuron = Neuron(hparams, nucleus, dendrite, dataset)
    synapse = Synapse(hparams, neuron, metagraph)

    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    bittensor_grpc.add_BittensorServicer_to_server(synapse, server)
    server.add_insecure_port(hparams.bind_address + ":" + hparams.port)
    server.start()

    neuron.start_training()

    tl = Timeloop()
    set_timed_loops(tl, hparams, neuron)
    tl.start(block=False)
    logger.info('Started Timers.')

    try:
        logger.info('Begin wait on main...')
        while True:
            logger.debug('heartbeat')
            time.sleep(100)

    except KeyboardInterrupt:
        logger.debug('Neuron stopped with keyboard interrupt.')
        server.stop(2)
        del neuron
        del metagraph
        del synapse

    except Exception as e:
        logger.error('Neuron stopped with interrupt on error: ' + str(e))
        server.stop(2)
        del neuron
        del metagraph
        del synapse
Example #23
class Reactor:
    """ Handles the waiting and firing of events to periodically anchor the the root hash.

    Assumes:
    - The chain/py_anchor_root.js file has all the correct variables defined
    """
    def __init__(self, tree, interval=60):
        """sets up the async event loop
        Parameters:
        tree - the same merkle tree instance being anchored
        interval - the optional parameter for setting the time interval of the anchor.

        Returns: an object that tracks the current phase, spawns subtasks
        """
        self.tree = tree
        self.loop = Timeloop()
        self.chain = Chain()
        self.interval = interval
        logger.info("The reactor was started at time: {time}",
                    time=datetime.timestamp(datetime.now()))

    def start(self):

        # Kick off the anchoring event every self.interval seconds (default 60)
        @self.loop.job(interval=timedelta(seconds=self.interval))
        def anchor_tree():
            root = self.tree.get_current_root().decode('utf-8')
            self.chain.anchor(root)
            logger.info("anchored root: {} \t at  time: {} \t block: {}", root,
                        datetime.timestamp(datetime.now()), self.chain.block)

        self.loop.start(block=True)

    def stop(self):
        """always ensure the loop has been manually stopped"""
        logger.info("timeloop stopping at time: {} \t block:{}",
                    datetime.timestamp(datetime.now()), self.chain.block)
        self.loop.stop()
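Putting the Reactor through a full cycle, following its own docstrings (a sketch; tree is an existing merkle tree instance):

reactor = Reactor(tree, interval=60)
try:
    reactor.start()  # blocks; anchors the current root every 60 seconds
finally:
    reactor.stop()   # "always ensure the loop has been manually stopped"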
Example #24
    def __init__(self, **kwargs):
        self._host = kwargs.get('host', 'http://127.0.0.1')
        self._port = kwargs.get('port', 5984)
        self.address = f'{self._host}:{self._port}'

        self.custom_headers = kwargs.get('custom_headers',
                                         {})  # TODO: implement

        self._keep_alive = kwargs.get('keep_alive', 0)
        self._keep_alive_timeloop = Timeloop()
        self._keep_alive_timeloop.logger.setLevel('WARNING')

        self._name = kwargs.get('username', None)
        self._password = kwargs.get('password', None)
        self.auth_token = kwargs.get('auth_token', None)

        self._auto_connect = kwargs.get('auto_connect', False)

        self._basic_auth = kwargs.get('basic_auth',
                                      False)  # TODO: implement basic auth
        self._admin_party = kwargs.get('admin_party',
                                       False)  # TODO: implement admin party

        self._headers = {
            'Content-type': 'application/json',
            'Accept': 'application/json'
        }

        # reference to this object is required for the CouchDBDecorators.endpoint to be able to update the auth token
        self.session = self

        # TODO: implement a generic Error class to hold error information that consumer can check
        if (self._auto_connect is True and self._basic_auth is False):
            self.authenticate(data={
                'name': self._name,
                'password': self._password
            })
Example #25
class AsyncAgentPrototype():
    # sel values used below: 0 = both, 1 = main only, 2 = self_check only
    def __init__(self, t1=None, t2=None):
        self.__t_main = Timeloop()
        self.__t_selfcheck = Timeloop()

        self._intv_main = t1
        self._intv_check = t2

    # @self.__t_main.job(interval=timedelta(seconds=_intv_main))
    # def sample_job_every_2s(self):
    #     print("2s job current time : {}".format(time.ctime()))

    # @self.__t_selfcheck.job(interval=timedelta(seconds=5))
    # def run_t(self):
    #     print("5s job current time : {}".format(time.ctime()))

    @staticmethod
    def __sec2int(sec):
        return timedelta(seconds=sec)

    def start(self, sel=0):
        if sel != 2:
            self.__t_main.start()
        if sel != 1:
            self.__t_selfcheck.start()

    def stop(self, sel=0):
        if sel != 2:
            self.__t_main.stop()
        if sel != 1:
            self.__t_selfcheck.stop()

    # Assumption: these event loops always hold one job. R: get job id on attach
    def set_interval(self, t, sel=0):
        # TODO: clean this up by parameterizing loop.job
        if sel != 2 and self.__t_main.jobs:
            self.__t_main.jobs[0].interval = self.__sec2int(t)
        if sel != 1 and self.__t_selfcheck.jobs:
            self.__t_selfcheck.jobs[0].interval = self.__sec2int(t)
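set_interval works by mutating Job.interval in place; timeloop's job thread re-reads its interval on each wait, so the change applies from the next tick. The underlying idea in isolation (a sketch):

tl = Timeloop()

@tl.job(interval=timedelta(seconds=2))
def tick():
    print("tick")

tl.start(block=False)
tl.jobs[0].interval = timedelta(seconds=10)  # takes effect on the next wait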
Example #26
def sync(key_file, user, remote_out_dir, local_sync_dir, every):

    if every == 0:
        # Sync only one time.
        sync_once(key_file, user, remote_out_dir, local_sync_dir)
    else:
        # Set up timer to sync regularly.
        tl = Timeloop()
        tl._add_job(
            sync_once,
            timedelta(seconds=every),
            key_file,
            user,
            remote_out_dir,
            local_sync_dir,
        )
        tl.start(block=True)
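The else branch forwards sync_once's arguments positionally through the private _add_job. The public decorator with a closure expresses the same schedule (a sketch of that branch):

        tl = Timeloop()

        @tl.job(interval=timedelta(seconds=every))
        def sync_job():
            sync_once(key_file, user, remote_out_dir, local_sync_dir)

        tl.start(block=True)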
Example #27
from settings import *
from mongo_config import weather_data_collection, pollution_data_collection
from timeloop import Timeloop
from datetime import timedelta
import requests
import json
from rockset import Client, ParamDict

tl = Timeloop()


def get_weather_data():
    """ get weather data from climacell """
    url = "https://api.climacell.co/v3/weather/realtime"
    querystring = {
        "lat": "39.9042",
        "lon": "116.4074",
        "unit_system": "us",
        "fields":
        "precipitation,wind_gust,humidity,wind_direction,precipitation_type,visibility,cloud_cover,cloud_base,cloud_ceiling,weather_code,feels_like,temp",
        "apikey": CLIMACELL_API_KEY
    }
    weather_response = requests.request("GET", url, params=querystring)
    return weather_response.json()


def get_air_pollution_data():
    """ get air quality data from climacell """
    url = "https://api.climacell.co/v3/weather/realtime"
    querystring = {
        "lat": "39.9042",
Example #28
    def notify(self, index):

        print(f"{self.connection_status[index][1]}{self.connection_status[index][0]}", self.color_reset)

        if self.platform == "windows":
            self.notifier_windows.show_toast(title="", msg=f"Connection Status: {self.connection_status[index][0]}", duration=self.duration, icon_path=self.ICON_PATH)

        elif self.platform == "linux":
            self.n.update(f"Connection Status: {self.connection_status[index][0]}", icon=self.ICON_PATH)
            self.n.show()


router = DLink2730URouterStatus()

# Todo: start timeloop
timeloop = Timeloop()

@timeloop.job(interval=timedelta(seconds=router.duration))
def status_notifier():

    status = router.get_connection_status()

    # Already reported OR encountered an error
    if (router.buffer == status) or (status == router.ignore):
        pass
    # Success
    else:
        router.notify(status)

        # Update buffer
        router.buffer = status

Example #29
from web_shop.bot.main import bot
from telebot.apihelper import ApiException
from web_shop.db.models import Customer
from .config import CHECK_TIME_INTERVAL

from timeloop import Timeloop
from datetime import timedelta

tl = Timeloop()


@tl.job(interval=timedelta(seconds=CHECK_TIME_INTERVAL))
def job_del_customer_if_inactive():
    for customer in Customer.objects(is_archived=False):
        try:
            bot.send_chat_action(action='typing', chat_id=customer.user_id)
        except ApiException:
            customer.archive()
            print(f'Archived customer {customer.user_id}')

# tl.start() # block=True
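The job above is registered at import time; the commented-out start leaves the caller in charge. Starting the archiver would look like this (a sketch; the module path is hypothetical):

from web_shop.bot.inactivity_check import tl  # hypothetical module path

tl.start(block=True)  # checks customers every CHECK_TIME_INTERVAL seconds until interrupted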
Example #30
def run(
        debug_mode,
        user_config: dict,
        routes: List[Dict[str, str]],
        extra_routes: List[Dict[str, str]],
        start_date: str,
        finish_date: str,
        candles: dict = None,
        chart: bool = False,
        tradingview: bool = False,
        full_reports: bool = False,
        csv: bool = False,
        json: bool = False
) -> None:
    if not jh.is_unit_testing():
        # at every second, we check to see if it's time to execute stuff
        status_checker = Timeloop()
        @status_checker.job(interval=timedelta(seconds=1))
        def handle_time():
            if process_status() != 'started':
                raise exceptions.Termination
        status_checker.start()

    from jesse.config import config, set_config
    config['app']['trading_mode'] = 'backtest'

    # debug flag
    config['app']['debug_mode'] = debug_mode

    # inject config
    if not jh.is_unit_testing():
        set_config(user_config)

    # set routes
    router.initiate(routes, extra_routes)

    store.app.set_session_id()

    register_custom_exception_handler()

    # clear the screen
    if not jh.should_execute_silently():
        click.clear()

    # validate routes
    validate_routes(router)

    # initiate candle store
    store.candles.init_storage(5000)

    # load historical candles
    if candles is None:
        candles = load_candles(start_date, finish_date)
        click.clear()

    if not jh.should_execute_silently():
        sync_publish('general_info', {
            'session_id': jh.get_session_id(),
            'debug_mode': str(config['app']['debug_mode']),
        })

        # candles info
        key = f"{config['app']['considering_candles'][0][0]}-{config['app']['considering_candles'][0][1]}"
        sync_publish('candles_info', stats.candles_info(candles[key]['candles']))

        # routes info
        sync_publish('routes_info', stats.routes(router.routes))

    # run backtest simulation
    simulator(candles, run_silently=jh.should_execute_silently())

    # hyperparameters (if any)
    if not jh.should_execute_silently():
        sync_publish('hyperparameters', stats.hyperparameters(router.routes))

    if not jh.should_execute_silently():
        if store.completed_trades.count > 0:
            sync_publish('metrics', report.portfolio_metrics())

            routes_count = len(router.routes)
            more = f"-and-{routes_count - 1}-more" if routes_count > 1 else ""
            study_name = f"{router.routes[0].strategy_name}-{router.routes[0].exchange}-{router.routes[0].symbol}-{router.routes[0].timeframe}{more}-{start_date}-{finish_date}"
            store_logs(study_name, json, tradingview, csv)

            if chart:
                charts.portfolio_vs_asset_returns(study_name)

            sync_publish('equity_curve', charts.equity_curve())

            # QuantStats' report
            if full_reports:
                price_data = []
                # load close candles for Buy and hold and calculate pct_change
                for index, c in enumerate(config['app']['considering_candles']):
                    exchange, symbol = c[0], c[1]
                    if exchange in config['app']['trading_exchanges'] and symbol in config['app']['trading_symbols']:
                        # fetch from database
                        candles_tuple = Candle.select(
                            Candle.timestamp, Candle.close
                        ).where(
                            Candle.timestamp.between(jh.date_to_timestamp(start_date),
                                                     jh.date_to_timestamp(finish_date) - 60000),
                            Candle.exchange == exchange,
                            Candle.symbol == symbol
                        ).order_by(Candle.timestamp.asc()).tuples()

                        candles = np.array(candles_tuple)

                        timestamps = candles[:, 0]
                        price_data.append(candles[:, 1])

                price_data = np.transpose(price_data)
                price_df = pd.DataFrame(price_data, index=pd.to_datetime(timestamps, unit="ms"), dtype=float).resample(
                    'D').mean()
                price_pct_change = price_df.pct_change(1).fillna(0)
                bh_daily_returns_all_routes = price_pct_change.mean(1)
                quantstats.quantstats_tearsheet(bh_daily_returns_all_routes, study_name)
        else:
            sync_publish('equity_curve', None)
            sync_publish('metrics', None)

    # close database connection
    from jesse.services.db import database
    database.close_connection()
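One caveat in the watchdog at the top of run(): raising exceptions.Termination inside a Timeloop job unwinds only the timer's own thread; jesse's register_custom_exception_handler is what surfaces it to the application. A generic sketch of that kind of forwarding (assumes Python 3.8+; Termination stands in for any shutdown exception):

import threading
import _thread

def _watchdog_excepthook(args):
    # args bundles exc_type / exc_value / exc_traceback / thread.
    if issubclass(args.exc_type, exceptions.Termination):
        _thread.interrupt_main()  # re-raise as KeyboardInterrupt in the main thread
    else:
        threading.__excepthook__(args)  # fall back to the default handler

threading.excepthook = _watchdog_excepthook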