Esempio n. 1
0
 def test_find_breakout_hh50_too_old(self):
     """A hh50 breakout dated before the cutoff is not a recent breakout."""
     history = [
         Event(Quote({'date': '2010-09-01'}), 'hh20'),
         Event(Quote({'date': '2010-09-20'}), 'hh50'),
         Event(Quote({'date': '2010-11-01'}), 'eod'),
     ]
     self.assertFalse(find_recent_breakout(history, '2010-09-25'))
Esempio n. 2
0
 def test_find_breakout_a_stop(self):
     """A breakout followed by a stop event is not reported."""
     history = [
         Event(Quote({'date': '2010-09-01'}), 'hh20'),
         Event(Quote({'date': '2010-09-20'}), 'hh50'),
         Event(Quote({'date': '2010-11-01'}), 'stop'),
     ]
     self.assertFalse(find_recent_breakout(history, '2010-09-15'))
def init():
    """Initialise the routing simulation: read the TRACE level, seed the
    RNG, initialise each node's routing table and, if enabled, schedule
    two future link-change events."""
    #int i
    #float sum, avg
    #struct event *evptr;
    
    global TRACE

    # Debug/trace verbosity, entered interactively by the user.
    TRACE = int(input("Enter TRACE:"))

    random.seed(9999)              # init random number generator
    clocktime = 0.0                # initialize time to 0.0
    # NOTE(review): unlike TRACE, clocktime is NOT declared global, so this
    # assignment only creates a local that is never read — if a module-level
    # clock exists it is not reset here; confirm intent.
    rtinit0()
    rtinit1()
    rtinit2()
    rtinit3()

    # initialize future link changes
    if LINKCHANGES == 1:
        # Two link-change events, at t=10000 and t=20000; eventity=-1 marks
        # them as not belonging to any particular node.
        event = Event(evtime=10000.0,
                      evtype=LINK_CHANGE,
                      eventity=-1,
                      rtpktptr=None)
        insertevent(event)
        event = Event(evtime=20000.0,
                      evtype=LINK_CHANGE,
                      eventity=-1,
                      rtpktptr=None)
        insertevent(event)
Esempio n. 4
0
 def test_find_breakout_hh20_preceeded_by_exit(self):
     """A hh20 breakout preceded by an exit event is not reported."""
     history = [
         Event(Quote({'date': '2010-08-01'}), 'exit'),
         Event(Quote({'date': '2010-09-01'}), 'hh20'),
         Event(Quote({'date': '2010-11-01'}), 'eod'),
     ]
     self.assertFalse(find_recent_breakout(history, '2010-09-15'))
Esempio n. 5
0
 def test_find_breakout_hh20_preceeded_by_stop(self):
     """A hh20 breakout after a prior stop is a valid recent breakout."""
     events = [
         Event(Quote({'date': '2010-08-01'}), 'stop'),
         Event(Quote({'date': '2010-09-20'}), 'hh20'),
         Event(Quote({'date': '2010-11-01'}), 'eod'),
     ]
     # assertEquals is a deprecated alias, removed in Python 3.12.
     self.assertEqual(events[1],
                      find_recent_breakout(events, '2010-09-15'))
Esempio n. 6
0
def _receive_event(ipc_manager_mailbox, cpu, data, size):
    """Decode *data* into an Event and forward it to the IPC manager mailbox.

    Events whose action is None are dropped; ``cpu`` and ``size`` are
    accepted by the callback signature but unused here.
    """
    event = Event()
    event.setBaseEvent(data)
    if event.action is not None:
        try:
            ipc_manager_mailbox.put_nowait(event)
        except QueueEmpty:
            # NOTE(review): put_nowait raises a *Full*-type exception, not
            # Empty, so this handler likely never fires — confirm the
            # intended exception class.
            # TODO: Increment metric
            pass
Esempio n. 7
0
def find_events(symbol):
    """Scan *symbol*'s quote history and return a list of trading Events.

    Walks the quotes chronologically and emits:
      * 'hh50' / 'hh20' — the quote breaks the 50/20-day high,
      * 'stop'          — the low trades through the current stop,
      * 'exit'          — the close is in profit but below the 10-day low,
      * 'eod'           — data ends while a position is still open.

    Fixes over the original: ``is None`` instead of ``== None``; the
    hh50/hh20 flags are reset to False (matching their initial state)
    instead of None; commented-out prints removed.
    """
    events = []
    quotes = Quote.get_quotes(symbol)
    if not quotes:
        return events

    quote = quotes[0]
    stop = None
    entry_price = None
    hh50 = False
    hh20 = False
    while quote:
        prev = quote
        quote = quote.next()

        if not quote:  # no more data
            if entry_price:
                # Position still open at end of data.
                events.append(Event(prev, 'eod', prev.close))
            continue

        if not hh50 and quote.is_above_50_day_high():
            events.append(Event(quote, 'hh50'))
            hh50 = True
            if entry_price is None:
                (entry_price, stop) = get_entry_price_and_stop(quote)

        if not hh20 and quote.is_above_20_day_high():
            events.append(Event(quote, 'hh20'))
            hh20 = True
            if entry_price is None:
                (entry_price, stop) = get_entry_price_and_stop(quote)

        if hh20 and quote.low < stop:  # hit the stop
            events.append(Event(quote, 'stop', stop))
            stop = None
            entry_price = None
            hh50 = False
            hh20 = False
            continue

        # In profit but closing below the 10-day low -> take the exit.
        if (hh20 and quote.close > entry_price
                and quote.close < quote.get_indicator().ll_10):
            events.append(Event(quote, 'exit', quote.get_indicator().ll_10))
            stop = None
            entry_price = None
            hh50 = False
            hh20 = False
            continue
    return events
Esempio n. 8
0
def read_logs(paths):
    """
    Scan one or more log directories for event sub-directories.

    Sub-directories whose names match the configured event regex are
    turned into Event objects; everything else is logged and skipped.

    :param paths:   a single path or a list of paths to search
    :return:        list of the Event objects found
    """
    if isinstance(paths, str):
        # Accept a bare string as a one-element list.
        paths = [paths]

    found = []
    for path in paths:
        if not os.path.isdir(path):
            logging.warning('Not a directory: %s', path)
            continue
        logging.info('Reading %s', os.path.abspath(path))
        for name in os.listdir(path):
            candidate = os.path.join(path, name)
            if not os.path.isdir(candidate):
                continue  # plain files are ignored
            if re.match(config['event']['regex'], name) is not None:
                found.append(Event(candidate))
            else:
                logging.warning('Not a valid event directory: %s',
                                candidate)

    return found
Esempio n. 9
0
    def _parse_process_exit(self, messages):
        """Record a 'crash' timeline event for every port-server restart
        message in *messages*, extracting the server name and exit status
        when available."""
        restart_re = re.compile(
            r'Port server .* on node .* exited with status [0-9]+\. '
            'Restarting')
        server_re = re.compile(r'Port server (.*) on node')
        status_re = re.compile(r'exited with status ([0-9]+)')
        for message in messages:
            if not restart_re.search(message):
                continue

            server_match = server_re.search(message)
            exit_server = (server_match.group(1) if server_match
                           else 'unknown_port_server')

            status_match = status_re.search(message)
            exit_status = (status_match.group(1) if status_match
                           else 'unknown_exit_status')

            descr = '{} exited with status {} and restarted'.format(
                exit_server, exit_status)
            self.timeline.add_event(
                Event(message, 'crash', descr,
                      self.timeline.default_node_name))
Esempio n. 10
0
    def data_received(self, data):
        """Protocol callback: decode incoming bytes, try to parse a
        quartet and, if one is found, enqueue a 'flush' Event for the
        IPC manager."""
        message = data.decode()
        print('Data received: {!r}'.format(message))
        quartet = self._parse_quartet(message)

        if quartet is not None:
            event = Event()
            event.action = 'flush'
            event.quartet = quartet
            # Carry the transport so the consumer can write a response back.
            event.transport = self.transport

            try:
                self.ipc_manager_mailbox.put_nowait(event)
            except QueueEmpty:
                # NOTE(review): put_nowait raises a *Full*-type exception,
                # not Empty — verify the intended exception class.
                # TODO: Increment metric
                pass
Esempio n. 11
0
def interpolate_event(e):
    """Fill the frame gaps of an event by linear interpolation.

    Contiguous frames are copied through unchanged; for a gap of size
    ``fd`` the missing frame indices are generated with ``np.arange`` and
    the azimuth/elevation values with ``np.linspace``.

    NOTE(review): the loop iterates over pairwise frame distances and only
    pushes the *left* endpoint of each pair, so the event's final frame is
    never appended; also ``linspace(..., fd)`` includes its right endpoint
    while ``arange(start, end)`` excludes it, so angles can be offset by
    one sample across a gap — confirm both are intended.
    """

    eventNumber = e.get_eventNumber()
    frames = e.get_frames()  # assumed numpy arrays — TODO confirm
    azis = e.get_azis()
    eles = e.get_eles()

    new_frames = []
    new_azis = []
    new_eles = []

    # Distance between each consecutive pair of frames.
    frame_dist = frames[1:] - frames[:-1]
    for fd_idx, fd in enumerate(frame_dist):
        if fd == 1:
            # contiguous, set next
            new_frames.append(frames[fd_idx])
            new_azis.append(azis[fd_idx])
            new_eles.append(eles[fd_idx])
        else:
            start = frames[fd_idx]
            end = frames[fd_idx + 1]
            new_frames.extend(np.arange(start, end, 1).tolist())
            new_azis.extend(
                np.linspace(azis[fd_idx], azis[fd_idx + 1], fd).tolist())
            new_eles.extend(
                np.linspace(eles[fd_idx], eles[fd_idx + 1], fd).tolist())

    # -1: placeholder id — presumably re-assigned by the caller; verify.
    return Event(-1, eventNumber, np.asarray(new_frames), np.asarray(new_azis),
                 np.asarray(new_eles))
Esempio n. 12
0
 def ready(self, e):
     '''
     Handle the end of loading for ship ``e.details`` and schedule its
     departure from the dock.
     '''
     log.info(f'Ship number {e.details} is already loaded', 'Ready ')
     self.time = max(self.time, e.time)
     departure = Event(self.time, e.details, self.depart)
     self.events.append(departure)
     return True
Esempio n. 13
0
    def _parse_buckets_not_ready(self, instances):
        """Emit a 'fail' timeline event for each "buckets became not
        ready" log instance.

        Two layouts are handled: the single-line form where the bucket
        list fits on the first log line, and the multi-line form where
        the list continues on following lines until a closing ``"]``.

        Fix: the original mixed raw and non-raw string fragments in the
        patterns, producing invalid '\\[' escape sequences (a
        SyntaxWarning on modern Python); all fragments are raw now, with
        identical regex semantics.
        """
        pat_bucket_not_ready = re.compile(
            r"The following buckets became not ready on node "
            r"'(.*)': \[(.*)\], .*\]$")
        pat_bucket_not_ready_mult = re.compile(
            r"The following buckets became not ready on node '"
            r'(.*)\': \["([^"]*)",$')
        pat_bucket_not_ready_mult_middle = re.compile(r' *"(.*)",$')
        pat_bucket_not_ready_mult_end = re.compile(r' *"(.*)"\]')

        for instance in instances:
            first_line = instance[0]
            single = pat_bucket_not_ready.search(first_line)
            multi = pat_bucket_not_ready_mult.search(first_line)

            if single:
                on = extract_nodename(single.group(1),
                                      self.timeline.default_node_name)
                buckets = single.group(2).replace('"', '')
                self.timeline.add_event(
                    Event(first_line, 'fail',
                          'buckets not ready on {}: `{}`'.format(on, buckets),
                          self.timeline.default_node_name))

            elif multi:
                on = extract_nodename(multi.group(1),
                                      self.timeline.default_node_name)
                buckets = multi.group(2).replace('"', '')

                # Accumulate bucket names from continuation lines; the
                # closing line triggers the event emission.
                for line in instance[1:]:
                    m = pat_bucket_not_ready_mult_end.search(line)
                    if m:
                        buckets += ', ' + m.group(1)
                        self.timeline.add_event(
                            Event(
                                first_line, 'fail',
                                'buckets not ready on {}: `{}`'.format(
                                    on,
                                    buckets), self.timeline.default_node_name))
                        continue

                    m = pat_bucket_not_ready_mult_middle.search(line)
                    if m:
                        buckets += ', ' + m.group(1)
                        continue
Esempio n. 14
0
def test_sparse_csr(m, n, k, nnz, test_count):
    """Benchmark sparse-CSR x dense matmul: run ``test_count`` timed
    iterations and return the mean elapsed time in ms."""
    timer_start = Event(enable_timing=True)
    timer_stop = Event(enable_timing=True)

    csr = gen_sparse_csr((m, k), nnz)
    dense = torch.randn(k, n, dtype=torch.double)

    elapsed = []
    for _ in range(test_count):
        timer_start.record()
        csr.matmul(dense)
        timer_stop.record()
        elapsed.append(timer_start.elapsed_time(timer_stop))

    return sum(elapsed) / len(elapsed)
Esempio n. 15
0
    def __init__(self):
        """Initialise connection state, the per-message-type handler map
        and the notification events."""
        # Connection / identification state, filled in later.
        self._address = None
        self._name = None
        self._device = None
        self._subscriber = None
        self._writer = None

        # One (initially empty) handler list per message type.
        self._message_map = {i: [] for i in range(MessageType.Count)}

        # Notification events.
        self.face_up_changed = Event()
        self.battery_voltage_changed = Event()

        self._dtype = None
        self._battery_voltage = -1  # -1: no voltage reading received yet
        self._face_up = 0
Esempio n. 16
0
 def arrive(self):
     '''
     Schedule the arrival of the next ship, if any remain.
     '''
     if self.count == self.n:
         return True
     log.debug(f"Generating the arrival time of ship number {self.count}", "Arrive")
     arrival_time = self.time + exponential(8) * 60
     self.events.append(Event(arrival_time, self.count, self.enqueue))
     self.count += 1
     return True
Esempio n. 17
0
 def _parse_flush_start(self, flushes):
     """Record a 'flush' timeline event when a local bucket flush begins."""
     flush_start_re = re.compile(
         r'janitor_agent-(.*)<.*Doing local bucket flush')
     for entry in flushes:
         match = flush_start_re.search(entry)
         if not match:
             continue
         bucket = match.group(1)
         self.timeline.add_event(
             Event(entry, 'flush',
                   'starting to flush bucket `{}`'.format(bucket),
                   self.timeline.default_node_name))
Esempio n. 18
0
 def dock(self, e):
     '''
     Move ship ``e.details`` into a dock and schedule the moment its
     cargo finishes loading (the `ready` event).
     '''
     log.info(f'Ship number {e.details} is loading its cargo', 'Dock  ')
     self.time = max(self.time, e.time)
     self.docks -= 1   # the ship now occupies one dock
     self.bussy = False  # the tug is free again after the move
     # NOTE(review): unlike `ready`/`depart`/`move`, the scheduled time is
     # not offset by self.time — confirm load_time returns an absolute time.
     time = self.load_time(self.size[e.details])
     self.events.append(Event(time, e.details, self.ready))
     return True
Esempio n. 19
0
 def _parse_flush_finish(self, flushes):
     """Record a 'flush' timeline event when a local bucket flush ends."""
     flush_end_re = re.compile(
         r'janitor_agent-(.*)<.*Local flush is done')
     for entry in flushes:
         match = flush_end_re.search(entry)
         if not match:
             continue
         bucket = match.group(1)
         self.timeline.add_event(
             Event(entry, 'flush',
                   'flush complete for bucket `{}`'.format(bucket),
                   self.timeline.default_node_name))
Esempio n. 20
0
 def __init__(self, timelines=None, input_dict=None):
     """Build a timeline either by merging the events of *timelines* or
     by restoring the serialized events in *input_dict*; otherwise empty."""
     self.events = []
     self.default_node_name = None
     if timelines:
         for timeline in timelines:
             self.events.extend(timeline.events)
     elif input_dict:
         for event_dict in input_dict['events']:
             self.events.append(Event(input_dict=event_dict))
Esempio n. 21
0
 def _parse_assertions(self, assertions):
     """Add an 'assert' timeline event for each memcached assertion
     failure found in *assertions* (the assertion text is in field 4).

     Fix: the second pattern fragment was not a raw string, so '\\[' was
     an invalid string escape (a SyntaxWarning on modern Python); both
     fragments are raw now, with identical regex semantics.
     """
     extract_assert_regex = re.compile(r'(assertion|asssertion|Assert) '
                                       r'failed \[(?P<assert>[^\[]+)\] at')
     for assertion in assertions:
         m = extract_assert_regex.search(assertion[4])
         if m:
             self.timeline.add_event(
                 Event(
                     assertion[0], 'assert',
                     'Memcached assertion: `{}`'.format(m.group('assert')),
                     self.timeline.default_node_name))
Esempio n. 22
0
 def _parse_node_add(self, messages):
     """Record a 'topology' event for every node-addition message.

     Non-matching messages are still recorded, tagged 'unknown_node'
     (assumes *messages* was pre-filtered upstream — confirm).
     """
     node_added_re = re.compile(r"adding node '(.*)' to nodes_wanted")
     for message in messages:
         match = node_added_re.search(message)
         if match:
             added_node = extract_nodename(match.group(1),
                                           self.timeline.default_node_name)
         else:
             added_node = 'unknown_node'
         self.timeline.add_event(
             Event(message, 'topology', 'added node {}'.format(added_node),
                   self.timeline.default_node_name))
Esempio n. 23
0
def events():
    """Render the events page from every stored EventModel row."""
    rows = []
    for record in EventModel.query.all():
        rows.append({
            'time': record.time,
            'type': Event(record.type).name,
            'task_id': record.task_id,
            'task': record.task,
            'flag_submit_id': record.flag_submit_id,
            'flag_submit': record.flag_submit,
            'extra': record.extra,
        })
    return render_template('events.html', events=rows)
Esempio n. 24
0
 def _parse_time_jumps(self, time_jumps):
     """Record a 'time_jump' event for each detected jump, using the
     latency in ms when ``pat_ms`` can extract it, else '?'."""
     for entry in time_jumps:
         match = pat_ms.search(entry)
         action_time = match.group(1) if match else '?'
         self.timeline.add_event(
             Event(
                 entry, 'time_jump',
                 'detected time jump / erlang latency of {}ms'.format(
                     action_time), self.timeline.default_node_name))
Esempio n. 25
0
 def enqueue(self, e):
     '''
     Register a freshly arrived ship, schedule its move to a dock and
     chain the generation of the next arrival.
     '''
     ship = e.details
     self.size[ship] = get_size(self.prob)
     self.arrivals[ship] = e.time
     self.time = max(self.time, e.time)
     log.info(f'Ship number {e.details} arrive to the port', 'Enqueue ')
     self.events.append(Event(self.time, ship, self.move))
     bublle_sort_last(self.events)
     return self.arrive()
Esempio n. 26
0
    def do_GET(self):
        """Handle GET requests: /set parses the query string into a
        settings message and fires it; anything else gets a 404.

        Fixes over the original: the un-idiomatic ``None != re.search``
        comparison is replaced with a truthiness test, and the duplicated
        response-writing code is factored into ``_respond``.
        """
        if re.search("/set", self.path):
            parsed = urllib.parse.urlparse(self.path)
            params = urllib.parse.parse_qs(parsed.query)

            Event.fire(Event.SettingsMessage(params))

            self._respond(200, str(params))
        else:
            self._respond(404, "404 requested endpoint not available")

    def _respond(self, status, body):
        # Write a complete text/html response with the given status/body.
        self.send_response(status)
        self.send_header('Content-Type', 'text/html')
        self.end_headers()
        self.wfile.write(body.encode('utf-8'))
Esempio n. 27
0
 def _parse_disk_space(self, messages):
     """Record a 'fail' event for each disk-usage warning, extracting
     the disk, the node and the usage percentage."""
     usage_re = re.compile(
         r'Usage of disk "(.*)" on node "(.*)" is around ([0-9]+%)')
     for message in messages:
         match = usage_re.search(message)
         if not match:
             continue
         disk, on_node, perc = match.group(1, 2, 3)
         descr = '{} usage on {} is {}'.format(disk, on_node, perc)
         self.timeline.add_event(
             Event(message, 'fail', descr,
                   self.timeline.default_node_name))
Esempio n. 28
0
 def move(self, e):
     '''
     Tow ship ``e.details`` from the port queue toward a dock, if a dock
     is free and the tug is idle; otherwise report failure.
     '''
     if (self.docks == 0) or self.bussy:
         # BUG FIX: the message was missing the f prefix, so {e.details}
         # was logged literally instead of being interpolated.
         log.debug(f"Imposible to move ship number {e.details} at this moment", "Move ")
         return False
     self.go(1)
     log.info(f'Ship number {e.details} is being moved to a dock', 'Move  ')
     self.bussy = True
     # Towing takes an exponentially-distributed time (mean 2h, in minutes).
     time = self.time + exponential(2) * 60
     self.events.append(Event(time, e.details, self.dock))
     return True
Esempio n. 29
0
 def _parse_data_loss(self, messages):
     """Record a 'fail' event for each data-loss message, noting the
     bucket and the percentage of vbuckets affected.

     Fix: widened ``[0-9]%`` to ``[0-9]+%`` so multi-digit percentages
     (e.g. "25%") also match, consistent with _parse_disk_space.
     """
     pat_data_lost = re.compile(
         r'Data has been lost for ([0-9]+%) of vbuckets in bucket "(.*)"\.')
     for message in messages:
         m = pat_data_lost.search(message)
         if m:
             perc = m.group(1)
             bucket = m.group(2)
             descr = 'bucket {} has lost data for {} of vbuckets'.format(
                 bucket, perc)
             self.timeline.add_event(
                 Event(message, 'fail', descr,
                       self.timeline.default_node_name))
Esempio n. 30
0
 def depart(self, e):
     '''
     Tow ship ``e.details`` from its dock back out of the port and
     schedule its final `done` event, unless the tug is busy.
     '''
     if self.bussy:
         log.debug(f"Tug is bussy, ship number {e.details} stay at dock", "Depart")
         return False
     log.info(f'Ship number {e.details} is being moved back to the port', 'Depart')
     self.go(0)
     self.docks += 1   # the dock becomes free
     self.bussy = True  # the tug is engaged for the tow
     departure_time = self.time + exponential(1) * 60
     self.events.append(Event(departure_time, e.details, self.done))
     return True