Example #1
def main():
    time_1 = time_time()

    output = "Content-Type: text/html\n"

    ctxt = {
        "env": env,
        "location": {
            "href": "",
            "search": QUERY_STRING and ('?' + QUERY_STRING) or ''
        },
        "dom": TplHelper(),
        "tpl": DomHelper()
    }

    try:
        rslt = tplLoader.load(TPL_FILENAME,
                              relative_to=DOCUMENT_ROOT).generate(
                                  **ctxt).render('xhtml',
                                                 doctype='xhtml-transitional')

        output += "TmplRender-Time: %f s\n\n" % (time_time() - time_1)
        output += rslt

        del time_1
        del rslt
    except:
        output = "Content-Type: text/plain\n\n"
        output += str(sys.exc_info()[1]).replace(DOCUMENT_ROOT,
                                                 '$DOCUMENT_ROOT$')

    print output
    del output
Example #2
File: handler.py  Project: kindy/ehtml-py
def main ():
    time_1 = time_time()
    
    output = "Content-Type: text/html\n"
    
    ctxt = {"env": env,
            "location": {"href": "", "search": QUERY_STRING and ('?' + QUERY_STRING) or ''},
            "dom": TplHelper(),
            "tpl": DomHelper()
        }
    
    try:
        rslt = tplLoader.load(TPL_FILENAME, relative_to=DOCUMENT_ROOT).generate(**ctxt).render('xhtml', doctype='xhtml-transitional')
        
        output += "TmplRender-Time: %f s\n\n" % (time_time() - time_1)
        output += rslt
        
        del time_1
        del rslt
    except:
        output = "Content-Type: text/plain\n\n"
        output += str(sys.exc_info()[1]).replace(DOCUMENT_ROOT, '$DOCUMENT_ROOT$')
    
    print output
    del output
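Note: every snippet in this listing calls time_time(), i.e. the standard library time.time imported under an alias, as Example #37 shows explicitly. A minimal, self-contained sketch of the elapsed-time pattern used in Examples #1 and #2; render_template() here is a hypothetical stand-in for the template render call, not part of the original code:

from time import time as time_time


def render_template():
    # hypothetical stand-in for tplLoader.load(...).generate(...).render(...)
    return "<html></html>"


time_1 = time_time()                  # wall-clock start, seconds since the epoch
result = render_template()
elapsed = time_time() - time_1        # elapsed wall-clock time as a float
print("TmplRender-Time: %f s" % elapsed)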
Example #3
 def syncTime(self):
     if self.xem:
         self.xem.ActivateTriggerIn(0x40, 15)
         logging.getLogger(__name__).info("Time synchronized at {0}".format(time_time()))
     else:
         logging.getLogger(__name__).error("No time synchronization because FPGA is not available")
     self.timeTickOffset = time_time()        
Example #4
 def worker(self, worker_arg):
     fails = 0
     start_time = time_time()
     while time_time() - start_time < self.timeout and fails <= self.max_failure:
         try:
             f = self.q.get(timeout=self.wait_empty_secs)
             if f is self._done_signal:
                 break
         except TimeoutError:
             if self.done:
                 break
             fails += 1
             continue
         try:
             result = self.callback(f.arg, worker_arg)
         except self.handle_exceptions as err:
             logger.error(
                 'Raised {err!r}, worker_arg: {worker_arg}, todo_arg: {arg}'
                 .format_map(
                     dict(err=err,
                          worker_arg=repr(worker_arg)[:100],
                          arg=repr(f.arg)[:100])))
             result = self.fail_returned
         if result == self.fail_returned:
             self.q.put(f)
             fails += 1
             sleep(self.wait_empty_secs)
             continue
         else:
             f.set_result(result)
             if fails > 0:
                 fails -= 1
     self.q.put_nowait
Example #5
 def syncTime(self):
     if self.xem:
         self.xem.ActivateTriggerIn(0x40, 15)
         logging.getLogger(__name__).info("Time synchronized at {0}".format(
             time_time()))
     else:
         logging.getLogger(__name__).error(
             "No time synchronization because FPGA is not available")
     self.timeTickOffset = time_time()
Example #6
    def test(shift_register, clear=False):
        '''Time one write.'''
        if clear:
            shift_register.clear()
        a = time_time()
        shift_register.from_list([0 for i in range(len(shift_register) - 1)] + [1])
        return time_time() - a
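For micro-benchmarks like the one above, time.perf_counter() (Python 3.3+) is a monotonic clock with higher resolution than time.time(). A minimal sketch with a stand-in workload; the shift_register object is not reproduced here:

from time import perf_counter

start = perf_counter()
checksum = sum(i % 2 for i in range(100000))   # stand-in for shift_register.from_list(...)
print("one write took %.6f s (checksum=%d)" % (perf_counter() - start, checksum))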
Example #7
def main():
    parser = argparse_ArgumentParser("Input parameters")
    parser.add_argument("--input_file_name",
                        default="input_toy.yaml",
                        help="Input parameters file name")
    parser.add_argument("--graph_files_dir",
                        default="",
                        help="Graph files' folder path")
    parser.add_argument("--out_dir_name",
                        default="/results",
                        help="Output directory name")
    args = parser.parse_args()
    with open(args.input_file_name, 'r') as f:
        inputs = yaml_load(f, yaml_Loader)

    # Override output directory name if same as gen
    if args.out_dir_name or inputs['out_comp_nm'] == "/results/res":
        if not os_path.exists(inputs['dir_nm'] + args.out_dir_name):
            os_mkdir(inputs['dir_nm'] + args.out_dir_name)
        inputs['out_comp_nm'] = args.out_dir_name + "/res"

    inputs['graph_files_dir'] = ''
    if args.graph_files_dir:
        if not os_path.exists(inputs['dir_nm'] + args.graph_files_dir):
            os_mkdir(inputs['dir_nm'] + args.graph_files_dir)
        inputs['graph_files_dir'] = args.graph_files_dir

    with open(inputs['dir_nm'] + inputs['out_comp_nm'] + "_input.yaml",
              'w') as outfile:
        yaml_dump(inputs, outfile, default_flow_style=False)

    logging_basicConfig(filename=inputs['dir_nm'] + inputs['out_comp_nm'] +
                        "_logs.yaml",
                        level=logging_INFO)
    start_time_read = time_time()
    myGraph = read_graphs(inputs)
    read_time = time_time() - start_time_read

    myGraphName = inputs['dir_nm'] + inputs['graph_files_dir'] + "/res_myGraph"
    with open(myGraphName, 'wb') as f:
        pickle_dump(myGraph, f)

    tot_time = time_time() - start_time

    out_comp_nm = inputs['dir_nm'] + inputs['out_comp_nm']
    # Write to yaml file instead
    with open(out_comp_nm + '_runtime_performance.out', "a") as fid:
        print("Read network time (s) = ",
              read_time,
              "[",
              round(100 * float(read_time) / tot_time, 2),
              "%]",
              file=fid)
        print("Total time (s) = ", tot_time, file=fid)
Example #8
 def _generate_timestamp(self, tag):
     # Make sure we use only int/float Epoch time
     if tag is None:
         self.timestamp = time_time()
         return
     try:
         tim = helpers.datetime_tuple(tag)
         self.timestamp = timegm(tim)
     except Exception:
         log.error('wrong timestamp, ignoring it: ' + tag)
         self.timestamp = time_time()
Example #9
 def wait_all_tasks_done(self,
                         timeout=NotSet,
                         delay: float = 0.5,
                         interval: float = 0.1):
     """Block, only be used while loop running in a single non-main thread. Not SMART!"""
     timeout = self._timeout if timeout is NotSet else timeout
     timeout = timeout or float("inf")
     start_time = time_time()
     time_sleep(delay)
     while 1:
         if not self.todo_tasks:
             return self.all_tasks
         if time_time() - start_time > timeout:
             return self.done_tasks
         time_sleep(interval)
Example #10
File: server.py  Project: kolomenkin/limbo
def root_page():
    log('Root page is requested')
    files = []
    items = storage.enumerate_files()
    now = time_time()
    for item in items:
        full_disk_filename = item['full_disk_filename']
        url_filename = item['url_filename']
        display_filename = item['display_filename']
        modified_unixtime = get_file_modified_unixtime(full_disk_filename)
        files.append(
            {
                'display_filename': display_filename,
                'url': URLPREFIX + urllib_quote(url_filename),
                'url_filename': url_filename,
                'size': format_size(os_path.getsize(full_disk_filename)),
                'age': format_age(now - modified_unixtime),
                'sortBy': now - modified_unixtime,
            })
    files = sorted(files, key=lambda item: item['sortBy'])
    return {
            'title': 'Limbo: the file sharing lightweight service',
            'h1': 'Limbo. The file sharing lightweight service',
            'files': files,
        }
Example #11
    def __init__(self, dataQueue=None, commandPipe=None, loggingQueue=None, sharedMemoryArray=None):
        Process.__init__(self)
        OKBase.__init__(self)
        self.dataQueue = dataQueue
        self.commandPipe = commandPipe
        self.running = True
        self.loggingQueue = loggingQueue
        self.sharedMemoryArray = sharedMemoryArray
        
        # PipeReader stuff
        self.state = self.analyzingState.normal
        self.data = Data()
        self.dedicatedData = self.dedicatedDataClass(time_time())
        self.timestampOffset = 0

        self._shutter = 0
        self._trigger = 0
        self._counterMask = 0
        self._adcMask = 0
        self._integrationTime = Q(100, 'ms')
        
        self.logicAnalyzerEnabled = False
        self.logicAnalyzerStopAtEnd = False
        self.logicAnalyzerData = LogicAnalyzerData()
        
        self.logicAnalyzerBuffer = bytearray()
        self.logicAnalyzerReadStatus = 0      #
        self._pulserConfiguration = None
Example #12
def _get_timestamp() -> str:
    """Generate timestamp."""
    try:
        return hexlify(pack(STRUCT_PACKING_FORMAT,
                            int(round(time_time())))).decode(ENCODING_CODEC)
    except HANDLED_EXCEPTIONS as ex:
        raise EncodingError('failed to generate timestamp') from ex
Example #13
    def __init__(self,
                 dataQueue=None,
                 commandPipe=None,
                 loggingQueue=None,
                 sharedMemoryArray=None):
        Process.__init__(self)
        OKBase.__init__(self)
        self.dataQueue = dataQueue
        self.commandPipe = commandPipe
        self.running = True
        self.loggingQueue = loggingQueue
        self.sharedMemoryArray = sharedMemoryArray

        # PipeReader stuff
        self.state = self.analyzingState.normal
        self.data = Data()
        self.dedicatedData = self.dedicatedDataClass(time_time())
        self.timestampOffset = 0

        self._shutter = 0
        self._trigger = 0
        self._counterMask = 0
        self._adcMask = 0
        self._integrationTime = Q(100, 'ms')

        self.logicAnalyzerEnabled = False
        self.logicAnalyzerStopAtEnd = False
        self.logicAnalyzerData = LogicAnalyzerData()

        self.logicAnalyzerBuffer = bytearray()
        self.logicAnalyzerReadStatus = 0  #
        self._pulserConfiguration = None
Example #14
    def stop(self, name=''):
        """
        Закрывает все незавершенные замеры
        """
        # Закрываем интервал по имени
        if name:
            self.time_intervals[name]['intervals'][-1]['end'] = time_time()
            return self

        # or close all still-open intervals
        for intervals_info in self.time_intervals.values():
            for interval in intervals_info['intervals']:
                if 'end' in interval: continue
                interval['end'] = time_time()

        if self.timer_name:
            self.stop(self.timer_name)
        return self
Example #15
    def __generate_hash():
        b = random_getrandbits(64)
        c = random_getrandbits(16)

        t = time_time()

        # Use the entropic fraction.
        f = math_modf(t)[0]
        e = int(f * 1000000)

        x = (c << 32) | e
        return b ^ x
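In the hash above, the microsecond value e is always below 10**6 < 2**20 and c occupies bits 32-47, so x fits in 48 bits; XOR-ing x into the 64 random bits b therefore mixes time-derived entropy into the lower half of the result while leaving its top 16 bits purely random.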
Example #16
    def _retension_thread_procedure(self):
        log('FileStorage: Retension thread started')
        previous_check_time = 0
        while True:
            try:
                now = time_time()
                if now - previous_check_time > 10 * 60:  # every 10 minutes
                    log('FileStorage: Check for outdated files')
                    previous_check_time = time_time()
                    self._check_retention()

                # Wait for 60 seconds with a possibility
                # to be interrupted through stop() call:
                with self._condition_stop:
                    if not self._stopping:
                        self._condition_stop.wait(60)
                    if self._stopping:
                        log('Retension thread found stop signal')
                        break
            except Exception:
                logging_error(traceback_format_exc())
                time_sleep(60)  # prevent from flooding
Example #17
def evaluate_one_model_keras(model_dirpath):
    # rename _trained as _evaluating
    new_folder_name = model_dirpath.replace('_trained', '_evaluating')
    shutil_move(model_dirpath, new_folder_name)
    model_name = os_path_basename(new_folder_name)
    copied_scan_battery_dirname = os_path_join(
        new_folder_name, os_path_basename(SCAN_BATTERIES_DIRNAME))
    copy_anything(SCAN_BATTERIES_DIRNAME, copied_scan_battery_dirname)

    time_start = time_time()

    # with Pool() as pool:
    #     list(pool.imap_unordered(process_single_target, target_dirnames))
    for scan_battery_dirname in glob_glob(
            os_path_join(SCAN_BATTERIES_DIRNAME, '*')):
        process_single_scan_battery_keras(new_folder_name,
                                          scan_battery_dirname)
    print('{}: it took {:.2f} to evaluate model {} for all scan batteries'.
          format(SCRIPT_FNAME,
                 time_time() - time_start, model_name))
    shutil_move(new_folder_name,
                new_folder_name.replace('_evaluating', '_evaluated'))
Example #18
 def _invoke_callbacks(self):
     """Record the task_end_time & task_cost_time, set result for self._callback_result."""
     self.task_end_time = time_time()
     self.task_cost_time = self.task_end_time - self.task_start_time
     with self._condition:
         for callback in self._done_callbacks:
             try:
                 result = callback(self)
                 if callback in self._user_callbacks:
                     self._callback_result = result
             except Exception as e:
                 logger.error("exception calling callback for %s" % e)
         self._condition.notify_all()
Example #19
    def _add_to_cache(self, r_name: str, r_type: str, r_data: str) -> None:
        with self._cache_lock:
            if (r_name, r_type) not in self._dns_cache:
                self._dns_cache[(r_name, r_type)] = {r_data}
            else:
                self._dns_cache[(r_name, r_type)].add(r_data)

            expiration_time = int(time_time()) + self._SERVER_TTL
            if expiration_time in self._expiration_dict:
                self._expiration_dict[expiration_time].add(
                    (r_name, r_type, r_data))
            else:
                self._expiration_dict[expiration_time] = {(r_name, r_type,
                                                           r_data)}
Example #20
 def __init__(self):
     self.count = defaultdict(list)  # list of counts in the counter channel
     self.timestamp = None
     self.timestampZero = None
     self.scanvalue = None  # scanvalue
     self.final = False
     self.other = list()
     self.overrun = False
     self.exitcode = 0
     self.dependentValues = list()  # additional scan values
     self.evaluated = dict()
     self.result = None  # data received in the result channels dict with channel number as key
     self.externalStatus = None
     self._creationTime = time_time()
     self.timeTick = defaultdict(list)
     self.timeTickOffset = 0.0
     self.timingViolations = None
Example #21
 def __init__(self):
     self.count = defaultdict(list)       # list of counts in the counter channel
     self.timestamp = None   
     self.timestampZero = None
     self.scanvalue = None                           # scanvalue
     self.final = False
     self.other = list()
     self.overrun = False
     self.exitcode = 0
     self.dependentValues = list()                   # additional scan values
     self.evaluated = dict()
     self.result = None                              # data received in the result channels dict with channel number as key
     self.externalStatus = None
     self._creationTime = time_time()
     self.timeTick = defaultdict(list)
     self.timeTickOffset = 0.0
     self.timingViolations = None
Example #22
    def add_point(self, name='', description='', prolong=True):
        """
        Добавление нового периода замера
        :param name: Наименование замера
        :param description: Описание замера
        """
        # закрываем все предыдущие замеры
        self.stop()

        # Assign a name to the new measurement
        name = name if name else self._unused_name()

        # Start the measurement
        if not name in self.time_intervals or not prolong:
            self.time_intervals[name] = {'intervals': [], 'description': description}
        self.time_intervals[name]['intervals'].append({'beginning': time_time()})
        return self
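Examples #14 and #22 appear to come from the same small profiling helper. A minimal, self-contained sketch of the same start/stop interval pattern; the IntervalTimer class below is illustrative, not the original class:

from time import time as time_time


class IntervalTimer:
    """Illustrative re-creation of the add_point()/stop() pattern above."""

    def __init__(self):
        self.time_intervals = {}

    def add_point(self, name, description=''):
        self.stop()  # close any interval that is still open
        entry = self.time_intervals.setdefault(
            name, {'intervals': [], 'description': description})
        entry['intervals'].append({'beginning': time_time()})
        return self

    def stop(self, name=''):
        names = [name] if name else list(self.time_intervals)
        for n in names:
            for interval in self.time_intervals[n]['intervals']:
                interval.setdefault('end', time_time())
        return self


timer = IntervalTimer().add_point('load', 'read inputs')
timer.add_point('compute')   # implicitly closes 'load'
timer.stop()                 # closes 'compute'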
Example #23
    def _expiration_checking(self) -> None:
        while True:
            time_sleep(1)

            with self._cache_lock:
                current_check_time = int(time_time())
                for some_time in range(self._last_check_time,
                                       current_check_time):
                    if some_time in self._expiration_dict:
                        for r_name, r_type, r_data in self._expiration_dict.pop(
                                some_time):
                            if (r_name, r_type) in self._dns_cache:
                                self._dns_cache[(r_name,
                                                 r_type)].remove(r_data)

                                if len(self._dns_cache[(r_name, r_type)]) == 0:
                                    self._dns_cache.pop((r_name, r_type))

                self._last_check_time = current_check_time
Example #24
def unrecognized(update: Update, context: CallbackContext) -> None:
    chat_id = update.message.chat_id
    chat_data = Data.update(chat_id)
    
    time = int(time_time())
    command = update.message.text
    
    if time - chat_data[chat_id][UNRECOGNIZED][2] > 20 or command != chat_data[chat_id][UNRECOGNIZED][1]:
        chat_data[chat_id][UNRECOGNIZED][0] = 0
        chat_data[chat_id][UNRECOGNIZED][1] = command
        chat_data[chat_id][UNRECOGNIZED][2] = time
    else:
        if chat_data[chat_id][UNRECOGNIZED][0] == UNRECOGNIZED_LEN:
            chat_data[chat_id][UNRECOGNIZED][0] = 1
        else:
            chat_data[chat_id][UNRECOGNIZED][0] += 1
        update.message.reply_text(UNRECOGNIZED_MESSAGES[chat_data[chat_id][UNRECOGNIZED][0]-1])
    
    Data.write(chat_data)
Example #25
    def _init_mainloop(self) -> None:
        try:
            with open(self._SERVER_CACHE_FILE,
                      "rb") as cache_file, self._cache_lock:
                self._dns_cache = pickle_load(cache_file)
                self._expiration_dict = pickle_load(cache_file)
                self._last_check_time = pickle_load(cache_file)

            os_remove(self._SERVER_CACHE_FILE)
        except OSError:
            self._dns_cache = dict()
            self._expiration_dict = dict()
            self._last_check_time = int(time_time())

        console_listener = Thread(target=self._console_listening, daemon=True)
        console_listener.start()

        expiration_checker = Thread(target=self._expiration_checking,
                                    daemon=True)
        expiration_checker.start()
Example #26
 def __init__(self,
              coro,
              *,
              loop=None,
              callback: Union[Callable, Sequence] = None,
              extra_args=None):
     assert iscoroutine(coro), repr(coro)
     super().__init__(coro, loop=loop)
     self._callback_result = NotSet
     self.extra_args = extra_args or ()
     self.task_start_time = time_time()
     self.task_end_time = 0.0
     self.task_cost_time = 0.0
     if callback:
         if not isinstance(callback, (list, tuple, set)):
             callback = [callback]
         self.add_done_callback(self.set_task_time)
         for fn in callback:
             # custom callback will update the _callback_result
             self.add_done_callback(self.wrap_callback(fn))
Example #27
File: tools.py  Project: fabaff/aioswitcher
def _get_timestamp() -> str:
    """Generate hexadecimal represntation of the current timestamp.

    Return:
      Hexadecimal represntation of the current unix time retrieved by
      ``time.time``.

    Raises:
      aioswitcher.erros.DecodingError: when failed to analyze the timestamp.

    Note:
      This is a private function containing blocking code.
      Please consider using ``get_timestamp`` (without the `_`),
      to schedule as a task in the event loop.

    """
    try:
        return hexlify(pack(STRUCT_PACKING_FORMAT,
                            int(round(time_time())))).decode(ENCODING_CODEC)
    except HANDLED_EXCEPTIONS as ex:
        raise DecodingError("failed to generate timestamp") from ex
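The packed value can be read back with the inverse struct/binascii calls. In the sketch below, the values of STRUCT_PACKING_FORMAT and ENCODING_CODEC are assumptions (a 4-byte little-endian unsigned int and UTF-8), not necessarily the library's actual constants:

from binascii import hexlify, unhexlify
from struct import pack, unpack
from time import time as time_time

STRUCT_PACKING_FORMAT = '<I'   # assumption: 4-byte little-endian unsigned int
ENCODING_CODEC = 'utf-8'       # assumption

hex_ts = hexlify(pack(STRUCT_PACKING_FORMAT,
                      int(round(time_time())))).decode(ENCODING_CODEC)
seconds_since_epoch = unpack(STRUCT_PACKING_FORMAT, unhexlify(hex_ts))[0]
print(hex_ts, seconds_since_epoch)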
Example #28
    def get_empty_block(self, sync_height):
        sync_blocks(self.nodes, height=sync_height)
        node0 = self.nodes[0]

        hashprev = uint256_from_str(unhexlify(node0.getbestblockhash())[::-1])
        snapshot_hash = get_tip_snapshot_meta(node0).hash

        if len(self.spendable_outputs) > 0:
            block_time = self.spendable_outputs[-1].nTime + 1
        else:
            block_time = int(time_time()) + 2

        block = create_block(hashprev=hashprev,
                             coinbase=sign_coinbase(
                                 self.nodes[0],
                                 create_coinbase(height=sync_height + 1,
                                                 stake=node0.listunspent()[0],
                                                 snapshot_hash=snapshot_hash)),
                             nTime=block_time)
        block.solve()

        return block
Example #29
 def __init__(self,
              timeout=None,
              args=None,
              kwargs=None,
              callback=None,
              catch_exception=True):
     super(NewFuture, self).__init__()
     self._timeout = timeout
     self._args = args or ()
     self._kwargs = kwargs or {}
     self._callback_result = None
     self.catch_exception = catch_exception
     self.task_start_time = time_time()
     self.task_end_time = 0
     self.task_cost_time = 0
     self._user_callbacks = set()
     if callback:
         if not isinstance(callback, (list, tuple)):
             callback = [callback]
         for fn in callback:
             self.add_done_callback(fn)
             self._user_callbacks.add(fn)
Example #30
    def _check_retention(self):
        now = time_time()
        if not os_path.isdir(self._storage_directory):
            return
        for file in os_listdir(self._storage_directory):
            fullname = os_path.join(self._storage_directory, file)
            if os_path.isfile(fullname):
                modified_unixtime = get_file_modified_unixtime(fullname)
                if now - modified_unixtime > self._max_store_time_seconds:
                    log('FileStorage: Remove outdated file: "' + fullname +
                        '"; size: ' + str(os_path.getsize(fullname)))
                    os_remove(fullname)

        if not os_path.isdir(self._temp_directory):
            return
        for file in os_listdir(self._temp_directory):
            fullname = os_path.join(self._temp_directory, file)
            if os_path.isfile(fullname):
                modified_unixtime = get_file_modified_unixtime(fullname)
                if now - modified_unixtime > 15 * 60:  # older than 15 minutes
                    log('FileStorage: Remove outdated temp file: "' + fullname +
                        '"; size: ' + str(os_path.getsize(fullname)))
                    os_remove(fullname)
Example #31
    def preprocessing_for_icfof(self,
                                ntss_tmp,
                                bool_print: bool = False,
                                count_num_node: bool = False):
        """
        Calls, for the id_tree tree, the two preprocessing methods:
         * :func:`~pyCFOFiSAX.tree_iSAX.TreeISAX._minmax_obj_vs_node`,
         * :func:`~pyCFOFiSAX.tree_iSAX.TreeISAX.distrib_nn_for_cdf`.

        :param ntss_tmp: Reference sequences
        :param boolean bool_print: if True, displays the time taken by each preprocessing step
        :param boolean count_num_node: if True, counts the number of nodes

        :returns: the number of nodes in the tree, if ``count_num_node`` is True
        :rtype: int
        """

        start_time = time_time()
        self.min_array, self.max_array = self._minmax_obj_vs_node(
            ntss_tmp, bool_print)
        if bool_print:
            print("_minmax_obj_vs_node --- %s seconds ---" %
                  (time_time() - start_time))
            stdout.flush()

        start_time = time_time()
        self.distrib_nn_for_cdf(ntss_tmp, bool_print)
        if bool_print:
            print("pretrait cdf --- %s seconds ---" %
                  (time_time() - start_time))
            stdout.flush()

        start_time = time_time()
        self._minmax_obj_vs_nodeleaf()
        if bool_print:
            print("pretrait _minmax_obj_vs_node_leaf --- %s seconds ---" %
                  (time_time() - start_time))
            stdout.flush()

        self._preprocessing_computed = True
        if count_num_node:
            return self.num_nodes
Example #32
        else:
            print('Got event', e)
#        Yield()

def signaller(event):
    print('signaller', event, 'starting')
    while True:
        Sleep(0.9)
        print('Signalling')
        event.Signal()


if __name__ == '__main__':
#    threads.SpawnScheduler()

    start = time_time()

#     Spawn(thread_one)
    thread_two = Spawn(thread_two)
    Spawn(thread_three)

#     Spawn(ticker, 'ticker one')
#     Spawn(ticker, 'ticker two')

    event = Event()
    Spawn(wait_for, event, 0.5)
    Spawn(signaller, event)

    end_event = Event()
    ok = end_event.Wait()
    print('saw end_event', ok)
Example #33
    def generate(self):
        self.ptype = self.presence_obj.ptype
        self.fjid = self.presence_obj.fjid
        self.jid = self.presence_obj.jid
        self.room_jid = self.presence_obj.jid
        self.nick = self.presence_obj.resource
        self.show = self.presence_obj.show
        self.status = self.presence_obj.status
        self.avatar_sha = self.presence_obj.avatar_sha
        self.errcode = self.presence_obj.errcode
        self.errmsg = self.presence_obj.errmsg
        self.errcon = self.stanza.getError()
        self.get_gc_control()
        self.gc_contact = app.contacts.get_gc_contact(self.conn.name,
            self.room_jid, self.nick)

        if self.ptype == 'error':
            return True

        if self.ptype and self.ptype != 'unavailable':
            return
        if app.config.get('log_contact_status_changes') and \
        app.config.should_log(self.conn.name, self.room_jid):
            if self.gc_contact:
                jid = self.gc_contact.jid
            else:
                jid = self.stanza.getJid()
            st = self.status
            if jid:
                # we know real jid, save it in db
                st += ' (%s)' % jid
            show = app.logger.convert_show_values_to_db_api_values(self.show)
            if show is not None:
                fjid = nbxmpp.JID(self.fjid)
                app.logger.insert_into_logs(self.conn.name,
                                            fjid.getStripped(),
                                            time_time(),
                                            KindConstant.GCSTATUS,
                                            contact_name=fjid.getResource(),
                                            message=st,
                                            show=show)


        # NOTE: if it's a gc presence, don't ask vcard here.
        # We may ask it to real jid in gui part.
        self.status_code = []
        ns_muc_user_x = self.stanza.getTag('x', namespace=nbxmpp.NS_MUC_USER)
        if ns_muc_user_x:
            destroy = ns_muc_user_x.getTag('destroy')
        else:
            destroy = None
        if ns_muc_user_x and destroy:
            # Room has been destroyed. see
            # http://www.xmpp.org/extensions/xep-0045.html#destroyroom
            self.reason = _('Room has been destroyed')
            r = destroy.getTagData('reason')
            if r:
                self.reason += ' (%s)' % r
            if destroy.getAttr('jid'):
                try:
                    jid = helpers.parse_jid(destroy.getAttr('jid'))
                    self.reason += '\n' + \
                        _('You can join this room instead: %s') % jid
                except helpers.InvalidFormat:
                    pass
            self.status_code = ['destroyed']
        else:
            self.reason = self.stanza.getReason()
            conditions = self.stanza.getStatusConditions()
            if conditions:
                self.status_code = []
                for condition in conditions:
                    if condition in CONDITION_TO_CODE:
                        self.status_code.append(CONDITION_TO_CODE[condition])
            else:
                self.status_code = self.stanza.getStatusCode()

        self.role = self.stanza.getRole()
        self.affiliation = self.stanza.getAffiliation()
        self.real_jid = self.stanza.getJid()
        self.actor = self.stanza.getActor()
        self.new_nick = self.stanza.getNewNick()
        return True
Example #34
        def makePlot(self):
            timeArray = []
            uArray = []
            pidArray = []

            if self.Parent.settingsWindow.onlyUGraphRadioButton.isChecked():
                startTime = time_time()
                while time_time() - startTime < self.secondsSpinBox.value():
                    timeArray.append(time_time() - startTime)
                    uArray.append(tivaConn.read('U')[0])
                    pidArray.append(0.0)
                    self.plotProgressBar.setValue(
                        100.0 * ((time_time() - startTime) /
                                 self.secondsSpinBox.value()))
                    QApplication.processEvents()

            elif self.Parent.settingsWindow.onlyPIDGraphRadioButton.isChecked():
                startTime = time_time()
                while time_time() - startTime < self.secondsSpinBox.value():
                    timeArray.append(time_time() - startTime)
                    pidArray.append(tivaConn.read('PID')[0])
                    uArray.append(0.0)
                    self.plotProgressBar.setValue(
                        100.0 * ((time_time() - startTime) /
                                 self.secondsSpinBox.value()))
                    QApplication.processEvents()

            elif self.Parent.settingsWindow.bothGraphsRadioButton.isChecked():
                startTime = time_time()
                while time_time() - startTime < self.secondsSpinBox.value():
                    timeArray.append(time_time() - startTime)
                    uArray.append(tivaConn.read('U')[0])
                    pidArray.append(tivaConn.read('PID')[0])
                    self.plotProgressBar.setValue(
                        100.0 * ((time_time() - startTime) /
                                 self.secondsSpinBox.value()))
                    QApplication.processEvents()

            self.Parent.statusBar().showMessage("Points in plot: {}".format(
                len(timeArray)))

            self.uGraph.setParent(None)
            self.uGraphToolbar.setParent(None)
            if self.Parent.settingsWindow.onlyPIDGraphRadioButton.isChecked():
                self.uGraph = Graph(xlabel='Time, seconds',
                                    ylabel='Voltage, Volts',
                                    auto_ylim=False,
                                    ymin=0,
                                    ymax=3.3)
            else:
                self.uGraph = Graph(Xarray=timeArray,
                                    Yarray=uArray,
                                    xlabel='Time, seconds',
                                    ylabel='Voltage, Volts',
                                    auto_ylim=False,
                                    ymin=0,
                                    ymax=3.3)
            self.uGraphToolbar = self.uGraph.getGraphToolbar()

            self.pidGraph.setParent(None)
            self.pidGraphToolbar.setParent(None)
            if self.Parent.settingsWindow.onlyUGraphRadioButton.isChecked():
                self.pidGraph = Graph(ylabel='PID-output')
            else:
                self.pidGraph = Graph(Xarray=timeArray,
                                      Yarray=pidArray,
                                      ylabel='PID-output')
            self.pidGraphToolbar = self.pidGraph.getGraphToolbar()

            self.plotBox.addWidget(self.uGraph)
            self.plotBox.addWidget(self.uGraphToolbar)
            self.plotBox.setAlignment(self.uGraphToolbar, Qt.AlignCenter)

            self.plotBox.addWidget(self.pidGraph)
            self.plotBox.addWidget(self.pidGraphToolbar)
            self.plotBox.setAlignment(self.pidGraphToolbar, Qt.AlignCenter)

            if self.calcAvrgUCheckBox.isChecked():
                if not self.Parent.settingsWindow.onlyPIDGraphRadioButton.isChecked():
                    self.avrgULabel.setText("U: {0:.3f}".format(
                        numpy_mean(uArray)))
                else:
                    self.avrgULabel.setText("U: -")
            else:
                self.avrgULabel.setText("U: -")

            if self.calcAvrgPIDCheckBox.isChecked():
                if not self.Parent.settingsWindow.onlyUGraphRadioButton.isChecked():
                    self.avrgPIDLabel.setText("PID-output: {0:.3f}".format(
                        numpy_mean(pidArray)))
                else:
                    self.avrgPIDLabel.setText("PID-output: -")
            else:
                self.avrgPIDLabel.setText("PID-output: -")
Example #35
def int_time():
    return int(ceil(time_time()))
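Because of the ceil(), int_time() rounds up: for time_time() == 1600000000.25 it returns 1600000001, whereas int(time_time()) would truncate to 1600000000.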
Example #36
def main():
    parser = argparse_ArgumentParser("Input parameters")
    parser.add_argument("--input_file_name",
                        default="input_toy.yaml",
                        help="Input parameters file name")
    parser.add_argument("--out_dir_name",
                        default="/results",
                        help="Output directory name")
    parser.add_argument("--train_test_files_dir",
                        default="",
                        help="Train test file path")
    parser.add_argument("--graph_files_dir",
                        default="",
                        help="Graph files' folder path")

    parser.add_argument("--n_pts",
                        default=1,
                        help="number of partitions (computers)")
    parser.add_argument(
        "--seed_mode",
        help="Seed mode - specify 'cliques' for the cliques algo")
    parser.add_argument("--search_method", help="Sampling algorithm")
    parser.add_argument("--model_dir", help="Directory containing model")
    parser.add_argument("--ptnum", default='0', help="partition number")
    parser.add_argument("--explore_prob",
                        default=0.01,
                        help="probability of exploring")
    parser.add_argument("--prob_metropolis",
                        default=0.1,
                        help="metropolis probability")
    parser.add_argument("--T0", default=0.88, help="isa T0")
    parser.add_argument("--alpha", default=1.8, help="isa alpha")
    parser.add_argument("--classi_thresh",
                        default=0.5,
                        help="Classification threshold")
    parser.add_argument("--transfer2tmp",
                        default=True,
                        help="Transfer to tmp folder")

    args = parser.parse_args()

    with open(args.input_file_name, 'r') as f:
        inputs = yaml_load(f, yaml_Loader)

    if args.classi_thresh:
        inputs['classi_thresh'] = float(args.classi_thresh)
    if args.seed_mode:
        inputs['seed_mode'] = args.seed_mode
    if args.search_method:
        inputs['search_method'] = args.search_method
    if args.model_dir:
        inputs['model_dir'] = args.model_dir
    if args.explore_prob:
        inputs['explore_prob'] = float(args.explore_prob)
    if args.prob_metropolis:
        inputs['prob_metropolis'] = float(args.prob_metropolis)
    if args.T0:
        inputs['T0'] = float(args.T0)
    if args.alpha:
        inputs['alpha'] = float(args.alpha)

    # Override output directory name if same as gen
    if args.out_dir_name or inputs['out_comp_nm'] == "/results/res":
        if not os_path.exists(inputs['dir_nm'] + args.out_dir_name):
            os_mkdir(inputs['dir_nm'] + args.out_dir_name)
        inputs['out_comp_nm'] = args.out_dir_name + "/res"

    inputs['train_test_files_dir'] = ''
    if args.train_test_files_dir:
        if not os_path.exists(inputs['dir_nm'] + args.train_test_files_dir):
            os_mkdir(inputs['dir_nm'] + args.train_test_files_dir)
        inputs['train_test_files_dir'] = args.train_test_files_dir

    inputs['graph_files_dir'] = ''
    if args.graph_files_dir:
        if not os_path.exists(inputs['dir_nm'] + args.graph_files_dir):
            os_mkdir(inputs['dir_nm'] + args.graph_files_dir)
        inputs['graph_files_dir'] = args.graph_files_dir

    with open(inputs['dir_nm'] + inputs['out_comp_nm'] + "_input_sample.yaml",
              'w') as outfile:
        yaml_dump(inputs, outfile, default_flow_style=False)

    logging_basicConfig(filename=inputs['dir_nm'] + inputs['out_comp_nm'] +
                        "_logs.yaml",
                        level=logging_INFO)
    # fin_list_graphs = control(myGraph,inputs,n=50)
    out_comp_nm = inputs['dir_nm'] + inputs['out_comp_nm']
    out_comp_nm_model = inputs['dir_nm'] + inputs['model_dir']

    modelfname = out_comp_nm_model + "_model"
    scalerfname = out_comp_nm_model + "_scaler"

    max_sizeF = inputs['dir_nm'] + inputs[
        'train_test_files_dir'] + "/res_max_size_search_par"
    with open(max_sizeF, 'rb') as f:
        max_size = pickle_load(f)

    with open(scalerfname, 'rb') as f:
        scaler = pickle_load(f)

    myGraph = None
    if inputs['seed_mode'] == "cliques":
        myGraphName = inputs['dir_nm'] + inputs[
            'graph_files_dir'] + "/res_myGraph"
        with open(myGraphName, 'rb') as f:
            myGraph = pickle_load(f)

    ptns = int(args.n_pts)
    if inputs['seed_mode'] == 'n_nodes':

        seed_nodes_dir = out_comp_nm + "_seed_nodes"
    else:
        seed_nodes_dir = inputs['dir_nm'] + inputs[
            'graph_files_dir'] + "/" + inputs['seed_mode'] + "_n_pts_" + str(
                ptns) + "/res_seed_nodes"

    seed_nodes_F = seed_nodes_dir + args.ptnum
    with open(seed_nodes_F, 'rb') as f:
        seed_nodes = pickle_load(f)

    start_time_sample = time_time()
    out_comp_nm = inputs['dir_nm'] + inputs['out_comp_nm']

    num_comp = sample(inputs, myGraph, modelfname, scaler, seed_nodes,
                      max_size, args.transfer2tmp)

    sample_time = time_time() - start_time_sample
    sample_time_avg = sample_time / num_comp
    folNm_out = "/tmp/" + out_comp_nm + "_orig_comps"  # CHECK WHICH NODE's TMP IS BEING USED

    pred_comp_list = [
        pickle_load(open(folNm_out + "/" + seed_node, 'rb'))
        for seed_node in seed_nodes
        if os_path.exists(folNm_out + "/" + seed_node)
    ]

    with open(out_comp_nm + "_pred_comp_list" + args.ptnum, "wb") as f:
        pickle_dump(pred_comp_list, f)
    tot_time = time_time() - start_time

    with open(out_comp_nm + '_runtime_performance.out', "a") as fid:
        print("--- Runtime performance ---", file=fid)
        print("Sample time (s) = ",
              sample_time,
              "[",
              round(100 * float(sample_time) / tot_time, 2),
              "%]",
              file=fid)
        print("Average sample time (s) = ", sample_time_avg, file=fid)
        print("Total time (s) = ", tot_time, file=fid)
Example #37
# -*- coding: utf-8 -*-
"""
Created on Tue Mar  3 17:36:07 2020

@author: Meg_94
"""
from time import time as time_time

start_time = time_time()

from matplotlib import use as mpl_use

mpl_use('Agg')  # Issues a warning on Spyder - don't worry about it
from os import path as os_path, mkdir as os_mkdir, chdir as os_chdir

os_chdir(os_path.dirname(os_path.abspath(__file__)))
from sys import path as sys_path

# insert at 1, 0 is the script path (or '' in REPL)
sys_path.insert(1, './functions_py3/')
from yaml import load as yaml_load, dump as yaml_dump, Loader as yaml_Loader
from argparse import ArgumentParser as argparse_ArgumentParser
from sample import sample
# from random_walk_control import control

from logging import basicConfig as logging_basicConfig, INFO as logging_INFO, DEBUG as logging_DEBUG
from pickle import load as pickle_load, dump as pickle_dump


def main():
    parser = argparse_ArgumentParser("Input parameters")
Example #38
    def distrib_nn_for_cdf(self, ntss_tmp, bool_print: bool = False):
        """
        Computes the two indicators needed to use the CDF of the normal distribution: the mean and the standard deviation of the distances.
        The computation of these indicators is described in `Scoring Message Stream Anomalies in Railway Communication Systems, L.Foulon et al., 2019, ICDMWorkshop <https://ieeexplore.ieee.org/abstract/document/8955558>`_.

        :param numpy.ndarray ntss_tmp: Reference sequences
        :param boolean bool_print: if True, displays the node stats on standard output

        :returns:
        :rtype: list(numpy.ndarray, numpy.array)
        """

        start_time = time_time()
        node_list, node_list_leaf, node_leaf_ndarray_mean = self.get_list_nodes_and_barycentre(
        )
        if bool_print:
            print("pretrait node --- %s seconds ---" %
                  (time_time() - start_time))
            stdout.flush()
            print(len(node_list), " nodes whose ", len(node_list_leaf),
                  " leafs in tree")
            stdout.flush()

        nb_leaf = len(node_list_leaf)

        cdf_mean = np_zeros((nb_leaf, len(ntss_tmp)))
        cdf_std = np_zeros(nb_leaf)
        nb_ts_by_node = np_zeros(nb_leaf, dtype=np_uint32)
        centroid_dist = np_square(cdist(node_leaf_ndarray_mean, ntss_tmp))

        for num, node in enumerate(node_list_leaf):
            cdf_std[node.id_numpy_leaf] = np_mean(node.std)
            nb_ts_by_node[node.id_numpy_leaf] = node.get_nb_sequences()

        dist_list = np_array([np_zeros(i) for i in nb_ts_by_node],
                             dtype=object)

        # compute the squared distance between the barycentre and the ts of the same node
        """ TODO np.vectorize ?"""
        for node_nn in node_list_leaf:
            dist_list[node_nn.id_numpy_leaf] = cdist(
                [node_nn.mean], node_nn.get_sequences())[0]
        dist_list = np_square(dist_list)
        """ TODO np.vectorize ?"""
        for num, node in enumerate(node_list_leaf):
            node_id = node.id_numpy_leaf

            centroid_dist_tmp = centroid_dist[node_id]
            centroid_dist_tmp = centroid_dist_tmp.reshape(
                centroid_dist_tmp.shape + (1, ))
            centroid_dist_tmp = np_repeat(centroid_dist_tmp,
                                          nb_ts_by_node[node_id],
                                          axis=1)

            cdf_mean_tmp = np_add(centroid_dist_tmp, dist_list[node_id])
            cdf_mean[node_id] = np_sum(cdf_mean_tmp, axis=1)

        del dist_list
        del cdf_mean_tmp
        del centroid_dist_tmp

        cdf_mean = np_divide(cdf_mean.T, nb_ts_by_node)
        cdf_mean = np_sqrt(cdf_mean)

        self.cdf_mean = cdf_mean
        self.cdf_std = cdf_std
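One way to read the loop above, assuming cdist uses its default Euclidean metric and that node.mean is the barycentre of node.get_sequences(): since each leaf barycentre c_l is the mean of its member sequences s_(l,k), the identity mean_k ||x_j - s_(l,k)||^2 = ||x_j - c_l||^2 + mean_k ||c_l - s_(l,k)||^2 holds, so the final value is cdf_mean[j, l] = sqrt(||c_l - x_j||^2 + (1/n_l) * sum_k ||c_l - s_(l,k)||^2), i.e. the root-mean-square distance from reference sequence x_j to the members of leaf l, while cdf_std[l] is the mean of the standard deviations stored on leaf l.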
Example #39
 def __init__(self, timeTickOffset=0):
     self.data = [None]*34
     self.externalStatus = None
     self.timeTickOffset = timeTickOffset
     self._timestamp = time_time()
     self.maxBytesRead = 0
Example #40
def time():
    return time_time() - start
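Example #40 pairs with a module-level start timestamp such as the ones taken in Examples #32 and #37. A self-contained sketch of that pattern:

from time import time as time_time

start = time_time()          # taken once, at import time


def time():
    """Seconds elapsed since the module was imported."""
    return time_time() - start


print("t=%.3f s since start" % time())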