Example #1
 def format(self, **kwargs):
     nowms = time() * 1000
     notify_every_ms = self.builder.notify_every_ms
     if self.dm.is_connected_state():
         f = self.dm_formatter
         timeout = 7
     else:
         f = self.__state_formatter__
         timeout = 45
     if (f is not self.current_formatter
             or f is self.__state_formatter__
             or notify_every_ms == 0
             or nowms - self.last_notify_ms >= notify_every_ms):
         self.current_formatter = f
         self.last_notify_ms = nowms
         txt = ''
         for types, obj in kwargs.items():
             if types == f.type:
                 txt = f.format(obj)
                 if txt:
                     break
         if txt:
             if self.timer:
                 self.timer.cancel()
             self.timer = Timer(timeout, self.clear)
             self._notify(txt)
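Note: format() above cancels any pending Timer and arms a new one before notifying, so a stale clear() can never fire against a fresh notification. A minimal sketch of that cancel-and-rearm pattern using the standard library's threading.Timer (the Notifier class and its clear() callback are hypothetical; note that threading.Timer needs an explicit start(), which the project's own Timer presumably does internally):

from threading import Timer

class Notifier:
    def __init__(self):
        self.timer = None

    def notify(self, txt, timeout):
        # Cancel the pending clear so the new text gets its full timeout.
        if self.timer:
            self.timer.cancel()
        self.timer = Timer(timeout, self.clear)
        self.timer.start()
        print(txt)

    def clear(self):
        # Runs once `timeout` seconds pass without another notify().
        self.timer = None
        print('(cleared)')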
Example #2
    def run(self):
        super().run()
        query = args['query']

        options = os.listdir('../data/preprocessed/' + query)
        ind = -1
        if len(options) == 1:
            ind = 0
        else:
            for i, option in enumerate(options):
                print('({}) {}'.format(i, option))
            ind = input('Enter your option index: ')
            while not ind.isdigit() or int(ind) < 0 or int(ind) >= len(options):
                ind = input('Please enter a valid option: ')
            ind = int(ind)
        img_dir = '../data/preprocessed/' + query + '/' + options[ind]
        
        img_names = os.listdir(img_dir)
        progress_bar = ProgressBar(len(img_names), log=self.log)

        if not os.path.exists('../data/loaded/' + query):
            os.mkdir('../data/loaded/{}'.format(query))

        images = []

        while True:
            img = cv2.imread(img_dir + '/' + img_names[progress_bar.i])
            images.append(img)
            if progress_bar.increment(): break

        timer = Timer(log=self.log)

        images = timer.time(lambda: np.array(images), 'Converting to numpy array')

        timer.time(lambda: np.save('../data/loaded/{}/{}'.format(query, options[ind] + '.npy'), images), 'Saving numpy array to file')
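Note: the timer.time(fn, label) helper used above runs a callable, reports its duration, and passes the result through. A minimal sketch of such a helper, assuming a time.perf_counter implementation (the log argument and output format are assumptions):

import time

class Timer:
    def __init__(self, log=None):
        self.log = log  # accepted for parity with the call above; unused here

    def time(self, fn, label=''):
        # Execute fn, print the elapsed wall-clock time, return fn's result.
        start = time.perf_counter()
        result = fn()
        print('{}: {:.3f} s'.format(label, time.perf_counter() - start))
        return result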
Example #3
def optimizeSimulationSize():
    """Compare real vs. complex FFT round-trip timings for two padded shapes."""
    arr_shape = [239, 239, 55]

    psf_shape_gauss = [17, 17, 17]
    psf_shape_defocus = [17, 17, 33]

    shape1 = [i[0] + i[1] for i in zip(arr_shape, psf_shape_defocus)]
    shape2 = [i[0] + i[1] for i in zip(arr_shape, psf_shape_gauss)]
    arr1 = np.random.random(shape1)
    arr2 = np.random.random(shape2)

    print('Smaller Array:')

    with Timer(name='Real:{}'.format(arr1.shape)) as tim:
        f_arr = np.fft.rfftn(arr1)
        r_arr = np.fft.irfftn(f_arr)

    with Timer(name='Cpx:{}'.format(arr1.shape)) as tim:
        f_arr = np.fft.fftn(arr1)
        r_arr = np.fft.ifftn(f_arr)

    with Timer(name='Real:{}'.format(arr2.shape)) as tim:
        f_arr = np.fft.rfftn(arr2)
        r_arr = np.fft.irfftn(f_arr)

    with Timer(name='Cpx:{}'.format(arr2.shape)) as tim:
        f_arr = np.fft.fftn(arr2)
        r_arr = np.fft.ifftn(f_arr)
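Note: here Timer is a context manager that reports the wall-clock time of the enclosed block. A minimal sketch, assuming a time.perf_counter implementation with the name keyword seen above:

import time

class Timer:
    def __init__(self, name=''):
        self.name = name

    def __enter__(self):
        self.start = time.perf_counter()
        return self

    def __exit__(self, exc_type, exc_value, tb):
        # Report even if the block raised; do not suppress the exception.
        print('{}: {:.3f} s'.format(self.name, time.perf_counter() - self.start))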
Example #4
 def __get_percpu(self):
     """Update and/or return the per CPU list using the psutil library."""
     # Never update more than 1 time per cached_time
     if self.timer_percpu.finished():
         self.percpu_percent = []
         for cpu_number, cputimes in enumerate(psutil.cpu_times_percent(interval=0.0, percpu=True)):
             cpu = {'key': self.get_key(),
                    'cpu_number': cpu_number,
                    'total': round(100 - cputimes.idle, 1),
                    'user': cputimes.user,
                    'system': cputimes.system,
                    'idle': cputimes.idle}
             # The following stats are for API purposes only
             if hasattr(cputimes, 'nice'):
                 cpu['nice'] = cputimes.nice
             if hasattr(cputimes, 'iowait'):
                 cpu['iowait'] = cputimes.iowait
             if hasattr(cputimes, 'irq'):
                 cpu['irq'] = cputimes.irq
             if hasattr(cputimes, 'softirq'):
                 cpu['softirq'] = cputimes.softirq
             if hasattr(cputimes, 'steal'):
                 cpu['steal'] = cputimes.steal
             if hasattr(cputimes, 'guest'):
                 cpu['guest'] = cputimes.guest
             if hasattr(cputimes, 'guest_nice'):
                 cpu['guest_nice'] = cputimes.guest_nice
             # Append new CPU to the list
             self.percpu_percent.append(cpu)
             # Reset timer for cache
             self.timer_percpu = Timer(self.cached_time)
     return self.percpu_percent
Example #5
def line_with_training():
    train_data = DriveDatasetLoader('D:/Datasets/DRIVE', 10).load_train()
    test_data = DriveDatasetLoader('D:/Datasets/DRIVE', 10).load_test()

    # op = cached_single_norm
    op = cached_multi_norm

    size = 15
    timer = Timer()

    green('line_with_training')
    timer.start('Train')
    thresh, train_acc = find_best_acc(op, train_data, size)
    train_auc = calc_auc(train_data, op, size)
    timer.stop()

    timer.start('Test')
    test_acc = accuracy(op, test_data, thresh, size)
    test_auc = calc_auc(test_data, op, size)
    timer.stop()

    green(f'Threshold: {thresh}')
    green(f'Train average accuracy: {train_acc}')
    green(f'Train average AUC: {train_auc}')
    green(f'Test average accuracy: {test_acc}')
    green(f'Test average AUC: {test_auc}')
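Note: examples #5 and #6 wrap each phase in timer.start('label') / timer.stop() calls. A minimal sketch of that start/stop interface (the label handling and output format are assumptions):

import time

class Timer:
    def start(self, label=''):
        self._label = label
        self._start = time.perf_counter()

    def stop(self):
        elapsed = time.perf_counter() - self._start
        print('{}: {:.3f} s'.format(self._label, elapsed))
        return elapsed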
Example #6
def optic_with_training():
    train_data = DriveDatasetLoader('D:/Datasets/DRIVE', 10).load_train()
    test_data = DriveDatasetLoader('D:/Datasets/DRIVE', 10).load_test()

    # op = cached_single
    # thresh = 64

    op = cached_multi
    thresh = 119

    size = 15
    timer = Timer()

    green('optic_with_training')
    timer.start('Train')
    disk_thresh, train_acc = find_best_acc_optic(op, thresh, train_data, size)
    timer.stop()

    timer.start('Test')
    test_acc = get_accuracy_optic(op, test_data, thresh, disk_thresh)
    timer.stop()

    blue(f'Disk threshold: {disk_thresh}')
    blue(f'Train average accuracy: {train_acc}')
    blue(f'Test average accuracy: {test_acc}')
Example #7
def get_ticker_comments(
    ticker: str,
    days_to_look_back: int = 1,
    subreddit_to_search: Optional[str] = None,
    end_datetime: Optional[datetime] = None
) -> List[str]:
    timer = Timer()
    timer.start()
    # A datetime.utcnow() default would be evaluated once at import time;
    # resolve it at call time instead.
    if end_datetime is None:
        end_datetime = datetime.utcnow()
    start_datetime, end_datetime = get_start_and_end_date(
        end_datetime, days_to_look_back)
    from_timestamp = int(start_datetime.timestamp())
    to_timestamp = int(end_datetime.timestamp())
    comments = []
    with requests.Session() as session:
        while True:
            try:
                api_call = create_api_call(ticker, from_timestamp,
                                           to_timestamp, subreddit_to_search)
                content = get_comments_from_api(session, api_call)
            except ApiError:
                break
            # Collect every page, not only the final partial one.
            comments.extend(get_comments_from_content(content))
            if len(content) < _API_SEARCH_RESULT_SIZE:
                break
            from_timestamp = content[_API_SEARCH_RESULT_SIZE -
                                     1].get("created_utc")
    logger.debug(
        f"Found {len(comments)} comments of {ticker} with at least {_UPVOTE_THRESHOLD} upvotes from {start_datetime} to {end_datetime}."
    )
    logger.info(
        f"Analyzed {len(comments)} comments from ticker: {ticker} in {int(timer.end())} seconds"
    )
    return comments
Example #8
class Runner(Unit):
    def __init__(self, coords, size):
        Unit.__init__(self, coords, size)
        self.timer = Timer()
        self.legs_are_open = True

    def in_default_position(self, dimensions):
        return self.coords.y >= dimensions.height / 2

    def jump(self):
        self.accelerate(Point(0, settings.jump_height * -1), 1)

    def get_pixels(self):
        self.legs_are_open = self.get_legs_status()
        pixel_map = list(open_legs_pixel_map
                         if self.legs_are_open else closed_legs_pixel_map)
        pixel_map.reverse()
        return pixel_map

    def get_legs_status(self):
        if self.velocity.y != 0:
            return True
        elif self.timer.elapsed_time() > 0.1:
            self.timer.set_last_tick_to_current_time()
            return not self.legs_are_open
        else:
            return self.legs_are_open
Example #9
 def __get_cpu(self):
     """Update and/or return the CPU using the psutil library."""
     # Never update more than 1 time per cached_time
     if self.timer_cpu.finished():
         self.cpu_percent = psutil.cpu_percent(interval=0.0)
         # Reset timer for cache
         self.timer_cpu = Timer(self.cached_time)
     return self.cpu_percent
Example #10
    def __init__(self, cached_time=1):
        self.cpu_percent = 0
        self.percpu_percent = []

        # cached_time is the minimum time interval between stats updates
        # since last update is passed (will retrieve old cached info instead)
        self.timer_cpu = Timer(0)
        self.timer_percpu = Timer(0)
        self.cached_time = cached_time
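Note: examples #4, #9 and #10 gate expensive psutil calls behind a countdown timer: stats are refreshed only once finished() is True, and the timer is re-armed with Timer(self.cached_time). A minimal sketch of such a countdown timer, assuming a time.time-based implementation:

import time

class Timer:
    def __init__(self, duration):
        self.target = time.time() + duration

    def finished(self):
        # True once the countdown has elapsed.
        return time.time() > self.target

Starting with Timer(0), as the constructor above does, makes the very first finished() check succeed, so the stats are populated on the first call.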
Example #11
    def _get_coverage_slices(self, crs, gdal_coverage_converter):
        """
        Returns the slices for the collection of files given
        """
        crs_axes = CRSUtil(crs).get_axes(self.session.coverage_id)

        slices_dict = self.create_dict_of_slices(self.session.import_overviews)

        timeseries = self._generate_timeseries_tuples()
        count = 1
        for tpair in timeseries:
            file = tpair.file
            file_path = tpair.file.get_filepath()

            timer = Timer()

            # print which file is being analyzed
            FileUtil.print_feedback(count, len(timeseries), file_path)
            if not FileUtil.validate_file_path(file_path):
                continue

            valid_coverage_slice = True

            gdal_file = GDALGmlUtil(file.get_filepath())
            try:
                subsets = GdalAxisFiller(crs_axes, gdal_file).fill(True)
                subsets = self._fill_time_axis(tpair, subsets)
            except Exception as ex:
                # If skip: true then just ignore this file from importing, else raise exception
                FileUtil.ignore_coverage_slice_from_file_if_possible(
                    file_path, ex)
                valid_coverage_slice = False

            if valid_coverage_slice:
                # Generate local metadata string for current coverage slice
                self.evaluator_slice = EvaluatorSliceFactory.get_evaluator_slice(
                    self.recipe_type, tpair.file)
                local_metadata = gdal_coverage_converter._generate_local_metadata(
                    subsets, self.evaluator_slice)
                if self.session.import_overviews_only is False:
                    slices_dict["base"].append(
                        Slice(subsets, FileDataProvider(tpair.file),
                              local_metadata))

                # Then, create slices for selected overviews from user
                for overview_index in self.session.import_overviews:
                    subsets_overview = self.create_subsets_for_overview(
                        subsets, overview_index, gdal_file)

                    slices_dict[str(overview_index)].append(
                        Slice(subsets_overview, FileDataProvider(file),
                              local_metadata))

            timer.print_elapsed_time()
            count += 1

        return slices_dict
Example #12
 def on_event_state_transition(self, dm, oldstate, newstate, reason):
     if self.connectors_format:
         TcpClient.format(dm.get_device(), state=newstate, manager=dm)
     self.change_service_notification(dm, state=newstate, manager=dm)
     uid = dm.get_uid()
     if uid in self.devicemanagers_active_info:  # absence means a search is in progress
         info = self.devicemanagers_active_info[uid]
         from device.manager import GenericDeviceManager
         if GenericDeviceManager.is_connected_state_s(
                 newstate) and oldstate == DEVSTATE_CONNECTING:
             if info['operation'] == 'c':
                 if dm in self.devicemanagers_active:
                     self.devicemanagers_active.remove(dm)
                     self.devicemanagers_active_done.append(dm)
                 self.set_operation_ended(info)
             Timer(
                 0,
                 partial(self.start_remaining_connection_operations,
                         bytimer=False))
         elif oldstate == DEVSTATE_CONNECTING and newstate == DEVSTATE_DISCONNECTED:
             if reason == DEVREASON_PREPARE_ERROR or reason == DEVREASON_BLE_DISABLED:
                 for dm in self.devicemanagers_active:
                     info = self.devicemanagers_active_info[dm.get_uid()]
                     self.set_operation_ended(info)
                     if dm not in self.devicemanagers_active_done:
                         self.devicemanagers_active_done.append(dm)
                 del self.devicemanagers_active[:]
             else:
                 Timer(
                     0,
                     partial(self.start_remaining_connection_operations,
                             bytimer=False))
         elif (GenericDeviceManager.is_connected_state_s(oldstate)
               or oldstate == DEVSTATE_DISCONNECTING
               ) and newstate == DEVSTATE_DISCONNECTED:
             oper = 'c' if info['operation'] != 'd' else 'd'
             if reason != DEVREASON_REQUESTED:
                 info['operation'] = oper
                 if dm in self.devicemanagers_active_done:
                     self.devicemanagers_active_done.remove(dm)
                 if dm not in self.devicemanagers_active:
                     self.devicemanagers_active.append(dm)
                     GenericDeviceManager.sort(self.devicemanagers_active)
             else:
                 self.set_operation_ended(info)
                 self.main_session = None
                 if dm in self.devicemanagers_active:
                     self.devicemanagers_active.remove(dm)
                     self.devicemanagers_active_done.append(dm)
             Timer(
                 0,
                 partial(self.start_remaining_connection_operations,
                         bytimer=False))
Example #13
    def draw_vertices(self):
        """Generates the vertices based on the given lsystem and level"""

        with self._updater:
            with Timer("Node gen", True):
                for command in self._lsystem.start.iterate(self._level):
                    self._updater.update_tick()
                    self._handle_command(command)

        self._print_timings()

        with Timer("Node apply", True):
            self._apply_node()
Example #14
def init_daemon_proc(share=None):
    '''
    Start the daemon process.
    '''
    if share: ProcessArgs(share)
    sysm = SystemManager()
    ProcessManager().add_process('server', init_server_proc, (sysm, ))
    ProcessManager().run_all_process()
    Timer().add_task(
        lambda name='server': ProcessManager().check_heartbeat(name),
        conf.DAEMON_HEATBEAT_RATE,
        True)
    Timer().run_ever()
Example #15
  def __init__(self, image_path, sprite_size, frames=1, scale=1, animation_speed=10, start_frame=0):
    super().__init__()
    self._sheet, self._sheet_rect = load_image(image_path, scale)

    self.scale = scale
    self.sprite_size = sprite_size
    self.scaled_size = sprite_size * scale
    self.rect = Rect(0, 0, sprite_size * scale, sprite_size * scale)

    self.animation_frame = start_frame
    self.animation_max = frames - 1
    self.animation_speed = animation_speed
    self.animation_timer = Timer()
Example #16
def main(args):
    layers = [int(l) for l in args.layers.split(",")]

    Model = get_model_cls_by_type(args.type)
    DataLoader = get_data_loader_cls_by_type(args.type)

    data_loader = DataLoader(args.file, args.nc, args.size, args.batch_size)
    model = Model(layers, args.nc, args.omega)
    optimizer = JaxOptimizer("adam", model, args.lr)

    name = args.file.split(".")[0]
    logger = Logger(name)
    logger.save_option(vars(args))

    gt_img = data_loader.get_ground_truth_image()
    logger.save_image("original", data_loader.original_pil_img)
    logger.save_image("gt", gt_img)

    iter_timer = Timer()
    iter_timer.start()

    def interm_callback(i, data, params):
        log = {}
        loss = model.loss_func(params, data)
        log["loss"] = float(loss)
        log["iter"] = i
        log["duration_per_iter"] = iter_timer.get_dt() / args.print_iter

        logger.save_log(log)
        print(log)

    print("Training Start")
    print(vars(args))

    total_timer = Timer()
    total_timer.start()
    last_data = None
    for _ in range(args.epoch):
        data_loader = DataLoader(args.file, args.nc, args.size,
                                 args.batch_size)
        for data in data_loader:
            optimizer.step(data)
            last_data = data
            if optimizer.iter_cnt % args.print_iter == 0:
                interm_callback(optimizer.iter_cnt, data,
                                optimizer.get_optimized_params())

    if optimizer.iter_cnt % args.print_iter != 0:
        interm_callback(optimizer.iter_cnt, last_data,
                        optimizer.get_optimized_params())

    train_duration = total_timer.get_dt()
    print("Training Duration: {} sec".format(train_duration))
    logger.save_net_params(optimizer.get_optimized_params())
    logger.save_losses_plot()
Example #17
    def __init__(self, game):
        super().__init__(game, game.display.get_size())
        self.game.events.register_listener('ENEMY_KILLED', self.increase_score)
        self.game.events.register_listener('SWARM_DESTROYED',
                                           self.on_swarm_destroyed)
        self.game.events.register_listener('PLAYER_HEALTH_UPDATE',
                                           self.write_health)
        self.game.events.register_listener('PLAYER_DESTROYED',
                                           self.on_player_destroyed)

        self.background = Stars(self, 250)
        self.player = Player(self)
        self.player.rect.move_ip(
            self.game.display.get_size()[0] / 2 - self.player.rect.width / 2,
            self.game.display.get_size()[1] - self.player.rect.height - 20)

        self.enemies = Group()
        self.projectiles = Group()
        self.enemy_projectiles = Group()
        self.score = 0

        self.game_over = False
        self.show_overlay = False

        self.font = Font(get_asset_path('dpcomic.ttf'), 32)
        self.write_score()
        self.write_health()

        self.laser_sound = load_sound('laser.ogg')
        self.laser_sound.set_volume(0.2)

        self.waves = [
            Swarm(self, 'waves/1.png'),
            Swarm(self, 'waves/2.png'),
            Swarm(self, 'waves/3.png'),
            Swarm(self, 'waves/4.png'),
            Swarm(self, 'waves/5.png'),
            Swarm(self, 'waves/6.png'),
            Swarm(self, 'waves/7.png'),
        ]

        self.spawn_timer = Timer()
        self.current_wave_id = 1
        self.current_wave = self.waves.pop(0)
        self.current_wave_active = False
        self.spawn_swarm(self.current_wave)

        self.overlay, _ = load_image('overlay.png',
                                     width=self.game.get_width(),
                                     height=self.game.get_height())
Example #18
def optimizeSimulationSize2():
    """Time real vs. complex FFT round trips while shrinking the array size."""

    shape1 = 517
    shape2 = 129

    for i in range(20):
        arr = np.random.random([shape1, shape1, shape2])
        with Timer(name='Real:{}'.format([shape1, shape1, shape2])):
            f_arr = np.fft.rfftn(arr)
            r_arr = np.fft.irfftn(f_arr)
        with Timer(name='Cpx:{}'.format([shape1, shape1, shape2])):
            f_arr = np.fft.fftn(arr)
            r_arr = np.fft.ifftn(f_arr)
        shape1 -= 2
Example #19
 async def _do_connect(hp, loop, template_file):
     dd = None
     async with TcpClient._LOCK:
         dd = TcpClient._OPEN_CLIENTS[f'{hp[0]}:{hp[1]}']
     try:
         _LOGGER.debug(
             f'Trying to _TCPconnect {hp[0]}:{hp[1]} ({template_file})')
         await asyncio.wait_for(
             loop.create_connection(
                 partial(TcpClient,
                         hp=hp,
                         loop=loop,
                         template_file=template_file), hp[0], hp[1]), 7.0)
     except asyncio.CancelledError:
         pass
     except Exception:  # Exception already covers OSError
         if not dd['stopped']:
             _LOGGER.debug(
                 f'Error connecting to {hp[0]}:{hp[1]}: {traceback.format_exc()}'
             )
             dd['timer'] = Timer(
                 TcpClient.RECONNECT_INTERVAL,
                 partial(TcpClient._do_connect, hp, loop, template_file))
             return
     dd['event'].set()
Example #20
class LoggingStateProviderWithListeners:

    def __init__(self, name):
        self.log = logging.getLogger(name)
        self.__listeners = []
        self.__timer = Timer()

    def registerListener(self, listener):
        self.log.debug("Appending listener: " + listener.name)
        self.__listeners.append(listener)

    # Notifies all listeners after the state changed. Measures the time between updates to give it to listeners.
    # Should be called by the child class.
    def notify_listeners(self, newstate):

        # newstate = self._read_state(self.imu.getIMUData())

        # new state is ready, we can print it
        self.log.debug("Notify listeners of new state")
        self.log.debug("%s", newstate)

        # give new state to listeners
        time_since_last_update = self.__timer.readAndReset()
        for listener in self.__listeners:
            listener.new_state(time_since_last_update, newstate)
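Note: notify_listeners() relies on a readAndReset() method that returns the time since the previous call and restarts the measurement. A minimal sketch, assuming time.perf_counter:

import time

class Timer:
    def __init__(self):
        self._last = time.perf_counter()

    def readAndReset(self):
        # Return seconds since the previous call (or construction) and reset.
        now = time.perf_counter()
        elapsed, self._last = now - self._last, now
        return elapsed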
Example #21
 def fetch(self, url, params=None, data=None, **request_params):
     try:
         with Timer(logger=self.log,
                    name='Fetching URL %s with params %r' % (url, params)):
             response = self.session.request(
                 'post' if data else 'get',
                 url,
                 params=params,
                 data=data,
                 headers={
                     'User-Agent':
                     'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_3) AppleWebKit/601.4.4 (KHTML, like Gecko) Version/9.0.3 Safari/601.4.4'
                 },
                 **request_params)
             response.raise_for_status()
             self.save_cookies()
             return response
     except Timeout as e:
         raise ScraperError(32000,
                            "Timeout while fetching URL: %s (%%s)" % url,
                            lang(30000),
                            cause=e)
     except NoValidProxiesFound as e:
         raise ScraperError(32005, "Can't find anonymous proxy", cause=e)
     except RequestException as e:
         raise ScraperError(32001,
                            "Can't fetch URL: %s (%%s)" % url,
                            lang(30000),
                            cause=e)
     except ProxyListException as e:
         plugin.set_setting('use-proxy', 0)
         raise ScraperError(32004,
                            "Can't load anonymous proxy list",
                            cause=e)
Example #22
 def handle_device(self,
                   address,
                   uid,
                   callback,
                   *args,
                   timeout=-1,
                   do_split=False,
                   **kwargs):
     self.unhandle_device(address, uid)
     d = self.callbacks[address] if address in self.callbacks else dict()
     if timeout > 0:
         t = Timer(timeout, partial(self.unhandle_by_timer, address, uid))
     else:
         t = None
     if do_split:
         if address == COMMAND_SPLIT:
             kwargs = dict(**kwargs)
         else:
             kwargs = dict(split=0, strsplit='')
     else:
         kwargs = dict(split=False)
     d[uid] = dict(f=callback, a=args, t=t, **kwargs)
     self.callbacks[address] = d
     _LOGGER.debug(
         f'Handle Added add={address}, uid={uid} timeout={timeout} result={self.callbacks}'
     )
Example #23
File: test.py, Project: donghaoye/fashionHD
def test_AttributeDataset():
    from data_loader import CreateDataLoader
    from options.attribute_options import TrainAttributeOptions, TestAttributeOptions

    timer = Timer()

    timer.tic()
    opt = TrainAttributeOptions().parse()
    loader = CreateDataLoader(opt)
    print('cost %.3f sec to create data loader.' % timer.toc())

    loader_iter = iter(loader)
    data = next(loader_iter)

    for k, v in data.items():
        print('data["%s"]: %s' % (k, type(v)))
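Note: this test uses MATLAB-style tic()/toc() calls, where toc() returns the seconds elapsed since the last tic(). A minimal sketch under that assumption:

import time

class Timer:
    def tic(self):
        self._t = time.perf_counter()

    def toc(self):
        return time.perf_counter() - self._t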
Example #24
 def browse_episodes(self, skip=0):
     self.ensure_authorized()
     doc = self.fetch(self.BASE_URL + "/browse.php", {'o': skip})
     with Timer(logger=self.log, name='Parsing episodes list'):
         body = doc.find('div', {'class': 'content_body'})
         series_titles = body.find('span', {'style': 'font-family:arial;.*?'}).strings
         titles = body.find('span', {'class': 'torrent_title'}).strings
         episode_titles, original_titles = zip(*[parse_title(t) for t in titles])
         release_dates = body.find('b').strings[1::3]
         release_dates = [str_to_date(d, '%d.%m.%Y %H:%M') for d in release_dates]
         selected_page = body.find('span', {'class': 'd_pages_link_selected'}).text
         last_page = body.find('a', {'class': 'd_pages_link'}).last.text
         self.has_more = int(selected_page) < int(last_page)
         icons = body.find('img', {'class': 'category_icon'}).attrs('src')
         onclicks = body.find('a', {'href': 'javascript:{};'}).attrs('onClick')
         series_ids, season_numbers, episode_numbers = zip(*[parse_onclick(s or "") for s in onclicks])
         posters = [poster_url(i[0][18:-5], i[1]) for i in zip(icons, season_numbers)]
         icons = [self.BASE_URL + url for url in icons]
         images = [url.replace('/icons/cat_', '/posters/poster_') for url in icons]
         data = zip(series_ids, series_titles, season_numbers,
                    episode_numbers, episode_titles, original_titles, release_dates, icons, posters, images)
         episodes = [Episode(*e) for e in data if e[0]]
         self.log.info("Got %d episode(s) successfully" % (len(episodes)))
         self.log.debug(repr(episodes))
     return episodes
Example #25
File: gatt.py, Project: p3g4asus/pymoviz
 def operation_timer_init(self, timeout=False, handler=None):
     if self.operation_timer:
         self.operation_timer.cancel()
     if timeout:
         if not handler:
             handler = partial(self.inner_disconnect, reason=DEVREASON_TIMEOUT)
         self.operation_timer = Timer(timeout, handler)
Example #26
 def get_series_episodes(self, series_id):
     doc = self._get_series_doc(series_id)
     episodes = []
     with Timer(logger=self.log,
                name='Parsing episodes of series with ID %d' % series_id):
         body = doc.find('div', {'class': 'mid'})
         series_title, original_title = parse_title(
             body.find('h1').first.text)
         image = self.BASE_URL + body.find('img').attr('src')
         icon = image.replace('/posters/poster_', '/icons/cat_')
         episode_divs = body.find('div', {'class': 't_row.*?'})
         series_poster = None
         for ep in episode_divs:
             title_td = ep.find('td', {'class': 't_episode_title'})
             episode_title, orig_title = parse_title(title_td.text)
             onclick = title_td.attr('onClick')
             release_date = ep.find('span', {
                 'class': 'micro'
             }).find('span')[0].text
             release_date = str_to_date(
                 release_date, '%d.%m.%Y') if release_date else None
             _, season_number, episode_number = parse_onclick(onclick)
             poster = poster_url(original_title, season_number)
             if not series_poster:
                 series_poster = poster
             episode = Episode(series_id, series_title, season_number,
                               episode_number, episode_title, orig_title,
                               release_date, icon, poster, image)
             episodes.append(episode)
         self.log.info("Got %d episode(s) successfully" % (len(episodes)))
         self.log.debug(repr(episodes))
     return episodes
Example #27
 def get_series_bulk(self, series_ids):
     """
     :rtype : dict[int, Series]
     """
     if not series_ids:
         return {}
     cached_details = self.series_cache.keys()
     not_cached_ids = [
         _id for _id in series_ids if _id not in cached_details
     ]
     results = dict((_id, self.series_cache[_id]) for _id in series_ids
                    if _id in cached_details)
     if not_cached_ids:
         with Timer(logger=self.log,
                    name="Bulk fetching series with IDs " +
                    ", ".join(str(i) for i in not_cached_ids)):
             with ThreadPoolExecutor(
                     max_workers=self.max_workers) as executor:
                 futures = [
                     executor.submit(self.get_series_info, _id)
                     for _id in not_cached_ids
                 ]
                 for future in as_completed(futures):
                      result = future.result()
                      self.series_cache[result.id] = result
                      results[result.id] = result
     return results
Example #28
 def rearm_format_timer(self):
     if self.timer_format:
         self.timer_format.cancel()
         self.timer_format = None
     if self.formatter.timeouttime > 0:
         self.timer_format = Timer(self.formatter.timeouttime,
                                   self.set_timeout)
Example #29
 def fetch(self, url, params=None, data=None, **request_params):
     try:
         with Timer(logger=self.log,
                    name='Fetching URL %s with params %r' % (url, params)):
             time.sleep(1)
             response = self.session.request('post' if data else 'get',
                                             url,
                                             params=params,
                                             data=data,
                                             **request_params)
             response.raise_for_status()
             self.save_cookies()
             return response
     except Timeout as e:
         raise ScraperError(32000,
                            "Timeout while fetching URL: %s (%%s)" % url,
                            lang(30000),
                            cause=e)
     except NoValidProxiesFound as e:
         raise ScraperError(32005, "Can't find anonymous proxy", cause=e)
     except RequestException as e:
         raise ScraperError(32001,
                            "Can't fetch URL: %s (%%s)" % url,
                            lang(30000),
                            cause=e)
     except ProxyListException as e:
         plugin.set_setting('use-proxy', 0)
         raise ScraperError(32004,
                            "Can't load anonymous proxy list",
                            cause=e)
Example #30
 def _set_ble_interface(self):
     self._ble = self._oscer
     if self._init_oscer:
         Timer(0, partial(
             self._oscer.init,
             on_init_ok=self.on_osc_init_ok,
             on_connection_timeout=self.on_connection_timeout))
Example #31
    def __init__(self, scene):
        super().__init__("player.png", 32, 4, 3, 100)
        self.scene = scene
        self.game = self.scene.game
        self.game.events.register_listener('PLAYER_HIT', self.on_hit)

        self.thrust = .8
        self.speed_max = 15
        self.speed_decay = .95
        self.movement_clamp = 10
        self.velocity = pygame.math.Vector2(0, 0)
        self.shoot_delay = 250
        self.shoot_timer = Timer()
        self.shoot_cooldown = False
        self.health = 100
        self.weapon_power = 1
Example #32
# filters
parser.add_argument("-p", help="Enables the punctuation filter.", action="store_true")
parser.add_argument("-e", help="Enables the tag remover.", action="store_true")
parser.add_argument("-r", help="Enables the repeated-letters filter.", action="store_true")
parser.add_argument("-s", help="Enables the stopwords filter (using nltk's stopwords list).", action="store_true")
parser.add_argument("-t", help="Enables the stopwords filter (using POS tags).", action="store_true")
parser.add_argument("-u", help="Enables the URL filter.", action="store_true")
parser.add_argument("-n", help="Enables the 'not' adapter.", action="store_true")
args = parser.parse_args()

######### Object creation


# timer used for timing
timer = Timer()

# a preprocessor
preprocess = Preprocessor()

print("")
print("")
# Classifier selection
if args.classifier == 'svm':
    from classifier.svmClassifier import SVMClassifier
    print("Classifier:\tSVM Classifier.")
    classifier = SVMClassifier()
elif args.classifier == 'bayes':
    from classifier.bayesianClassifier import BayesianClassifier
    print("Classifier:\tBayesian Classifier.")
    classifier = BayesianClassifier()
Example #33
 def __init__(self, name):
     self.log = logging.getLogger(name)
     self.__listeners = []
     self.__timer = Timer()
Example #34
from classifier.shortTextClassifier import ShortTextClassifier
from classifier.bayesianClassifier import BayesianClassifier
from classifier.svmClassifier import SVMClassifier
#from classifier.linearClassifier import LinearClassifier

# parameters
file = ["tweeti-b", "tweeti-b.dev"]
#file = ["tweeti-b.dev" ]
numOfBins = 10

# adjust all path
originalFile = [get_project_dir() + "resources/tweeti/" + path + ".tsv" for path in file]
conllFile = [get_project_dir() + "resources/conll/" + path + ".conll" for path in file]

# timer used for timing
timer = Timer()

# classifiers to test
classifiers = {"ShortTextClassifier": ShortTextClassifier(), "SVMClassifier": SVMClassifier(), "Bayes": BayesianClassifier()}
#classifiers = {"LinearClassifier": LinearClassifier()}

#classifiers = {"Bayes": BayesianClassifier()}
# loading and processing data set
timer.start()
labeled_featuresets = list(read_conll_file(originalFile, conllFile, Pipeline()).values())

validator = CrossValidator(labeled_featuresets)
print("Elapsed time for data set processing: %.0fs\n" % (timer.stop() / 1000))

# test the classifiers
for classifierName in classifiers: