Code example #1
    def update(self, doc_id, body, params=None):
        try:
            logger.debug("ES:%s,update=%s,=%s" % (self.doc_type, doc_id, body))

            try:
                body["doc"]['@ts'] = get_now_ts()
            except Exception as e:
                logger.error("update error=%s" % e, exc_info=True)

            # VersionConflictEngineException
            if params is None:
                params = {}
            params['retry_on_conflict'] = 5

            ret = self.es.update(index=self.index_name,
                                 id=doc_id,
                                 doc_type=self.doc_type,
                                 body=body,
                                 params=params)
            # ret = self.es.update(index=self.index_name, id=doc_id, body=body, params=params)
        except Exception as e:
            logger.warn("update:%s,%s,%s,%s" % (doc_id, body, params, e),
                        exc_info=True)
            return None
        return ret
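
A note on example #1: the commented-out call above hints at the newer elasticsearch-py call style, where doc_type is deprecated (7.x) or removed (8.x) and retry_on_conflict is accepted as a plain keyword argument instead of a params dict. A minimal sketch of that variant, using a hypothetical index name and document id rather than the project's own wrapper:

    from elasticsearch import Elasticsearch

    es = Elasticsearch()
    # retry_on_conflict replaces params={'retry_on_conflict': 5} from the
    # wrapper above; the index and id below are placeholders, not project values.
    ret = es.update(index="my_index",
                    id="doc-1",
                    body={"doc": {"@ts": 1700000000}},
                    retry_on_conflict=5)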
Code example #2
File: node.py Project: qq431169079/JupyterNotebook
    def _parse_shares(self, msg_type, shares, expected_parties=None):
        """Reads shares received from other nodes.
        Expects that all messages are of @msg_type, containing serialized objects.
        @shares - a dict / list of length _n_parties holding current party->share/None mapping, that will be updated during the run.
        @expected_parties - a list of party ids of which inputs are to be received. If None - all parties needed.
        Returns True iff all expected shares received.
        """

        m = self._io.get_data_msg(msg_type)
        while m is not None:
            assert m.get_type() == msg_type, \
                "Unexpected message received, expecting %d:\n%r" % (msg_type, m)
            data = m.get_msg()
            share = unserialize(data)
            src = m.get_src()
            if shares[src] is not None:
                log.warn("Src %d already sent a share...", src)
            shares[src] = share
            m = self._io.get_data_msg(msg_type)

        # Are we done?
        if expected_parties is None:
            expected_parties = xrange(self._n_parties)
        remaining = [p for p in expected_parties if (shares[p] is None)]
        return (remaining == [])
Code example #3
    def __call__(self, _handler):
        """ gets called when we class decorate
        """

        for item in self._routes:
            if item.get('uri') == self.uri:
                logger.error('uri already exists! uri:',
                             self.uri,
                             'name:',
                             self.name,
                             'handler:',
                             _handler,
                             caller=self)

            if item.get('name') == self.name:
                logger.warn('name already exists! uri:',
                            self.uri,
                            'name:',
                            self.name,
                            'handler:',
                            _handler,
                            caller=self)
        self._routes.append({
            'uri': self.uri,
            'name': self.name,
            'handler': _handler
        })

        return _handler
Code example #4
File: constant.py Project: berenm/gentulu
  def update(self, o):
    self.in_files.update(o.in_files)

    # `other` is used as a dict below, so iterate over its items
    for k, v in o.other.items():
      if k in self.other and self.other[k] != v:
        log.warn('updating current constant %s = %s to %s' % (k, self.other[k], v))
      self.other[k] = v
Code example #5
def refresh_token(refresh_tk, recursion=True):
    auth_point = "{}://{}".format(PAN_SERVICE['protocol'],
                                  PAN_SERVICE['auth_domain'])
    path = "token"
    params = {
        "grant_type": 'refresh_token',
        "refresh_token": refresh_tk,
        "client_id": PAN_SERVICE["client_id"],
        "client_secret": PAN_SERVICE["client_secret"]
    }
    headers = {"User-Agent": "pan.baidu.com"}
    rs = requests.get("%s%s" % (auth_point, path),
                      params=params,
                      headers=headers)
    # logger.info("refresh_token request state:{}".format(rs.status_code))
    # print("content:", rs.content)
    logger.info("restapi refresh_token:{}, status_code:{}".format(
        refresh_tk, rs.status_code))
    if rs.status_code == 200:
        jsonrs = rs.json()
        return jsonrs
    else:
        logger.warn(rs.content)
        if recursion:
            time.sleep(1)
            return refresh_token(refresh_tk, False)
        else:
            return {}
Code example #6
 def create_db(self, conn):
     """ this optional schema will be used by the application """
     if not self.mysql_db:
         log.warn('No default database configured.')
         return
     sql = 'CREATE DATABASE IF NOT EXISTS `{}`;'.format(self.mysql_db)
     self.execute(sql, conn=conn)
Code example #7
    def _init_middlewares(self):
        """ 加载中间件
        """
        logger.info('load middleware start >>>', caller=self)
        middlewares = []

        for m in self.middlewares:
            l = m.split('.')
            class_name = l[-1]
            model = '.'.join(l[:-1])
            mo = __import__(model, {}, {}, ['classes'])
            middleware = getattr(mo, class_name)
            instance = middleware()

            if not isinstance(instance, Middleware):
                logger.warn(
                    'middleware must inherit from tbag.core.middleware.Middleware:',
                    m,
                    caller=self)

                continue
            middlewares.append(instance)
            logger.info('middleware:', middleware, caller=self)
        options.define('middlewares',
                       middlewares,
                       help='set web api middlewares')
        logger.info('load middleware done <<<', caller=self)
Code example #8
 def update_fields(self, doc_id, **kwargs):
     try:
         ret = self.update(doc_id, {"doc": kwargs})
     except Exception as e:
         logger.warn("update_fields:%s,%s,%s" % (doc_id, kwargs, e),
                     exc_info=True)
         return None
     return ret
Code example #9
File: actions.py Project: codenrhoden/rdopkg
def query(filter, package, verbose=False):
    r = _query.query_rdo(filter, package, verbose=verbose)
    if not r:
        log.warn('No distrepos information in rdoinfo for %s' % filter)
        return
    if verbose:
        print('')
    _query.pretty_print_query_results(r)
Code example #10
File: common.py Project: grafolean/grafolean
def _bg_mqtt_publish_done(fn):
    if fn.cancelled():
        log.warn("MQTT publishing: cancelled")
        return
    ex = fn.exception()
    if ex:
        log.error(
            f"MQTT publishing: exception {''.join(traceback.format_exception(None, ex, ex.__traceback__))}"
        )
Code example #11
File: common.py Project: grafolean/grafolean
def mqtt_publish_changed_multiple_payloads(topics_with_payloads):
    if not MQTT_HOSTNAME:
        log.warn("MQTT not connected, not publishing change")
        return
    superuserJwtToken = SuperuserJWTToken.get_valid_token(
        'backend_changed_notif')
    future_response = executor.submit(_bg_mqtt_publish, topics_with_payloads,
                                      superuserJwtToken)
    future_response.add_done_callback(_bg_mqtt_publish_done)  # log any errors
Code example #12
 def allow(self, value):
     assert isinstance(value, list)
     newv = []
     for v in value:
         if v in self.supported():
             newv.append(v)
         else:
             log.warn("Excluding unsupported "+self.name+" value '"+v+"'")
     allowed_table[self.name] = newv
Code example #13
def run(args):

    check_output_dir()
    output_filename = str(args.name).lower().replace(" ", "_")
    full_output_filename = os.path.join("results", output_filename + ".full.ft")
    training_output_filename = os.path.join("results", output_filename + ".train.ft")
    test_output_filename = os.path.join("results", output_filename + ".test.ft")

    try:
        annotations = pd.read_csv(args.input)
    except Exception as e:
        raise Exception("Cannot read input file. Encountered error: {}".format(str(e)))

    if args.split < 0.01 or args.split > 0.99:
        raise Exception("Invalid split. Value should be between 0.01 and 0.99")

    msk = np.random.rand(len(annotations)) < (1-args.split)
    training_set = annotations[msk]
    test_set = annotations[~msk]

    log.info("Size of training set: {}".format(len(training_set)))
    log.info("Size of test set:     {}".format(len(test_set)))

    if len(training_set) == 0:
        raise Exception("Training set is empty")

    if len(test_set) == 0:
        raise Exception("Test set is empty")

    log.info("Writing full data to {}".format(full_output_filename))
    with open(full_output_filename, "w+") as out_full:


        log.info("Writing training data to {}".format(training_output_filename))
        with open(training_output_filename, 'w+') as out:

            for index, row in training_set.iterrows():
                try:
                    out.write(concat_label_and_text(row['label'], preprocess_text(row['text']), args.label)+'\n')
                    out_full.write(concat_label_and_text(row['label'], preprocess_text(row['text']), args.label) + '\n')
                except:
                    log.warn("Encountered an invalid row, skipping.")
                    pass

        log.info("Writing test data to {}".format(test_output_filename))
        with open(test_output_filename, 'w+') as out:

            for index, row in test_set.iterrows():
                try:
                    out.write(concat_label_and_text(row['label'], preprocess_text(row['text']), args.label)+'\n')
                    out_full.write(concat_label_and_text(row['label'], preprocess_text(row['text']), args.label) + '\n')
                except:
                    log.warn("Encountered an invalid row, skipping.")
                    pass

    log.info("Success")
Code example #14
    def get_page_in_other_languages(self, soup):
        try:
            div_lang = soup.find("div", {'id': 'p-lang'})
            li_list = div_lang.find_all("li")

            for li in li_list:
                a = li.find('a', href=True)
                self.queue.append(a['href'])
        except AttributeError:
            log.warn("Other languages not found!")
Code example #15
    def update_field(self, doc_id, field, value):
        try:
            ret = self.update(doc_id, {"doc": {field: value}})
        except Exception as e:
            logger.warn(
                "update_field, doc_id: %s, field: %s, value: %s, error: %s" %
                (doc_id, field, value, e),
                exc_info=True)
            return None

        return ret
Code example #16
File: parameter.py Project: berenm/gentulu
def reduce_parameters():
  for p in raw_parameters:
    li = library.get(p.stack.library_name)
    gr = li.groups[p.stack.extension_name.upper()]
    ex = gr.extensions[p.stack.extension_name.lower()]
#    ca = ex.categories[p.stack.category_name.lower()]
    ca = ex.categories['functions']
    fn = ca.functions[p.stack.function_name]

    if not p.name in fn.parameters:
      fn.parameters[p.name] = parameter(p)
    else:
      log.warn('P %s already in %s.%s.%s.%s' % (p.name, li.name, ex.name, ca.name, fn.name))
Code example #17
 def delete(self, doc_id):
     try:
         logger.debug("ES:%s,delete=%s" % (self.doc_type, doc_id))
         ret = self.es.delete(index=self.index_name,
                              id=doc_id,
                              doc_type=self.doc_type)
         # ret = self.es.delete(index=self.index_name, id=doc_id)
     except exceptions.NotFoundError:
         logger.warn("not found doc:{}".format(doc_id))
         ret = None
     except Exception as e:
         logger.warn("delete:%s,%s" % (doc_id, e), exc_info=True)
         ret = None
     return ret
Code example #18
def get_links(html):
    parser = etree.HTMLParser()
    
    try:
        tree = etree.fromstring(html, parser=parser)
    except XMLSyntaxError as ex:
        log.warn('html parsing error')
        return []

    if tree is None:
        log.warn("html not parsed")
        return []
    links = tree.xpath('//a/@href')
    return links
Code example #19
    def get_history(self, article_url, lang):
        history = []
        pages = []

        base_url = '{uri.scheme}://{uri.netloc}'.format(uri=urlparse(article_url))

        history_page_url = "{0}/w/index.php?title={1}&offset=&limit=500&action=history".format(base_url, unquote(
            article_url[len(base_url + '/wiki/'):]))

        response_s = self.load_page_by_url(history_page_url)
        if not response_s:
            return

        soup = BeautifulSoup(response_s.text, 'html.parser')

        while True:
            if pages:
                response = self.load_page_by_url(pages.pop(0))
                if not response:
                    return

                soup = BeautifulSoup(response.text, 'html.parser')

            try:
                ul_history = soup.find("ul", {'id': 'pagehistory'})
                li_list = ul_history.find_all("li")
                for element in li_list:
                    user = element.find("bdi").text

                    try:
                        wiki_date = element.find("a", {'class': 'mw-changeslist-date'}).text
                    except AttributeError:
                        wiki_date = element.find("span", {'class': 'history-deleted'}).text

                    if lang in self.formatter.lang_support_default:
                        sql_date = self.formatter.convert_date(lang, wiki_date)
                        history.append((sql_date, user))
                    else:
                        history.append((None, user, wiki_date))
            except AttributeError as e:
                log.warn(e)

            try:
                next_url = soup.find("a", {'rel': 'next'})['href']
                if next_url:
                    next_url = base_url + next_url
                    pages.append(next_url)
            except TypeError:
                break
        return history
Code example #20
    def load_page_by_url(self, full_url, sleep_time=10):
        count_retry = 1

        try:
            while not utils.is_connected():
                if count_retry in range(6):
                    log.warn("NO INTERNET, Short retry [{0}/5], Next try -> {1} sec".format(count_retry, sleep_time))
                    time.sleep(sleep_time)
                elif count_retry in range(11):
                    long_sleep_time = sleep_time * 180
                    log.warn(
                        "NO INTERNET, Long retry [{0}/5], Next try -> {1} sec".format(count_retry - 5, long_sleep_time))
                    time.sleep(long_sleep_time)
                elif count_retry > 10:
                    log.critical("OOPS!! Error. Make sure you are connected to Internet and restart script.")
                    sys.exit(0)
                count_retry = count_retry + 1

            return self.session.get(full_url, allow_redirects=True, timeout=20, headers={'User-Agent': self.user_agent})

        except requests.ConnectionError as e:
            log.warn(e)
            return
        except requests.Timeout as e:
            log.warn(e)
            return self.load_page_by_url(full_url)
Code example #21
 def es_search_exec(self, es_body, fields=None):
     # logger.debug("utils.is_not_production()=%s" % utils.is_not_production())
     # logger.debug("es_search_exec=%s" % ("%s" % es_body).replace("'", "\""))
     result = {}
     try:
         if fields:
             result = self.es.search(index=self.index_name,
                                     body=es_body,
                                     params=fields)
         else:
             result = self.es.search(index=self.index_name, body=es_body)
     except Exception as e:
         logger.warn("es_search_exec:%s,%s" % (es_body, e), exc_info=True)
     return result
Code example #22
File: function.py Project: berenm/gentulu
def reduce_functions():
    for f in raw_functions:
        li = library.get(f.stack.library_name)
        gr = li.groups[f.stack.extension_name.upper()]
        ex = gr.extensions[f.stack.extension_name.lower()]
        #    ca = ex.categories[f.stack.category_name.lower()]
        ca = ex.categories["functions"]

        if not f.name in ca.functions:
            ca.functions[f.name] = function(f)
        else:
            log.warn("F %s already in %s.%s.%s" % (f.name, li.name, ex.name, ca.name))
            ca.functions[f.name].in_files.append(f.stack.file_name)

    reduce_parameters()
Code example #23
        def req_file_list(data_item: DataItem, pan_acc: PanAccounts):
            parent_id = 55
            from_dir = '/'
            if data_item:
                from_dir = data_item.path
                parent_id = data_item.id
            else:
                return
            log.info("sync file:{}, filename:{}".format(data_item.id, data_item.filename))
            if data_item.isdir == 1:
                json_data_list = restapi.file_list(pan_acc.access_token, from_dir)
                if json_data_list is not None:
                    log.info("update synced is -1, parent_id:{}".format(parent_id))
                    DataDao.update_data_item_by_parent_id(parent_id, {"synced": -1})
                else:
                    log.warn("json_data_list is null!")
                if json_data_list:
                    for fi in json_data_list:
                        item_map = dict(category=fi['category'],
                                        isdir=fi['isdir'],
                                        filename=fi['server_filename'],
                                        server_ctime=fi['server_ctime'],
                                        fs_id=fi['fs_id'],
                                        path=fi['path'],
                                        size=fi['size'],
                                        md5_val=fi.get('md5', ''),
                                        account_id=pan_acc.user_id,
                                        panacc=pan_acc.id,
                                        parent=parent_id,
                                        synced=0,
                                        pin=0
                                        )
                        di: DataItem = DataDao.get_data_item_by_fs_id(item_map['fs_id'])

                        if di:
                            item_map.pop('pin')
                            DataDao.update_data_item(di.id, item_map)
                            data_item: DataItem = DataDao.get_data_item_by_id(di.id)
                            # print("will update data item:", item_map)
                        else:
                            DataDao.save_data_item(fi['isdir'], item_map)
                            # print("will save data item:", item_map)
                        time.sleep(0.1)
                else:
                    log.info("have not any sub files!")
                self.__clear_data_items(parent_id, -1, True)
                self.__clear_data_items(parent_id, -1, False)
            DataDao.update_data_item(data_item.id, {"synced": 1})
Code example #24
    def create_default_user(self, conn):
        """ this optional user will be used by the application """
        if not self.mysql_user or not self.mysql_password:
            log.warn('No default user/password configured.')
            return

        # there's some kind of annoying encoding bug in the lib here
        # so we have to format the string rather than passing it as
        # a param. totally safe, I bet.
        self.add(
            'CREATE USER `{}`@`%` IDENTIFIED BY %s;'.format(self.mysql_user),
            (self.mysql_password, ))
        if self.mysql_db:
            self.add('GRANT ALL ON `{}`.* TO `{}`@`%`;'.format(
                self.mysql_db, self.mysql_user))
        self.add('FLUSH PRIVILEGES;')
        self.execute_many(conn=conn)
Code example #25
 def initialize_db(self):
     """
     post-installation run to set up data directories
     and install mysql.user tables
     """
     self.make_datadir()
     log.info('Initializing database...')
     try:
         subprocess.check_call([
             'mysqld', '--initialize-insecure', '--user=mysql',
             '--datadir={}'.format(self.datadir)
         ])
         log.info('Database initialized.')
         return True
     except subprocess.CalledProcessError:
         log.warn('Database was previously initialized.')
         return False
Code example #26
File: actions.py Project: morucci/rdopkg
def new_version_setup(patches_branch=None, local_patches=False,
                      new_version=None, version_tag_style=None):
    args = {}
    if new_version:
        # support both version and tag
        ver, _ = guess.tag2version(new_version)
        if ver != new_version:
            new_version = ver
            args['new_version'] = new_version
        new_version_tag = guess.version2tag(new_version, version_tag_style)
    else:
        ub = guess.upstream_branch()
        if not git.ref_exists('refs/remotes/%s' % ub):
            msg=("Upstream branch not found: %s\n"
                 "Can't guess latest version.\n\n"
                 "a) provide new version (git tag) yourself\n"
                 "   $ rdopkg new-version 1.2.3\n\n"
                 "b) add upstream git remote:\n"
                 "   $ git remote add -f upstream GIT_URL\n"
                 % ub)
            raise exception.CantGuess(msg=msg)
        new_version_tag = git.get_latest_tag(ub)
        new_version, _ = guess.tag2version(new_version_tag)
        args['new_version'] = new_version
        log.info("Latest version detected from %s: %s" % (ub, new_version))
    args['changes'] = ['Update to %s' % new_version]
    args['new_patches_base'] = new_version_tag
    spec = specfile.Spec()
    rpm_version = spec.get_tag('Version')
    new_rpm_version, new_milestone = specfile.version_parts(new_version)
    args['new_rpm_version'] = new_rpm_version
    if new_milestone:
        args['new_milestone'] = new_milestone
    if rpm_version != new_rpm_version:
        if new_milestone:
            args['new_release'] = '0.1'
        else:
            args['new_release'] = '1'
    if not local_patches:
        if not patches_branch or \
           not git.ref_exists('refs/remotes/' + patches_branch):
            log.warn("Patches branch '%s' not found. Running in --bump-only "
                     "mode." % patches_branch)
            args['bump_only'] = True
    return args
Code example #27
 def update_by_query(self, es_body, params):
     if not params:
         return None
     _es_body = {}
     try:
         inf = ["ctx._source.%s = params.%s" % (f, f) for f in params]
         _es_body['query'] = es_body['query']
         _es_body['script'] = {
             'lang': 'painless',
             'params': params,
             'inline': ';'.join(inf)
         }
         print("_es_body:", _es_body)
         ret = self.es.update_by_query(index=self.index_name, body=_es_body)
     except Exception as e:
         logger.warn("update_fields:%s,%s,%s" % (_es_body, params, e),
                     exc_info=True)
         return None
     return ret
Code example #28
def compare_blindboards(expected, actual, file_name=None):
    if not expected == actual:
        info = ""
        if file_name:
            info = "for `%s`" % os.path.basename(file_name)
        warn("BlindBoards are different %s: expected" % info)
        warn(expected)
        warn("But found instead:")
        warn(actual)
        raise AssertionError("BlindBoards are different")
    return True
Code example #31
File: main.py Project: superwhoopy/chessreader
    def do_next(self, _):
        '''TODO'''
        # make sure the game is still on
        if self.current_board.is_game_over():
            print('Game is over! Type "help" to see authorized commands')

        # img capture!
        img_path = self._get_img_name()
        self.capture_engine.capture(img_path)
        assert os.path.exists(img_path)

        if self.do_live_analysis:
            try:
                self._process_next_move(img_path)
            except IllegalMove as e:
                log.warn(str(e))
                log.info('no move registered; type "next" again to retry')
        else:
            self.current_board.push(chess.Move.null())
Code example #32
File: actions.py Project: codenrhoden/rdopkg
def check_new_patches(version, local_patches_branch, local_patches=False,
                      patches_branch=None, changes=None):
    if not changes:
        changes = []
    if local_patches:
        head = local_patches_branch
    else:
        if not patches_branch:
            raise exception.RequiredActionArgumentNotAvailable(
                action='check_new_patches', arg='patches_branch')
        head = patches_branch
    spec = specfile.Spec()
    n_patches = spec.get_n_patches() + spec.get_n_excluded_patches()
    patches = git.get_commit_subjects(version, head)
    if n_patches > 0:
        patches = patches[0:-n_patches]
    if not patches:
        log.warn("No new patches detected in %s." % head)
        helpers.confirm("Do you want to continue anyway?", default_yes=False)
    changes.extend(patches)
    return {'changes': changes}
Code example #34
 def bulk_delete(self, doc_ids):
     try:
         logger.debug("ES:%s,delete=%s" % (self.doc_type, doc_ids))
         actions = []
         for _id in doc_ids:
             actions.append({
                 "_op_type": "delete",
                 "_id": _id,
                 "_index": self.index_name,
                 "_type": self.doc_type
             })
             # actions.append({"_op_type": "delete", "_id": _id, "_index": self.index_name})
         success, errors = helpers.bulk(client=self.es, actions=actions)
         logger.info("success count:{}, errors:{}".format(success, errors))
         ret = {"success": success, "errors": errors}
     except exceptions.NotFoundError:
         logger.warn("not found doc:{}".format(doc_ids))
         ret = None
     except Exception as e:
         logger.warn("delete:%s,%s" % (doc_ids, e), exc_info=True)
         ret = None
     return ret
Code example #35
    def get_content(self, soup):
        content = ""
        try:
            div = soup.find('div', {'id': 'mw-content-text'})
            p_list = div.find_all('p')

            for p in p_list:
                try:
                    text = p.get_text().strip()
                except AttributeError:
                    continue

                text = self.parenthesis_regex.sub('', text)
                text = self.citations_regex.sub('', text)

                if not content.strip():
                    content = text + '\n'
                else:
                    content = content + " " + text + '\n'
            return content
        except AttributeError as e:
            log.warn(e)
Code example #36
File: main.py Project: superwhoopy/chessreader
    def _process_next_move(self, image_path):
        '''TODO'''
        log.debug('running img processing on "{}"...'.format(image_path))
        self.image_processor.process(image_path)
        new_blindboard = self.image_processor.get_blindboard()
        diff = new_blindboard.diff(self.last_valid_blindboard)
        move = core.diffreader.read(diff)
        log.info("see {} playing: {}".format(self._get_turn_str(), move))

        if not self.current_board.is_legal(move):
            log.warn("Illegal move: {}".format(move))
            raise IllegalMove(move)

        # the move is legit: change state of board and blindboard
        self.current_board.push(move)
        self.last_valid_blindboard = new_blindboard
        self._print_game_status()

        if self.do_show_each_move:
            self.do_show(None)

        return move
Code example #37
    def eval_run(self):
        # load checkpoint
        if self.checkpoint_path:
            self.saver.restore(self.session, self.checkpoint_path)
            log.info("Loaded from checkpoint!")

        log.infov("Start 1-epoch Inference and Evaluation")

        log.info("# of examples = %d", len(self.dataset))
        length_dataset = len(self.dataset)

        max_steps = int(length_dataset / self.batch_size) + 1
        log.info("max_steps = %d", max_steps)

        coord = tf.train.Coordinator()
        threads = tf.train.start_queue_runners(self.session,
                                               coord=coord,
                                               start=True)

        evaler = EvalManager()
        try:
            for s in range(max_steps):
                step, loss, step_time, prediction_pred, prediction_gt = \
                    self.run_single_step(self.batch)
                self.log_step_message(s, loss, step_time)
                evaler.add_batch(prediction_pred, prediction_gt)

        except Exception as e:
            coord.request_stop(e)

        coord.request_stop()
        try:
            coord.join(threads, stop_grace_period_secs=3)
        except RuntimeError as e:
            log.warn(str(e))

        evaler.report()
        log.infov("Evaluation complete.")
Code example #38
File: s3restore.py Project: rjframe/s3-backup
def get_restore_archive(bucket, schedule, date, path):
    '''Retrieves the archive from S3, saves it in config.dest_location, and
    returns the filename.'''
    import os.path
    import boto.exception

    key, name = build_key(bucket, schedule, date)
    try:
        # Throws AttributeError if the key doesn't exist
        is_encrypted = key.get_metadata('enc')
    except AttributeError:
        log.debug('The key "enc" does not exist.')
        is_encrypted = False
    
    if os.path.isdir(path):
        archive_path = os.path.join(path, name)
    else:
        log.warn('Invalid path "%s" given. Using default directory %s' %
                (path, config.dest_location))
        archive_path = os.path.join(config.dest_location, name)
    
    try:
        if not os.path.exists(path):
            os.makedirs(path)
    except OSError:
        log.error('Cannot create directory %s.' % config.dest_location)
        exit(1)

    try:
        key.get_contents_to_filename(archive_path)
    except boto.exception.S3ResponseError:
        log.error('The archive %s does not exist.' % key.key)
        exit(1)
    except AttributeError:
        log.info('There is not a %s backup on %s.' % (schedule, date))
        exit(1)
    
    return archive_path, is_encrypted
Code example #39
File: keyengine.py Project: bweir/packthing2
def newDict(value, group=None, name=None):
    available = keys(group)

    d = {}
    for k, v in value.items():
        try:
            available.remove(k)
        except ValueError:
            log.error("Key '"+k+"' not valid in '"+group+"' group")

        if isinstance(v, dict):
            d[k] = dictionary(k, group)(v)
        elif isinstance(v, list):
            d[k] = array(k, group)(v)
        elif isinstance(v, str):
            d[k] = key(k, group)(v)
        else:
            log.warn("Group '"+k+"' has no keys")

    for a in available:
        required(a, group, name)

    return d
Code example #40
File: actions.py Project: tjyang/rdopkg
def get_upstream_patches(version, local_patches_branch, patches_branch=None, upstream_branch=None, new_milestone=None):
    patches = git(
        "log",
        "--cherry-pick",
        "--pretty=format:\%s",
        "%(remote)s...%(local)s" % {"remote": patches_branch, "local": local_patches_branch},
    )
    changes = [p.strip().replace("\\", "") for p in patches.split("\n") if p != ""]

    if not changes:
        log.warn("No new patches detected in %s." % local_patches_branch)
        helpers.confirm("Do you want to continue anyway?", default_yes=False)

    n_patches = len(changes)
    changes.insert(0, ("Rebase %s changes from %s" % (n_patches, upstream_branch)))
    args = {"changes": changes}
    if n_patches > 0:
        if new_milestone:
            new_milestone += ".p%d" % n_patches
        else:
            new_milestone = "p%d" % n_patches
        args["new_milestone"] = new_milestone
    return args
Code example #41
File: actions.py Project: morucci/rdopkg
def clone(package, force_fetch=False, use_master_distgit=False):
    inforepo = rdoinfo.get_default_inforepo()
    inforepo.init(force_fetch=force_fetch)
    pkg = inforepo.get_package(package)
    if not pkg:
        raise exception.InvalidRDOPackage(package=package)
    if use_master_distgit:
        try:
            distgit = pkg['master-distgit']
            distgit_str = 'master-distgit'
        except KeyError:
            raise exception.InvalidUsage(
                msg="-m/--use-master-distgit used but 'master-distgit' "
                    "missing in rdoinfo for package: %s" % package)
    else:
        distgit = pkg['distgit']
        distgit_str = 'distgit'
    log.info("Cloning {dg} into ./{t.bold}{pkg}{t.normal}/".format(
        t=log.term, dg=distgit_str, pkg=package))
    patches = pkg.get('patches')
    upstream = pkg.get('upstream')

    git('clone', distgit, package)
    with helpers.cdir(package):
        if patches:
            log.info('Adding patches remote...')
            git('remote', 'add', 'patches', patches)
        else:
            log.warn("'patches' remote information not available in rdoinfo.")
        if upstream:
            log.info('Adding upstream remote...')
            git('remote', 'add', 'upstream', upstream)
        else:
            log.warn("'upstream' remote information not available in rdoinfo.")
        if patches or upstream:
            git('fetch', '--all')
        git('remote', '-v', direct=True)
Code example #42
File: actions.py Project: codenrhoden/rdopkg
def get_upstream_patches(version, local_patches_branch,
                         patches_branch=None, upstream_branch=None,
                         new_milestone=None):
    patches = git("log", "--cherry-pick", "--pretty=format:\%s",
                  "%(remote)s...%(local)s" % {'remote': patches_branch,
                                              'local': local_patches_branch})
    changes = [p.strip().replace('\\', '')
               for p in patches.split('\n') if p != '']

    if not changes:
        log.warn("No new patches detected in %s." % local_patches_branch)
        helpers.confirm("Do you want to continue anyway?", default_yes=False)

    n_patches = len(changes)
    changes.insert(0, ("Rebase %s changes from %s" %
                       (n_patches, upstream_branch)))
    args = {'changes': changes}
    if n_patches > 0:
        if new_milestone:
            new_milestone += '.p%d' % n_patches
        else:
            new_milestone = 'p%d' % n_patches
        args['new_milestone'] = new_milestone
    return args
Code example #43
File: actions.py Project: hguemar/rdoupdate
def download_updates_builds(*files, **kwargs):
    good = []
    fails = []
    per_update = kwargs.get('per_update', False)
    out_dir = kwargs.get('out_dir', None)
    build_filter = kwargs.get('build_filter', [])
    if not out_dir:
        out_dir = ''
    prefix = None
    for f in files:
        try:
            bn = os.path.basename(f)
            if per_update:
                prefix = bn
            log.info(log.term.bold('downloading %s' % bn))
            update = check_file(f)
            builds = update.download(out_dir=out_dir, prefix=prefix,
                                     build_filter=build_filter)
            if builds:
                good.append((f, update, builds))
        except Exception as ex:
            log.warn(str(ex))
            fails.append((f, ex))
    return good, fails
Code example #44
File: actions.py Project: tjyang/rdopkg
def clone(package, force_fetch=False, use_master_distgit=False):
    inforepo = rdoinfo.get_default_inforepo()
    inforepo.init(force_fetch=force_fetch)
    pkg = inforepo.get_package(package)
    if not pkg:
        raise exception.InvalidRDOPackage(package=package)
    if use_master_distgit:
        try:
            distgit = pkg["master-distgit"]
            distgit_str = "master-distgit"
        except KeyError:
            raise exception.InvalidUsage(
                msg="-m/--use-master-distgit used but 'master-distgit' " "missing in rdoinfo for package: %s" % package
            )
    else:
        distgit = pkg["distgit"]
        distgit_str = "distgit"
    log.info("Cloning {dg} into ./{t.bold}{pkg}{t.normal}/".format(t=log.term, dg=distgit_str, pkg=package))
    patches = pkg.get("patches")
    upstream = pkg.get("upstream")

    git("clone", distgit, package)
    with helpers.cdir(package):
        if patches:
            log.info("Adding patches remote...")
            git("remote", "add", "patches", patches)
        else:
            log.warn("'patches' remote information not available in rdoinfo.")
        if upstream:
            log.info("Adding upstream remote...")
            git("remote", "add", "upstream", upstream)
        else:
            log.warn("'upstream' remote information not available in rdoinfo.")
        if patches or upstream:
            git("fetch", "--all")
        git("remote", "-v", direct=True)
Code example #45
File: constant.py Project: berenm/gentulu
def reduce_constants():
  log.info('resolving constant values...')
  for c in raw_constants:
    while '_' in c.value:
      if '.' in c.value:
        scope_name, constant_name = c.value.split('.')
      else:
        scope_name = None
        constant_name = c.value

      resolved_values = set([ o.value for o in raw_constants if o.name == constant_name and o.value != constant_name and scope_name in [ None, o.stack.library_name, o.stack.extension_name, o.stack.category_name ] ])
      if len(resolved_values) > 1:
        log.warn(c.name + ': ' + c.value + ', multiple values possible, taking first: ' + str(resolved_values))
        c.value = resolved_values.pop()
      elif len(resolved_values) > 0:
        c.value = resolved_values.pop()
      else:
        log.warn(c.name + ': ' + c.value + ', no value found')
        break

  log.info('reducing constants...')
  for rc in raw_constants:
    li = library.get(rc.stack.library_name)
    gr = li.groups[rc.stack.extension_name.upper()]
    ex = gr.extensions[rc.stack.extension_name.lower()]
#    ca = ex.categories[c.stack.category_name.lower()]
    ca = ex.categories['constants']

    c = constant(rc)
    if not c.name in ca.constants:
      ca.constants[c.name] = constant(c)
    elif c == ca.constants[c.name]:
      ca.constants[c.name].update(c)
    else:
      log.warn('C %s (%s, %s) already in %s.%s.%s' % (c.name, c.value, ca.constants[c.name].value, li.name, ex.name, ca.name))
Code example #46
File: utils.py Project: superwhoopy/chessreader
def confirm_yes_no(msg):
    """TODO"""
    log.warn(msg)
    ans = input("(y/n): ")
    return ans.strip() in ["y", "Y"]
Code example #47
File: utils.py Project: superwhoopy/chessreader
def confirm_yes_no(msg):
    '''TODO'''
    log.warn(msg)
    ans = input("(y/n): ")
    return ans.strip() in ['y', 'Y']
Code example #48
    algo: str = d['algo']
    if algo == 'word':
        print('is word')

        return Response(map(
            lambda b: b.decode('ascii', 'ignore'),
            genw(seed=d['seed'].encode('ascii', 'ignore'),
                 n=d['n'],
                 max_len=d['max_len'])),
                        mimetype='text/plain')

    elif algo == 'char':
        return Response(map(
            chr,
            genc(seed=d['seed'].encode('ascii', 'ignore'),
                 n=d['n'],
                 max_len=d['max_len'])),
                        mimetype='text/plain')
    else:
        raise Exception('unknown algorithm')


# pre-load
log.warning('[pre-loading] this should take < 1min ...')
start = time()
list(genw(n=8, max_len=10))
list(genc(n=8, max_len=10))
log.warn(f'[finished] pre-loading (took {time() - start:4.2f}s)')

app.run(port=3000)
Code example #49
File: main.py Project: superwhoopy/chessreader
 def do_analyze(self, _):
     '''End the current game and run analysis on captured images'''
     if self.do_live_analysis:
         log.warn('live analysis is on: nothing to do')
         return
Code example #50
    def __init__(self, cfg, dataset):
        self.config = cfg
        self.train_dir = cfg.train_dir
        log.info("self.train_dir = %s", self.train_dir)

        # --- input ops ---
        self.batch_size = cfg.batch_size
        self.dataset = dataset
        self.batch = create_input_ops(dataset, self.batch_size, shuffle=False)

        # --- create model ---
        if cfg.model == 'baseline':
            from models.baseline import Model
        elif cfg.model == 'rn':
            from models.rn import Model
        elif cfg.model == 'film':
            from models.film import Model
        else:
            raise ValueError(cfg.model)
        log.infov("Using Model class : %s", Model)
        self.model = Model(Q_DIM, NUM_ANS, is_train=False)

        self.img = tf.placeholder(
            name='img',
            dtype=tf.float32,
            shape=[self.batch_size, cfg.image_size, cfg.image_size, 3],
        )
        self.q = tf.placeholder(name='q',
                                dtype=tf.float32,
                                shape=[cfg.batch_size, Q_DIM])
        self.a = tf.placeholder(name='a',
                                dtype=tf.float32,
                                shape=[cfg.batch_size, NUM_ANS])

        logits = self.model.build(self.img, self.q)

        correct_prediction = tf.equal(tf.argmax(logits, 1),
                                      tf.argmax(self.a, 1))
        self.accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
        self.all_preds = tf.nn.softmax(logits)

        self.global_step = tf.contrib.framework.get_or_create_global_step(
            graph=None)
        self.step_op = tf.no_op(name='step_no_op')

        tf.set_random_seed(1234)

        session_config = tf.ConfigProto(gpu_options=tf.GPUOptions(
            allow_growth=True))
        self.session = tf.Session(config=session_config)

        # --- checkpoint and monitoring ---
        self.saver = tf.train.Saver()

        self.checkpoint_path = cfg.checkpoint_path
        if self.checkpoint_path is None and self.train_dir:
            self.checkpoint_path = tf.train.latest_checkpoint(self.train_dir)
        if self.checkpoint_path is None:
            log.warn("No checkpoint is given. Just random initialization :-)")
            self.session.run(tf.global_variables_initializer())
        else:
            log.info("Checkpoint path : %s", self.checkpoint_path)
Code example #51
File: test.py Project: bweir/packthing2
def isEmpty(key, value):
    if value is None:
        log.warn("'"+key+"' is defined but empty")
Code example #52
 def supported(self):
     log.warn("No supported values for config item '"+self.name+"'")
     return []
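
A closing note on the warn() calls used throughout these examples: on the standard library's logging module, warn() is a deprecated alias for warning(); several of the projects above use their own logger objects, where warn is the intended name. A minimal standard-library sketch, independent of any project shown here:

    import logging

    logging.basicConfig(level=logging.WARNING)
    log = logging.getLogger(__name__)

    # warning() is the documented spelling; warn() is a deprecated alias.
    log.warning("disk usage at %d%%", 91)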