Example #1
def delete_physical_card_set(sSetName):
    """Unconditionally delete a PCS and its contents"""
    # pylint: disable-msg=E1101
    # SQLObject confuses pylint
    def _delete_cards(oCS):
        """Remove cards from the card set.

           Intended to be wrapped in a transaction for speed."""
        for oCard in oCS.cards:
            oCS.removePhysicalCard(oCard)
    try:
        oCS = PhysicalCardSet.byName(sSetName)
        aChildren = find_children(oCS)
        for oChildCS in aChildren:
            oChildCS.parent = oCS.parent
            oChildCS.syncUpdate()
        if hasattr(sqlhub.processConnection, 'commit'):
            # We're already in a transaction, so just delete
            _delete_cards(oCS)
        else:
            # wrap this in a transaction for speed
            sqlhub.doInTransaction(_delete_cards, oCS)
        PhysicalCardSet.delete(oCS.id)
        return True
    except SQLObjectNotFound:
        return False
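This example and several below share the same idiom: if sqlhub.processConnection already looks like a transaction (it has a commit method), do the work directly; otherwise hand the helper to sqlhub.doInTransaction, which opens, commits and closes a transaction around it and forwards any extra arguments. A minimal self-contained sketch of that idiom follows; the Note model and bulk_delete helper are illustrative, not part of the project above.

# Sketch of the "wrap in a transaction unless we are already in one" idiom.
from sqlobject import SQLObject, StringCol, connectionForURI, sqlhub

sqlhub.processConnection = connectionForURI('sqlite:/:memory:')


class Note(SQLObject):
    text = StringCol()


Note.createTable()


def _delete_all(cls):
    """Delete every row of cls; intended to run inside one transaction."""
    for obj in list(cls.select()):
        cls.delete(obj.id)


def bulk_delete(cls):
    if hasattr(sqlhub.processConnection, 'commit'):
        # processConnection is already a Transaction, so just do the work in it
        _delete_all(cls)
    else:
        # plain connection: doInTransaction opens, commits and closes one for us
        sqlhub.doInTransaction(_delete_all, cls)


for i in range(5):
    Note(text='note %d' % i)
bulk_delete(Note)
assert Note.select().count() == 0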
Example #2
def delete_physical_card_set(sSetName):
    """Unconditionally delete a PCS and its contents"""
    # pylint: disable=no-member
    # SQLObject confuses pylint
    def _delete_cards(oCS):
        """Remove cards from the card set.

           Intended to be wrapped in a transaction for speed."""
        for oCard in oCS.cards:
            oCS.removePhysicalCard(oCard)
    try:
        oCS = PhysicalCardSet.byName(sSetName)
        aChildren = find_children(oCS)
        for oChildCS in aChildren:
            oChildCS.parent = oCS.parent
            oChildCS.syncUpdate()
        if hasattr(sqlhub.processConnection, 'commit'):
            # We're already in a transaction, so just delete
            _delete_cards(oCS)
        else:
            # wrap this in a transaction for speed
            sqlhub.doInTransaction(_delete_cards, oCS)
        PhysicalCardSet.delete(oCS.id)
        return True
    except SQLObjectNotFound:
        return False
Example #3
    def create_pcs(self, oCardLookup=DEFAULT_LOOKUP):
        """Create a Physical Card Set.
           """
        if self.name is None:
            raise RuntimeError("No name for the card set")

        aCardCnts = self._dCards.items()
        aAbsCards = oCardLookup.lookup([tCardCnt[0] for tCardCnt in aCardCnts],
                'Card Set "%s"' % self.name)
        dNameCards = dict(zip(self._dCards.keys(), aAbsCards))

        aExpNames = self._dExpansions.keys()
        aExps = oCardLookup.expansion_lookup(aExpNames, "Physical Card List",
                self._dCardExpansions)
        dExpansionLookup = dict(zip(aExpNames, aExps))

        aPhysCards = oCardLookup.physical_lookup(self._dCardExpansions,
                dNameCards, dExpansionLookup, 'Card Set "%s"' % self.name)

        if hasattr(sqlhub.processConnection, 'commit'):
            # We're already in a transaction, so doInTransaction is
            # pointless
            self._commit_pcs(aPhysCards)
        else:
            sqlhub.doInTransaction(self._commit_pcs, aPhysCards)
Example #4
def import_inpx(path, pbar_cb=None):
    inpx = ZipFile(path)
    if pbar_cb:
        inp_count = 0
        for name in inpx.namelist():
            ext = os.path.splitext(name)[1]
            if ext == '.inp':
                inp_count += 1
        pbar_cb.set_max(inp_count)
    inp_count = 0
    for name in inpx.namelist():
        archive, ext = os.path.splitext(name)
        if ext != '.inp':
            continue
        if pbar_cb:
            inp_count += 1
            pbar_cb.display(inp_count)
        inp = inpx.open(name)
        sqlhub.doInTransaction(import_inp, archive + '.zip', inp)
        inp.close()
    connection = sqlhub.processConnection
    if connection.dbName == 'postgres':
        for table in Author, Book, Extension, Genre, Language:
            connection.query("VACUUM %s" % table.sqlmeta.table)
    elif connection.dbName == 'sqlite':
        connection.query("VACUUM")
    if pbar_cb:
        pbar_cb.close()
Example #5
    def create_pcs(self, oCardLookup=DEFAULT_LOOKUP):
        """Create a Physical Card Set.
           """
        if self.name is None:
            raise RuntimeError("No name for the card set")

        aCardCnts = self._dCards.items()
        aAbsCards = oCardLookup.lookup([tCardCnt[0] for tCardCnt in aCardCnts],
                                       'Card Set "%s"' % self.name)
        # Ordering is correct because of how we created aAbsCards
        dNameCards = dict(zip(self._dCards, aAbsCards))

        dPrintingLookup = oCardLookup.printing_lookup(self._dExpansions,
                                                      "Physical Card List",
                                                      self._dCardExpansions)

        aPhysCards = oCardLookup.physical_lookup(self._dCardExpansions,
                                                 dNameCards, dPrintingLookup,
                                                 'Card Set "%s"' % self.name)

        if hasattr(sqlhub.processConnection, 'commit'):
            # We're already in a transaction, so doInTransaction is
            # pointless
            self._commit_pcs(aPhysCards)
        else:
            sqlhub.doInTransaction(self._commit_pcs, aPhysCards)
Example #6
    def _commit_cards(self, oCS, aCards):
        """Add a list of physiccal cards to the given card set"""
        def _in_transaction(oCS, aCards):
            """The actual work happens here, so it can be wrapped in a
               sqlobject transaction"""
            for oCard in aCards:
                # pylint: disable=no-member
                # SQLObject confuses pylint
                oCS.addPhysicalCard(oCard)

        sqlhub.doInTransaction(_in_transaction, oCS, aCards)
Example #7
    def create_pcs(self, oCardLookup=DEFAULT_LOOKUP, dLookupCache={}):
        """Create a Physical Card Set.

           dLookupCache is updated as soon as possible, i.e. immediately after
           calling oCardLookup.lookup(...).
           """
        # Need to cache both abstract card lookups & expansion lookups
        # pylint: disable-msg=R0914
        # We use a lot of local variables for clarity
        dLookupCache.setdefault('cards', {})
        dLookupCache.setdefault('expansions', {})
        if self.name is None:
            raise RuntimeError("No name for the card set")

        aCardCnts = self._dCards.items()
        aAbsCards = oCardLookup.lookup([dLookupCache['cards'].get(tCardCnt[0],
            tCardCnt[0]) for tCardCnt in aCardCnts],
            'Card Set "%s"' % self.name)
        dNameCards = dict(zip(self._dCards.keys(), aAbsCards))

        # Update dLookupCache
        for oAbs, (sName, _iCnt) in zip(aAbsCards, aCardCnts):
            if not oAbs:
                dLookupCache['cards'][sName] = None
            else:
                dLookupCache['cards'][sName] = oAbs.canonicalName

        # Apply Expansion lookups
        aExpNames = [dLookupCache['expansions'].get(sExp, sExp) for sExp
                in self._dExpansions]
        dCardExpansions = {}
        for sName in self._dCardExpansions:
            dCardExpansions[sName] = {}
            for sExp, iCnt in self._dCardExpansions[sName].iteritems():
                dCardExpansions[sName][dLookupCache['expansions'].get(sExp,
                    sExp)] = iCnt
        aExps = oCardLookup.expansion_lookup(aExpNames, "Physical Card List",
                self._dCardExpansions)
        dExpansionLookup = dict(zip(aExpNames, aExps))
        # Update expansion lookup cache
        for sName, oExp in dExpansionLookup.iteritems():
            if not oExp:
                dLookupCache['expansions'][sName] = None
            else:
                dLookupCache['expansions'][sName] = oExp.name

        aPhysCards = oCardLookup.physical_lookup(dCardExpansions,
                dNameCards, dExpansionLookup, 'Card Set "%s"' % self.name)

        if hasattr(sqlhub.processConnection, 'commit'):
            self._commit_pcs(aPhysCards)
        else:
            sqlhub.doInTransaction(self._commit_pcs, aPhysCards)
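dLookupCache is updated in place, so a caller that passes the same dictionary to several create_pcs calls only pays for each unknown card or expansion name once. A stripped-down sketch of that caching idea follows; normalise() is a hypothetical stand-in for the real oCardLookup.lookup().

def normalise(name):
    """Stand-in for the expensive lookup done by oCardLookup.lookup()."""
    return name.strip().lower()


def lookup_cards(names, cache):
    cache.setdefault('cards', {})
    resolved = []
    for name in names:
        if name not in cache['cards']:
            cache['cards'][name] = normalise(name)   # only done once per name
        resolved.append(cache['cards'][name])
    return resolved


shared_cache = {}
lookup_cards(['  Alpha ', 'Beta'], shared_cache)   # both names resolved
lookup_cards(['Beta', 'Gamma'], shared_cache)      # only Gamma needs a fresh lookup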
Example #8
def main ():

    # assume the hookbox server is on localhost:8001
    url = base_url + "/publish"

    values = { "channel_name" : "/lobby",
               "security_token" : secret,
             }

    #
    #send("/heartbeat", {})
    create_channel("/heartbeat")
    create_channel("/lobby")

    time.sleep(60) #when you start up, give everyone a minute to check in

    while True:
        try:
            sqlhub.doInTransaction(check_for_heartbeat)
        except:
            print_exc()
            time.sleep(10)
        time.sleep(1)
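base_url, secret, create_channel and check_for_heartbeat are defined elsewhere in that script. A bounded, self-contained sketch of the same worker-loop shape, where each poll runs in its own transaction and failures are logged and retried rather than fatal; the Heartbeat model and check_for_heartbeats helper are assumptions.

import time
from traceback import print_exc

from sqlobject import SQLObject, IntCol, connectionForURI, sqlhub

sqlhub.processConnection = connectionForURI('sqlite:/:memory:')


class Heartbeat(SQLObject):
    last_seen = IntCol()


Heartbeat.createTable()


def check_for_heartbeats():
    """Stand-in for the real check: count recent heartbeats."""
    cutoff = int(time.time()) - 90
    return Heartbeat.select(Heartbeat.q.last_seen > cutoff).count()


def poll(iterations=3):
    # Bounded here so the sketch terminates; the original loops forever.
    for _ in range(iterations):
        try:
            sqlhub.doInTransaction(check_for_heartbeats)
        except Exception:
            # Log the failure and back off instead of killing the worker.
            print_exc()
            time.sleep(10)
        time.sleep(1)


poll()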
Example #9
def get_app(config_path):
    """
        Returns the flask main application of ICTV.
        Currently, only one application can be run at a time due to how data such as assets, database, config files
        or plugins is stored.
    """
    config = get_config(config_path)
    if database.database_path is None:
        database.database_path = config['database_uri']

    # Create a base flask application
    app = FrankenFlask(__name__)

    # The following line might be used to speed up queries
    app.config["SEND_FILE_MAX_AGE_DEFAULT"] = 300

    app.config.update(**config)

    init_flask_url_mapping(app)

    app.version = ictv.common.__version__

    with open(os.path.join(get_root_path(), 'info' + os.extsep + 'yaml')) as f:
        # Loads ICTV user info texts
        info_texts = yaml.unsafe_load(f)

    # Load the SMTP config into web.py
    smtp_conf = app.config.get('smtp', None)
    if smtp_conf:
        app.config['MAIL_DEFAULT_SENDER'] = smtp_conf['sender_name']
        app.config['MAIL_SERVER'] = smtp_conf['host']
        app.config['MAIL_PORT'] = smtp_conf['port']
        app.config['MAIL_USERNAME'] = smtp_conf.get('username', '')
        app.config['MAIL_PASSWORD'] = smtp_conf.get('password', '')
        app.config['MAIL_USE_TLS'] = smtp_conf.get('starttls', False)

    # Create a persistent HTTP session storage for the app
    app.secret_key = app.config['session_secret_key']
    # Populate the jinja templates globals
    template_globals = {
        'session': app.session,
        'get_feedbacks': get_feedbacks,
        'get_next_feedbacks': get_next_feedbacks,
        'pop_previous_form': pop_previous_form,
        'UserPermissions': UserPermissions,
        'json': json,
        'str': str,
        'sorted': sorted,
        'hasattr': hasattr,
        'sidebar_collapse': False,
        'show_header': True,
        'show_footer': True,
        're': re,
        'info': info_texts,
        'make_tooltip': make_tooltip,
        'make_alert': make_alert,
        'escape': html.escape,
        'show_reset_password': '******' in app.config['authentication'],
        'homedomain': lambda: flask.request.url_root[:-1],
        'generate_secret': generate_secret,
        'version': lambda: app.version,
        'pretty_print_size': pretty_print_size,
        'timesince': timesince,
        'User': User,
        'get_user': lambda: User.get(app.session['user']['id'])
    }

    ### Jinja2 renderer ###
    app.renderer = render_jinja(os.path.join(get_root_path(), 'templates/'))
    app.renderer._lookup.globals.update(base='base.html', **template_globals)

    app.standalone_renderer = render_jinja(
        os.path.join(get_root_path(), 'templates/'))
    app.standalone_renderer._lookup.globals.update(**template_globals)

    # Init loggers
    load_loggers_stats()
    # Determine logging level and user feedback when an internal error occurs based on ICTV core config
    level = logging.INFO

    loggers_to_init = [
        'app', 'pages', 'screens', 'plugin_manager', 'storage_manager',
        'local_login', 'database', 'transcoding_queue'
    ]
    for logger_name in loggers_to_init:
        init_logger(logger_name,
                    level,
                    rotation_interval=app.config['logs']['rotation_interval'],
                    backup_count=app.config['logs']['backup_count'])

    # Init the renderer used for slide, capsule, channel and screen rendering
    app.ictv_renderer = ICTVRenderer(app)
    # Init the plugin manager, used as a gateway between ICTV core and its plugins.
    app.plugin_manager = PluginManager(app)

    # Init the download manager, a download queue which asynchronously downloads assets from the network
    app.download_manager = DownloadManager()
    # Init the cleanup manager which will regularly clean up unused cached assets
    app.cleanup_scheduler = CleanupScheduler()
    app.cleanup_scheduler.start()
    # Init the video transcoding queue which will convert videos to WebM format using FFmpeg
    app.transcoding_queue = TranscodingQueue()

    # Add a general authentication processor to handle user authentication
    app.register_before_request(get_authentication_processor,
                                cascade=True,
                                needs_app=True)

    # Add a preprocessor to populate flask.g and mimic the old web.ctx
    app.register_before_request(get_web_ctx_processor, cascade=True)

    # Add a preprocessor to encapsulate every SQL request in a transaction on a per-HTTP-request basis
    app.register_before_request(get_db_thread_preprocessor, cascade=True)
    app.prepare_error_handler(DatabaseError, lambda: database_error_handler)
    app.prepare_error_handler(werkzeug.exceptions.InternalServerError,
                              lambda: internal_error_handler)

    # Add a hook to clean feedbacks from the previous request and prepare next feedbacks to be shown to the user
    app.register_after_request(lambda: rotate_feedbacks,
                               cascade=True,
                               needs_app=False)

    # Instantiate plugins through the plugin manager
    app.plugin_manager.instantiate_plugins(app)

    # Load themes and templates into database
    sqlhub.doInTransaction(load_templates_and_themes)

    return app
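get_db_thread_preprocessor above is defined elsewhere in ICTV and is not shown in this snippet. As a rough illustration of the per-request transaction idea it implements, here is a minimal sketch using plain Flask hooks; the hook names, the commit/rollback policy and the in-memory database are assumptions, not ICTV's actual code.

import flask
from sqlobject import connectionForURI, sqlhub

app = flask.Flask(__name__)
connection = connectionForURI('sqlite:/:memory:')
sqlhub.processConnection = connection


@app.before_request
def begin_sql_transaction():
    # Give this request its own SQLObject transaction.
    flask.g.sql_trans = connection.transaction()
    sqlhub.threadConnection = flask.g.sql_trans


@app.teardown_request
def end_sql_transaction(exc):
    trans = getattr(flask.g, 'sql_trans', None)
    if trans is not None:
        if exc is None:
            trans.commit()
        else:
            trans.rollback()
    # Fall back to the plain process connection between requests.
    sqlhub.threadConnection = connection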
Example #10
def exit_fun():
    if database.database_path is not None:
        sqlhub.threadConnection = SQLObjectThreadConnection.get_conn()
        sqlhub.doInTransaction(dump_log_stats)
        close_database()
        os._exit(0)
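SQLObjectThreadConnection, dump_log_stats and close_database are project helpers not shown here. A rough sketch of the same shutdown idea, giving the exiting thread a connection and flushing a few final rows inside one transaction; the Stat model and the atexit registration are assumptions.

import atexit

from sqlobject import SQLObject, IntCol, connectionForURI, sqlhub

sqlhub.processConnection = connectionForURI('sqlite:/:memory:')


class Stat(SQLObject):
    hits = IntCol(default=0)


Stat.createTable()


def dump_stats():
    """Stand-in for dump_log_stats(): persist whatever is still in memory."""
    Stat(hits=42)


def exit_handler():
    # Make sure this thread has a connection before opening the transaction.
    sqlhub.threadConnection = sqlhub.processConnection
    sqlhub.doInTransaction(dump_stats)


atexit.register(exit_handler)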
Example #11
def import_glst():
    count_old, count_new = sqlhub.doInTransaction(_import_glst)
    connection = sqlhub.processConnection
    if connection.dbName == 'sqlite':
        connection.query("VACUUM")
    return count_old, count_new
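Two details worth noting here: the value returned by the wrapped function comes straight back through sqlhub.doInTransaction, and VACUUM is issued afterwards on the plain connection because SQLite will not run it inside a transaction. A small sketch of both points; the Book model and _import are illustrative.

from sqlobject import SQLObject, StringCol, connectionForURI, sqlhub

sqlhub.processConnection = connectionForURI('sqlite:/:memory:')


class Book(SQLObject):
    title = StringCol()


Book.createTable()


def _import():
    """Insert a row inside one transaction and report before/after counts."""
    existing = Book.select().count()
    Book(title='New arrival')
    return existing, existing + 1


count_old, count_new = sqlhub.doInTransaction(_import)
connection = sqlhub.processConnection
if connection.dbName == 'sqlite':
    # SQLite refuses to VACUUM inside a transaction, hence it runs out here.
    connection.query("VACUUM")
assert (count_old, count_new) == (0, 1)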
Example #12
    def create_pcs(self, oCardLookup=DEFAULT_LOOKUP, dLookupCache={}):
        """Create a Physical Card Set.

           dLookupCache is updated as soon as possible, i.e. immediately after
           calling oCardLookup.lookup(...).
           """
        # Need to cache both abstract card lookups & expansion lookups
        # pylint: disable=too-many-locals
        # We use a lot of local variables for clarity
        dLookupCache.setdefault('cards', {})
        dLookupCache.setdefault('printings', {})
        # Special cases are for when we have a legal card and a legal expansion
        # but the combination doesn't actually match a real card in the database.
        # This can be due to cards having been published with the wrong expansion
        # and later fixed, a manually created entry, and so on.
        dLookupCache.setdefault('special cases', {})
        if self.name is None:
            raise RuntimeError("No name for the card set")

        aCardCnts = self._dCards.items()
        aAbsCards = oCardLookup.lookup(
            [dLookupCache['cards'].get(tCardCnt[0], tCardCnt[0])
             for tCardCnt in aCardCnts],
            'Card Set "%s"' % self.name)
        dNameCards = dict(zip(self._dCards, aAbsCards))

        dReverse = {}

        # Update dLookupCache
        for oAbs, (sName, _iCnt) in zip(aAbsCards, aCardCnts):
            if not oAbs:
                dLookupCache['cards'][sName] = None
            else:
                dLookupCache['cards'][sName] = oAbs.canonicalName
                dReverse.setdefault(oAbs.canonicalName, [])
                dReverse[oAbs.canonicalName].append(sName)

        # Apply expansion and print lookups
        aExpPrintNames = []
        for tExpPrint in self._dExpansions:
            tNewExpPrint = dLookupCache['printings'].get(tExpPrint, tExpPrint)
            aExpPrintNames.append(tNewExpPrint)
        dCardExpansions = {}
        for sName in self._dCardExpansions:
            dCardExpansions[sName] = {}
            for tExpPrint, iCnt in self._dCardExpansions[sName].items():
                tNewExpPrint = dLookupCache['printings'].get(tExpPrint,
                                                             tExpPrint)
                dCardExpansions[sName][tNewExpPrint] = iCnt

        dPrintingLookup = oCardLookup.printing_lookup(aExpPrintNames,
                                                      "Physical Card List",
                                                      dCardExpansions)
        # Update the printing lookup cache from these results
        for tExpPrint, oPrinting in dPrintingLookup.items():
            if not oPrinting:
                dLookupCache['printings'][tExpPrint] = (None, None)
            else:
                dLookupCache['printings'][tExpPrint] = \
                        (oPrinting.expansion.name, oPrinting.name)

        # Apply special cases
        for sName in dCardExpansions:
            if dNameCards[sName]:
                for tExpPrint in list(dCardExpansions[sName]):
                    iCnt = dCardExpansions[sName][tExpPrint]
                    # Check for special cases
                    tCachedPrint = dLookupCache['special cases'].get((sName, tExpPrint),
                                                                      tExpPrint)
                    # If this is a special case, point the numbers at the correct
                    # expansion
                    if tCachedPrint != tExpPrint:
                        dCardExpansions[sName][tExpPrint] = 0
                        dCardExpansions[sName][tCachedPrint] = iCnt

        aPhysCards = oCardLookup.physical_lookup(dCardExpansions,
                                                 dNameCards, dPrintingLookup,
                                                 'Card Set "%s"' % self.name)

        # Update special cases map
        for oCard in aPhysCards:
            if oCard.printing:
                tPrintKey = (oCard.printing.expansion.name, oCard.printing.name)
                for sName in dReverse[oCard.abstractCard.canonicalName]:
                    if sName not in dCardExpansions:
                        continue
                    if tPrintKey not in dCardExpansions[sName] and len(dCardExpansions[sName]) == 1:
                        # We didn't look this up correctly before calling physical lookup,
                        # and there was only 1 suggestion, so we can add a special case
                        tOldExp = list(dCardExpansions[sName])[0]
                        dLookupCache['special cases'][(sName, tOldExp)] = tPrintKey

        if hasattr(sqlhub.processConnection, 'commit'):
            self._commit_pcs(aPhysCards)
        else:
            sqlhub.doInTransaction(self._commit_pcs, aPhysCards)
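The special-case remapping above moves counts recorded under a (card, printing) key known to be wrong onto the corrected key taken from dLookupCache['special cases']. A tiny standalone sketch of that dictionary shuffle, with made-up keys and counts.

# Counts filed under a wrong printing key are moved to the corrected key.
special_cases = {('Alpha', ('Set A', 'First')): ('Set B', 'First')}

card_expansions = {'Alpha': {('Set A', 'First'): 3}}
for name, printings in card_expansions.items():
    for key in list(printings):
        fixed = special_cases.get((name, key), key)
        if fixed != key:
            count = printings[key]
            printings[key] = 0
            printings[fixed] = count

assert card_expansions['Alpha'][('Set B', 'First')] == 3
assert card_expansions['Alpha'][('Set A', 'First')] == 0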
Example #13
File: app.py  Project: nrybowski/ICTV
def get_app(config_path, sessions_path=""):
    """
        Returns the web.py main application of ICTV.
        Currently, only one application can be run at a time due to how data such as assets, database, config files
        or plugins is stored.
    """
    # Loads ICTV core config file
    config_file = get_config(config_path)
    if database.database_path is None:
        database.database_path = config_file['database_uri']

    app_urls = urls
    if config_file['debug']['dummy_login']:
        app_urls += ('/login/(.+)', 'ictv.pages.utils.DummyLogin')

    if config_file['debug']['debug_env']:
        app_urls += ('/debug_env', 'DebugEnv')

    if 'local' in config_file['authentication']:
        app_urls += (
            '/login',
            'ictv.pages.local_login.LoginPage',
            '/reset',
            'ictv.pages.local_login.GetResetLink',
            '/reset/(.+)',
            'ictv.pages.local_login.ResetPage',
            '/logout',
            'ictv.pages.utils.LogoutPage',
        )

    if 'saml2' in config_file['authentication']:
        app_urls += (
            '/shibboleth',
            'ictv.pages.shibboleth.Shibboleth',
            '/shibboleth_metadata',
            'ictv.pages.shibboleth.MetadataPage',
        )

    # Create a base web.py application
    app = web.application(app_urls, globals())
    app.config = config_file

    app.version = ictv.common.__version__

    with open(os.path.join(get_root_path(), 'info' + os.extsep + 'yaml')) as f:
        # Loads ICTV user info texts
        info_texts = yaml.load(f)

    # Load the SMTP config into web.py
    smtp_conf = app.config.get('smtp', None)
    if smtp_conf:
        web.config.smtp_sendername = smtp_conf['sender_name']
        web.config.smtp_server = smtp_conf['host']
        web.config.smtp_port = smtp_conf['port']
        web.config.smtp_username = smtp_conf.get('username', '')
        web.config.smtp_password = smtp_conf.get('password', '')
        web.config.smtp_starttls = smtp_conf.get('starttls', False)

    # Create a persistent HTTP session storage for the app
    app.session = web.session.Session(
        app,
        OptimisticThreadSafeDisktore(os.path.join(sessions_path, 'sessions')))
    # Populate the web.py templates globals
    template_globals = {
        'session': app.session,
        'get_feedbacks': get_feedbacks,
        'get_next_feedbacks': get_next_feedbacks,
        'pop_previous_form': pop_previous_form,
        'UserPermissions': UserPermissions,
        'json': json,
        'str': str,
        'sorted': sorted,
        'hasattr': hasattr,
        'sidebar_collapse': False,
        'show_header': True,
        'show_footer': True,
        're': re,
        'info': info_texts,
        'make_tooltip': make_tooltip,
        'make_alert': make_alert,
        'escape': html.escape,
        'show_reset_password': '******' in app.config['authentication'],
        'homedomain': lambda: web.ctx.homedomain,
        'generate_secret': generate_secret,
        'version': lambda: app.version,
        'pretty_print_size': pretty_print_size,
        'timesince': timesince,
        'User': User,
        'get_user': lambda: User.get(app.session['user']['id'])
    }
    # Init the web.py renderer used for the admin interface
    template_kwargs = {
        'loc': os.path.join(get_root_path(), 'templates/'),
        'cache': not app.config['debug']['debug_on_error'],
        'globals': template_globals
    }
    app.renderer = web.template.render(base='base', **template_kwargs)

    # Init a second web.py renderer without any base template
    app.standalone_renderer = web.template.render(**template_kwargs)

    # Init loggers
    load_loggers_stats()
    # Determine logging level and user feedback when an internal error occurs based on ICTV core config
    level = logging.INFO
    if app.config['debug']['debug_on_error']:
        level = logging.DEBUG
        app.internalerror = web.debugerror
    loggers_to_init = [
        'app', 'pages', 'screens', 'plugin_manager', 'storage_manager',
        'local_login', 'database', 'transcoding_queue'
    ]
    for logger_name in loggers_to_init:
        init_logger(logger_name,
                    level,
                    rotation_interval=app.config['logs']['rotation_interval'],
                    backup_count=app.config['logs']['backup_count'])

    # Init the renderer used for slide, capsule, channel and screen rendering
    app.ictv_renderer = ICTVRenderer(app)
    # Init the plugin manager, used as a gateway between ICTV core and its plugins.
    app.plugin_manager = PluginManager(app)

    # Init the download manager, a download queue which asynchronously downloads assets from the network
    app.download_manager = DownloadManager()
    # Init the cleanup manager which will regularly clean up unused cached assets
    app.cleanup_scheduler = CleanupScheduler()
    app.cleanup_scheduler.start()
    # Init the video transcoding queue which will convert videos to WebM format using FFmpeg
    app.transcoding_queue = TranscodingQueue()

    # Add a hook to make the session available through web.ctx for plugin webapps
    def session_hook():
        web.ctx.session = app.session
        web.template.Template.globals['session'] = app.session

    app.add_processor(web.loadhook(session_hook))

    # Add a preprocessor to populate web.ctx with meaningful values when app is run behind a proxy
    app.add_processor(proxy_web_ctx_processor)
    # Add a preprocessor to encapsulate every SQL request in a transaction on a per-HTTP-request basis
    app.add_processor(
        get_request_errors_preprocessor(logging.getLogger('database'),
                                        logging.getLogger('pages')))
    # Add a general authentication processor to handle user authentication
    app.add_processor(get_authentication_processor(app))
    # Add a hook to clean feedbacks from the previous request and prepare next feedbacks to be shown to the user
    app.add_processor(web.unloadhook(rotate_feedbacks))

    # Instantiate plugins through the plugin manager
    app.plugin_manager.instantiate_plugins(app)

    # Load themes and templates into database
    sqlhub.doInTransaction(load_templates_and_themes)

    return app
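The request preprocessors used above (get_request_errors_preprocessor, get_authentication_processor) are ICTV helpers defined elsewhere. For the transaction side specifically, a bare-bones sketch of attaching a per-request SQLObject transaction to a web.py application with loadhook/unloadhook could look like the following; the commit-only policy and helper names are assumptions, not ICTV's actual implementation.

import web
from sqlobject import connectionForURI, sqlhub

connection = connectionForURI('sqlite:/:memory:')
sqlhub.processConnection = connection


def open_transaction():
    # Runs before each request: give the handling thread its own transaction.
    web.ctx.sql_trans = connection.transaction()
    sqlhub.threadConnection = web.ctx.sql_trans


def close_transaction():
    # Runs after each request: commit and fall back to the plain connection.
    trans = getattr(web.ctx, 'sql_trans', None)
    if trans is not None:
        trans.commit()
    sqlhub.threadConnection = connection


def attach_transactions(app):
    app.add_processor(web.loadhook(open_transaction))
    app.add_processor(web.unloadhook(close_transaction))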