Example #1
def run_export(_settings, _safile_settings, story_export_id):

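    # the storage factory and DB session must be (re)initialised in the worker process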
    pyramid_safile.init_factory(_safile_settings)

    engine = engine_from_config(_settings)
    DBSession.configure(bind=engine)

    with transaction.manager:
        story_export = DBSession.query(ProjectExport) \
                                .filter(ProjectExport.id == story_export_id) \
                                .one()

        characters = CharacterQuery(DBSession).fetch_by_oice(story_export.oice)

        temp_zip = os.path.join(tempfile.mkdtemp(), 'data.zip')
        exporter = ScriptExporter(
            _settings["o2.resize_script"],
            story_export.oice,
            temp_zip,
            characters=characters,
        )

        exporter.export()

        factory = pyramid_safile.get_factory()
        # close the zip file once the storage handle has been created
        with open(temp_zip, 'rb') as zip_file:
            handle = factory.create_handle('data.zip', zip_file)

        story_export.exported_files = handle

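    # notify the web app once the transaction above has committed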
    send_result_request('export', {'id': story_export_id, 'message': 'ok'})
Example #2
def transcode_audio_assets(_settings, _safile_settings, job_id, assets,
                           asset_files):
    pyramid_safile.init_factory(_safile_settings)

    engine = engine_from_config(_settings)
    DBSession.configure(bind=engine)

    socket_url = 'audio/convert/' + job_id

    try:
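        # re-fetch the assets so they are attached to this worker's DB session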
        assets = AssetQuery(DBSession).get_by_ids(
            [asset.id for asset in assets])
        updated_library = False

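        # transcode each uploaded file; audio_transcodec returns a storage
        # handle on success, or a falsy value when conversion fails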
        for asset, asset_file in zip(assets, asset_files):
            handle = audio_transcodec(asset.filename, asset_file)
            if handle:
                asset.import_handle(handle)
                DBSession.add(asset)
            elif not asset.storage:
                send_result_request(socket_url, {
                    'stage': 'delete',
                })
                # delete an asset that failed to transcode only if it is newly
                # created (i.e. it has no stored file yet)
                DBSession.delete(asset)

            DBSession.flush()

            send_result_request(
                socket_url, {
                    'stage': 'transcode',
                    'assetId': asset.id,
                    'error': None if handle else 'ERR_AUDIO_TRANSCODE_FAILURE',
                })

            if not updated_library and handle:
                # update library time when new asset is added
                library = LibraryQuery(DBSession).get_library_by_id(
                    asset.library_id)
                library.updated_at = datetime.utcnow()
                library.launched_at = datetime.utcnow()
                updated_library = True
    except Exception as error:
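        # report the failure; the transaction is never committed on this path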
        send_result_request(
            socket_url, {
                'error': True,
                'key': 'ERR_AUDIO_IMPORT_ERROR',
                'interpolation': {
                    'message': str(error),
                },
            })
    else:
        transaction.commit()
        send_result_request(socket_url, {
            'stage': 'finished',
        })
Example #3
def main(global_config, **settings):
    """ This function returns a Pyramid WSGI application.
    """
    authn_policy = AuthTktAuthenticationPolicy(
        settings['auth.secret'],
        secure=True,
        http_only=True,
        timeout=int(settings['auth.timeout']),
        max_age=int(settings['auth.timeout']),
        callback=groupfinder)
    authz_policy = ACLAuthorizationPolicy()
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.bind = engine
    config = Configurator(settings=settings,
                          authentication_policy=authn_policy,
                          authorization_policy=authz_policy,
                          root_factory=Root)

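    # optional CORS preflight support, toggled by the cors.preflight setting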
    if config.get_settings().get('cors.preflight', None) == 'true':
        config.include('.cors')
        config.add_cors_preflight_handler()

    config.include("cornice")
    config.include('pyramid_mako')
    config.add_static_view('static', 'static', cache_max_age=3600)
    upload_dir = os.path.abspath(settings['upload_dir'])
    config.add_static_view('upload', upload_dir, cache_max_age=3600)
    config.add_renderer('jsonp', JSONP(param_name='callback'))

    config.scan(ignore=['modmod.scripts', 'modmod.tests'])
    config.include('.config')
    config.include('modmod.views')
    config.include('modmod.views.util')

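    # store uploaded files on the local filesystem, served under /upload/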
    safile_settings = {
        'file.storages': ['fs:' + settings['upload_dir']],
        'fs.' + settings['upload_dir'] + '.asset_path': '/upload/',
    }

    pyramid_safile.init_factory(safile_settings)

    init_worker(settings, safile_settings)

    stripe.api_key = settings['stripe.api_key']

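    # initialise Firebase only when the service account key is available (skipped on CI)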
    if not "CI" in os.environ and os.path.isfile(
            'secret/fbServiceAccountKey.json'):
        cred = credentials.Certificate('secret/fbServiceAccountKey.json')
        default_firebase_app = firebase_admin.initialize_app(cred)

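    # shut down cleanly on interrupt, termination and hang-up signals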
    signal.signal(signal.SIGINT, sigint_handler)
    signal.signal(signal.SIGTERM, sigint_handler)
    signal.signal(signal.SIGHUP, sigint_handler)

    return config.make_wsgi_app()
Example #4
def import_oice_script(_settings, _safile_settings, user_email, job_id,
                       oice_id, script, language):
    socket_url = 'import/' + job_id

    try:
        # Parsing
        send_result_request(socket_url, {
            'stage': 'parsing',
        })

        (used_character_ids,
         used_asset_ids,
         used_macro_names,
         serialized_blocks) = parse_script(script)

        pyramid_safile.init_factory(_safile_settings)

        engine = engine_from_config(_settings)
        DBSession.configure(bind=engine)

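        # resolve the target oice and collect the libraries the script depends on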
        oice = OiceQuery(DBSession).get_by_id(oice_id)
        used_library_ids = set()

        send_result_request(socket_url, {
            'stage': 'validating',
        })

        # Check characters
        characters = CharacterQuery(DBSession).fetch_by_ids(used_character_ids)
        used_valid_character_ids = set(character.id
                                       for character in characters)
        invalid_character_ids = used_character_ids - used_valid_character_ids
        if invalid_character_ids:
            raise ScriptImportParserError(
                'ERR_IMPORT_SCRIPT_CHARACTER_NOT_FOUND', {
                    'characterIds': str(invalid_character_ids),
                })
        used_library_ids.update(character.library_id
                                for character in characters)

        # Check assets
        assets = AssetQuery(DBSession).get_by_ids(used_asset_ids)
        used_valid_asset_ids = set(asset.id for asset in assets)
        invalid_asset_ids = used_asset_ids - used_valid_asset_ids
        if invalid_asset_ids:
            raise ScriptImportParserError(
                'ERR_IMPORT_SCRIPT_ASSET_NOT_FOUND', {
                    'assetIds': str(invalid_asset_ids),
                })
        used_library_ids.update(asset.library_id for asset in assets)

        # Select used library for user if needed
        user = UserQuery(DBSession).fetch_user_by_email(user_email).one()
        used_libraries = LibraryQuery(DBSession).get_librarys_by_ids(
            used_library_ids)
        for library in used_libraries:
            if library not in user.libraries_selected:
                if library.price < 0:
                    raise ScriptImportParserError(
                        'ERR_IMPORT_SCRIPT_LIBRARY_NOT_OWNED', {
                            'libraryId': library.id,
                            'libraryName': library.name,
                        })
                elif library not in user.libraries_purchased:
                    raise ScriptImportParserError(
                        'ERR_IMPORT_SCRIPT_LIBRARY_NOT_PURCHASED', {
                            'libraryId': library.id,
                            'libraryName': library.name,
                        })
        # avoid re-adding libraries that the user has already selected
        user.libraries_selected.extend(
            [library for library in used_libraries
             if library not in user.libraries_selected])

        # Insert blocks to database
        macros = DBSession.query(Macro).filter(
            Macro.tagname.in_(used_macro_names)).all()
        macros_dict = dict((macro.tagname, macro) for macro in macros)

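        # imported blocks are appended after the oice's current last block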
        parent_block = DBSession.query(Block) \
                                .filter(Block.oice_id == oice.id) \
                                .order_by(Block.position.desc()) \
                                .first()
        position = parent_block.position + 1 if parent_block else 0

        characters_dict = dict(
            (character.id, character) for character in characters)

        for index, block_dict in enumerate(serialized_blocks):
            macro_name = block_dict['macro']
            macro = macros_dict[macro_name]

            block = Block(macro=macro, oice=oice, position=position + index)
            DBSession.add(block)

            ensure_block_default_value(DBSession, block, language)

            attributes = block_dict['attributes']
            if macro_name == 'characterdialog':
                character = characters_dict[attributes['character']]

                if 'name' in attributes and not character.is_generic:
                    raise ScriptImportParserError(
                        'ERR_IMPORT_SCRIPT_CHARACTER_FORBID_RENAME', {
                            'characterId': character.id,
                        })

                # set fg as the first one for character
                attributes['fg'] = next(fg.id for fg in character.fgimages)

            update_block_attributes(DBSession, block, attributes, language)

            send_result_request(socket_url, {
                'stage': 'inserting',
                'progress': float(index + 1) / len(serialized_blocks) * 100,
            })

    except ScriptImportParserError as error:
        send_result_request(
            socket_url, {
                'error': True,
                'key': error.key,
                'interpolation': error.interpolation,
            })
    except Exception as error:
        send_result_request(
            socket_url, {
                'error': True,
                'key': 'ERR_IMPORT_SCRIPT_UNKNOWN_ERROR',
                'interpolation': {
                    'message': str(error),
                },
            })
    else:
        transaction.commit()
        send_result_request(socket_url, {
            'stage': 'finished',
        })
Example #5
def run_build(_settings, _safile_settings, oice_id, ks_view_url,
              oice_communication_url, og_image_button_url, og_image_origin_url,
              email, isPreview, batchId):

    pyramid_safile.init_factory(_safile_settings)

    engine = engine_from_config(_settings)
    DBSession.configure(bind=engine)

    with transaction.manager:
        oice = OiceQuery(DBSession).get_by_id(oice_id)
        characters = CharacterQuery(DBSession).fetch_by_oice(oice)

        payload = {'title': oice.filename, 'id': oice_id, 'url': ks_view_url}
        if batchId:
            payload["batchId"] = batchId

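        # build into a temporary folder first, then swap it into place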
        try:
            temp_folder = tempfile.mkdtemp()
            output_path = os.path.join(temp_folder, 'build')
            builder = KSScriptBuilder(
                _settings["o2.build_script"],
                _settings["o2.resize_script"],
                oice,
                output_path,
                ks_view_url,
                oice_communication_url,
                og_image_button_url,
                og_image_origin_url,
                characters=characters,
            )

            builder.build()

            view_path = _settings["o2.output_dir"] % {'ks_uuid': oice.uuid}

            # remove the old version
            if os.path.exists(view_path):
                shutil.rmtree(view_path)
            if not isPreview and _settings["gcloud.enable_upload"] in {
                    '1', 'true', True
            }:
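                # upload the build output to Google Cloud Storage, skipping
                # files whose md5 already matches the uploaded copy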
                client = storage.Client.from_service_account_json(
                    os.path.abspath(_settings["gcloud.json_path"]),
                    project=_settings["gcloud.project_id"])
                if client:
                    bucket = client.get_bucket(_settings["gcloud.bucket_id"])

                    for dir_, _, files in os.walk(output_path):
                        for fileName in files:
                            relDir = os.path.relpath(dir_, output_path)
                            if relDir != '.':
                                blob_path = os.path.join(
                                    'view', oice.uuid, relDir, fileName)
                            else:
                                blob_path = os.path.join(
                                    'view', oice.uuid, fileName)
                            blob = bucket.get_blob(blob_path)
                            if blob is None:
                                blob = bucket.blob(blob_path)
                            elif blob.md5_hash and md5_b64(
                                    os.path.join(dir_, fileName)) == blob.md5_hash:
                                log.info(
                                    'Identical md5 exists on gcloud, skipping: '
                                    + fileName)
                                continue
                            log.info('Uploading to google cloud: ' + fileName)
                            blob.upload_from_filename(
                                filename=os.path.join(dir_, fileName))
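                            # serve script.js uncached so clients always fetch the latest build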
                            if fileName == 'script.js':
                                blob.cache_control = "private, max-age=0, no-transform"
                                blob.patch()

            if not isPreview:
                oice.story.updated_at = datetime.utcnow()

            # move the new build to folder
            shutil.move(output_path, view_path)
            shutil.rmtree(temp_folder)
        except Exception as error:
            if email:
                update_user_mailchimp_stage(email=email, stage=3)

            log.exception('oice build failed')

            payload["message"] = str(error)
            send_result_request('build', payload)

        else:
            if init_slack(_settings) and not isPreview and oice.is_public():
                author = oice.story.users[0]
                if not author.is_admin():
                    send_oice_publish_message_into_slack(
                        author, oice, ks_view_url, og_image_origin_url)
            if email:
                update_user_mailchimp_stage(email=email, stage=4)

            payload["message"] = "ok"
            send_result_request('build', payload)