def fork_story(session, story, is_self_forking=False):
    """Fork ``story`` into a new Story row within ``session``.

    The passed-in ``story`` ORM object is mutated in place: it is detached
    from the session, its ``fork_of`` is pointed at the original id, its id
    is cleared, and it is re-added as a transient (new) row.  The
    expunge → mutate → make_transient → add ordering is what makes
    SQLAlchemy insert a fresh row instead of updating the original.

    :param session: SQLAlchemy session used for the fork.
    :param story: the Story to fork (mutated and returned as the fork).
    :param is_self_forking: when True, append ``(1)`` to the name so the
        copy is distinguishable from the original.
    :returns: the forked (flushed) Story.
    """
    # Copy the cover image into a fresh storage handle, if one exists.
    # NOTE(review): the file object from open() is never closed here —
    # presumably create_handle consumes it synchronously; confirm, else
    # this leaks a file descriptor per fork.
    handle = None
    if story.cover_storage:
        try:
            cover_storage = story.cover_storage
            factory = pyramid_safile.get_factory()
            handle = factory.create_handle(cover_storage.filename, open(cover_storage.dst, 'rb'))
        except FileNotFoundError as e:
            # Missing source file is tolerated: fork proceeds without a cover.
            log.error(str(e))
    # NOTE(review): og_image_handle is created below but never attached to
    # the forked story — looks like dead code or a missing
    # import_og_image_handle call; confirm intent.
    og_image_handle = None
    if story.og_image:
        try:
            og_image = story.og_image
            factory = pyramid_safile.get_factory()
            og_image_handle = factory.create_handle(og_image.filename, open(og_image.dst, 'rb'))
        except FileNotFoundError as e:
            log.error(str(e))
    # Detach the object, then re-fetch the original so its localizations
    # can be copied onto the fork after it is re-added.
    session.expunge(story)
    original_story = StoryQuery(session).get_story_by_id(story.id)
    story.fork_of = story.id
    story.id = None  # cleared so the flush below INSERTs a new row
    if is_self_forking:
        story.name = story.name + '(1)'
    story.import_handle(handle)
    make_transient(story)
    session.add(story)
    story.localizations = original_story.localizations
    session.flush()
    return story
def run_export(_settings, _safile_settings, story_export_id):
    """Export an oice's script data to a zip and attach it to the export row.

    Runs standalone (e.g. from a worker): it initializes the safile factory,
    binds the DB engine, exports the scripts of the oice referenced by
    ``story_export_id`` into a temporary ``data.zip``, stores the zip through
    pyramid_safile, and notifies the result endpoint.

    :param _settings: app settings dict (provides ``o2.resize_script``).
    :param _safile_settings: settings for ``pyramid_safile.init_factory``.
    :param story_export_id: primary key of the ProjectExport to process.
    :raises sqlalchemy.orm.exc.NoResultFound: if the export row is missing.
    """
    pyramid_safile.init_factory(_safile_settings)
    engine = engine_from_config(_settings)
    DBSession.configure(bind=engine)
    with transaction.manager:
        story_export = DBSession.query(ProjectExport) \
            .filter(ProjectExport.id == story_export_id) \
            .one()
        characters = CharacterQuery(DBSession).fetch_by_oice(story_export.oice)
        temp_zip = os.path.join(tempfile.mkdtemp(), 'data.zip')
        exporter = ScriptExporter(
            _settings["o2.resize_script"],
            story_export.oice,
            temp_zip,
            characters=characters,
        )
        exporter.export()
        factory = pyramid_safile.get_factory()
        # create_handle consumes the file synchronously (audio_transcodec
        # deletes the zip right after the same call), so close the
        # descriptor deterministically — the original leaked it.
        with open(temp_zip, 'rb') as zip_file:
            handle = factory.create_handle('data.zip', zip_file)
        story_export.exported_files = handle
        send_result_request('export', {'id': story_export_id, 'message': 'ok'})
def update_asset(request):
    """Update an existing asset's metadata and/or its binary file.

    Reads a JSON ``meta`` field plus an optional ``asset`` file upload from
    ``request.POST``.  Audio assets are transcoded asynchronously (a job id
    is returned); all other types are stored immediately, with background
    images resized in-process.

    :param request: Pyramid request; ``request.context`` is the Asset.
    :returns: dict with code/message/serialized asset, plus ``jobId`` for
        async audio processing.
    :raises ValidationError: on malformed JSON/meta values.
    """
    asset = request.context
    try:
        meta = json.loads(request.POST['meta'])
        if 'credits' in meta:
            users = UserQuery(DBSession).fetch_user_by_ids(user_ids=meta['credits'])
            if users:
                asset.users = users
            else:
                # An empty/unresolvable credits list clears the association.
                asset.users = []
        if 'creditsUrl' in meta:
            asset.credits_url = meta['creditsUrl']
        if 'nameTw' in meta:
            asset.name_tw = meta['nameTw']
        if 'nameEn' in meta:
            asset.name_en = meta['nameEn']
        if 'nameJp' in meta:
            asset.name_jp = meta['nameJp']
        job_id = None
        if 'asset' in request.POST:
            asset_file = request.POST['asset']
            file_extension = os.path.splitext(asset_file.filename)[1].lower()
            # Set filename to <type><file extension>
            asset_file.filename = asset.asset_types[0].type_ + file_extension
            asset.filename = asset_file.filename
            if asset.asset_types[0].type_ == 'audio':
                # Audio is handed off to an async transcode job identified
                # by a fresh uuid; the client polls with the returned jobId.
                job_id = uuid.uuid4().hex
                validate_audio_format(file_extension)
                handle_audio_asset_files(job_id, [asset], [asset_file])
            else:
                factory = pyramid_safile.get_factory()
                handle = factory.create_handle(asset_file.filename, asset_file.file)
                if asset.asset_types[0].folder_name == 'bgimage':
                    bgImageHandler = ResizeBackgroundImage(handle.dst)
                    bgImageHandler.run()
                asset.import_handle(handle)
        DBSession.add(asset)
        response = {
            'code': 200,
            'message': 'ok',
            'asset': asset.serialize(),
        }
        if job_id:
            response['jobId'] = job_id
        else:
            # Library timestamp only bumped for synchronous updates —
            # presumably the audio job updates it on completion; confirm.
            # NOTE(review): uses datetime.utcnow() while sibling functions
            # use datetime.datetime.utcnow() — verify the import style of
            # this module.
            asset.library.updated_at = datetime.utcnow()
        return response
    except ValueError as e:
        raise ValidationError(str(e))
def update_library(request):
    """Update a library's metadata, pricing, launch state and cover image.

    Accepts a JSON ``meta`` field and an optional ``coverStorage`` upload in
    ``request.POST``.  Returns the serialized library on success.

    :raises ValidationError: for malformed input or when a price tier is
        attached to a non-priced library.
    """
    library_id = request.matchdict['library_id']
    try:
        library = LibraryQuery(DBSession)\
            .get_library_by_id(library_id)
        # Hardcode config for now, does not work
        # if 'config' in request.json_body:
        #     library.config_obj = request.json_body['config']
        if 'meta' in request.POST:
            meta = json.loads(request.POST['meta'])
            # Simple pass-through string fields.
            for meta_key, attr_name in (
                ('name', 'name'),
                ('description', 'description'),
                ('license', 'license'),
            ):
                if meta_key in meta:
                    setattr(library, attr_name, meta[meta_key])
            if 'price' in meta:
                # Free/private libraries (price <= 0) cannot take a tier.
                if library.price <= 0:
                    raise ValidationError('ERR_LIBRARY_PRICE_TIER_SHOULD_NOT_BE_ATTACHED')
                else:
                    library.price = PriceTierQuery(DBSession).get_price_usd_by_tier(meta['price'])
            if 'launchedAt' in meta and 'isLaunched' in meta:
                launched_at = meta['launchedAt']
                is_launched = meta['isLaunched']
                if not is_launched and launched_at:
                    # Un-launch: clear timestamp and hide the library.
                    library.launched_at = None
                    library.is_public = False
                elif is_launched and not launched_at:
                    # First launch: stamp now and publish.
                    library.launched_at = datetime.datetime.utcnow()
                    library.is_public = True
        if 'coverStorage' in request.POST:
            cover_upload = request.POST['coverStorage']
            storage_factory = pyramid_safile.get_factory()
            ext = os.path.splitext(cover_upload.filename)[1]
            cover_handle = storage_factory.create_handle('cover_storage' + ext, cover_upload.file)
            library.import_handle(cover_handle)
        DBSession.add(library)
        return {
            'code': 200,
            'message': 'ok',
            'library': library.serialize()
        }
    except ValueError as e:
        raise ValidationError(str(e))
def audio_transcodec(original_filename, audio_bytes):
    """Transcode raw audio bytes to MP4 (AAC) and OGG (Vorbis) via ffmpeg,
    zip both outputs, store the zip through pyramid_safile, and unzip it
    next to the stored location.

    :param original_filename: source filename; its stem names the outputs.
    :param audio_bytes: the raw audio file content.
    :returns: the safile handle for the zip, or ``None`` when either
        transcode failed or the zip could not be stored.
    """
    filename = os.path.splitext(original_filename)[0]
    tempdir = tempfile.mkdtemp()
    zip_dir = tempfile.mkdtemp()
    temp_zip = os.path.join(zip_dir, filename + '.zip')
    try:
        # Write the source and CLOSE it before ffmpeg reads it — the
        # original kept an unflushed buffered handle open, so ffmpeg could
        # see a truncated file.
        with open(os.path.join(tempdir, original_filename), 'wb') as fdst:
            fdst.write(audio_bytes)
        mp4_filename = filename + '.mp4'
        ogg_filename = filename + '.ogg'
        subprocess.call([
            'ffmpeg', '-i', original_filename,
            '-c:a', 'aac', '-b:a', '128k',
            '-movflags', '+faststart',
            '-vn', '-sn', '-dn',
            mp4_filename
        ], cwd=tempdir)
        subprocess.call([
            'ffmpeg', '-i', original_filename,
            '-c:a', 'libvorbis', '-qscale:a', '5',
            '-vn', '-sn', '-dn',
            ogg_filename
        ], cwd=tempdir)
        # no mp4 or ogg files will be generated if ffmpeg operations are
        # unsuccessful (ffmpeg's exit code is not checked, only outputs).
        file_list = os.listdir(tempdir)
        if not (mp4_filename in file_list and ogg_filename in file_list):
            return None  # cleanup still runs via finally (original leaked here)
        subprocess.call(['zip', '-r', temp_zip, '.'], cwd=tempdir)
        try:
            factory = pyramid_safile.get_factory()
            # Close the zip fd once create_handle has consumed it.
            with open(temp_zip, 'rb') as zip_file:
                handle = factory.create_handle(os.path.basename(temp_zip), zip_file)
        except FileNotFoundError:
            return None
        # Expand the stored zip in place so individual files sit beside it.
        subprocess.call(
            ['unzip', '-o', handle.dst, '-d', os.path.dirname(handle.dst)])
        return handle
    finally:
        # Remove both scratch dirs on every path (the original leaked the
        # zip's parent dir always, and tempdir on the early return).
        shutil.rmtree(tempdir, ignore_errors=True)
        shutil.rmtree(zip_dir, ignore_errors=True)
def create_asset(asset_types, asset_type, meta, asset_file, library_id, user_email, order):
    """Build (but do not persist) an Asset from an upload and its metadata.

    Credits default to the uploading user when none are supplied; names fall
    back to the English name.  Audio assets ('bgm'/'se') are only validated
    here — their storage handle is created later by the transcode pipeline —
    while every other type is stored immediately (with background images
    resized in-process).

    :returns: the constructed Asset.
    """
    credits_url = meta.get('creditsUrl')
    if 'credits' in meta:
        users = UserQuery(DBSession).fetch_user_by_ids(
            user_ids=meta['credits'])
    else:
        # No explicit credits: credit the uploader.
        users = [
            UserQuery(DBSession).fetch_user_by_email(email=user_email).one()
        ]
    # Set name_en as default
    name_en = meta.get('nameEn')
    name_tw = meta.get('nameTw', name_en)
    name_jp = meta.get('nameJp', name_en)
    ext = os.path.splitext(asset_file.filename)[1].lower()
    # Set filename to <type><file extension>
    asset_file.filename = asset_types[0].type_ + ext
    handle = None
    if asset_type in ('bgm', 'se'):
        # Audio only gets format validation here; no handle yet.
        validate_audio_format(ext)
    else:
        storage_factory = pyramid_safile.get_factory()
        handle = storage_factory.create_handle(asset_file.filename, asset_file.file)
        if asset_type == 'bgimage':
            resizer = ResizeBackgroundImage(handle.dst)
            resizer.run()
    return Asset.from_handle(handle=handle,
                             asset_types=asset_types,
                             name_tw=name_tw,
                             name_en=name_en,
                             name_jp=name_jp,
                             library_id=library_id,
                             filename=asset_file.filename,
                             users=users,
                             credits_url=credits_url,
                             order=order)
def update_story(request):
    """Update a story's localized name/description and its image assets.

    Accepts a JSON ``meta`` field plus optional ``coverStorage``,
    ``titleLogo``, ``heroImage`` and ``ogImage`` uploads in ``request.POST``.
    The repeated factory/extension/create_handle stanza of the original is
    factored into ``_make_upload_handle``.

    :returns: dict with code and the story serialized for the query language.
    :raises ValidationError: on malformed JSON/meta values.
    """
    story_id = request.matchdict['story_id']
    try:
        story = StoryQuery(DBSession)\
            .get_story_by_id(story_id)
        query_language = fetch_story_query_language(request, story)
        # Hardcode config for now, does not work
        # if 'config' in request.json_body:
        #     story.config_obj = request.json_body['config']
        if 'meta' in request.POST:
            meta = json.loads(request.POST['meta'])
            if 'name' in meta:
                story.set_name(meta['name'], query_language)
            if 'description' in meta:
                story.set_description(meta['description'], query_language)
            # /* Do not allow change of primary language */
            # if 'language' in meta:
            #     story.language = meta['language']
        if 'coverStorage' in request.POST:
            handle = _make_upload_handle(request.POST['coverStorage'], 'cover_storage')
            story.import_handle(handle, query_language)
        if 'titleLogo' in request.POST:
            handle = _make_upload_handle(request.POST['titleLogo'], 'title_logo')
            story.import_title_logo_handle(handle)
        if 'heroImage' in request.POST:
            handle = _make_upload_handle(request.POST['heroImage'], 'hero_image')
            story.import_hero_image_handle(handle)
        if 'ogImage' in request.POST:
            # The og image is composed/re-encoded, hence the fixed .jpg name
            # (the original computed the upload's extension here but never
            # used it).
            handle = _make_upload_handle(request.POST['ogImage'], 'og_image',
                                         fixed_filename='og_image.jpg')
            ogImageHandler = ComposeOgImage(handle.dst)
            ogImageHandler.run()
            story.import_og_image_handle(handle, query_language)
        DBSession.add(story)
        return {'code': 200, 'story': story.serialize(query_language)}
    except ValueError as e:
        raise ValidationError(str(e))


def _make_upload_handle(upload, base_name, fixed_filename=None):
    """Create a pyramid_safile handle for a POST file upload.

    The stored filename is ``base_name`` plus the upload's original
    extension, unless ``fixed_filename`` overrides it entirely.
    """
    factory = pyramid_safile.get_factory()
    if fixed_filename is not None:
        filename = fixed_filename
    else:
        filename = base_name + os.path.splitext(upload.filename)[1]
    return factory.create_handle(filename, upload.file)
def add_library(request):
    """Create a new library for the authenticated user.

    The JSON ``meta`` field must include a ``type`` ('public', 'private' or
    'forSale'); pricing follows from the type.  An optional ``coverStorage``
    upload becomes the cover image.  The new library is appended to the
    user's owned and selected libraries and flushed so it has an id.

    :raises ValidationError: on malformed input, missing type, or a price
        tier attached to a non-forSale library.
    :raises HTTPForbidden: when a free user tries to create a private library.
    """
    try:
        meta = json.loads(request.POST['meta'])
        user = UserQuery(DBSession).fetch_user_by_email(
            email=request.authenticated_userid).one()
        library = Library.create(DBSession, name=meta.get('name', 'New Library'))
        if 'description' in meta:
            library.description = meta['description']
        if 'license' in meta:
            library.license = meta['license']
        if 'type' not in meta:
            # should provide type to determine library type at first
            raise ValidationError('ERR_ADD_LIBRARY_TYPE_INFORMATION_MISSING')
        library_type = meta['type']
        if library_type == 'public':
            library.price = 0
            library.is_public = True
            library.launched_at = datetime.datetime.utcnow()
        elif library_type == 'private':
            # NOTE(review): is_paid is read as an attribute here but called
            # as a method elsewhere in this codebase — confirm which it is.
            if not user.is_paid:
                raise HTTPForbidden
            library.price = -1  # sentinel: private, not purchasable
        if 'price' in meta:
            if library_type == 'forSale':
                library.price = PriceTierQuery(
                    DBSession).get_price_usd_by_tier(meta['price'])
            else:
                raise ValidationError(
                    'ERR_LIBRARY_PRICE_TIER_SHOULD_NOT_BE_ATTACHED')
        if 'coverStorage' in request.POST:
            cover_upload = request.POST['coverStorage']
            storage_factory = pyramid_safile.get_factory()
            ext = os.path.splitext(cover_upload.filename)[1]
            cover_handle = storage_factory.create_handle('cover_storage' + ext,
                                                         cover_upload.file)
            library.import_handle(cover_handle)
        user.libraries.append(library)
        # preselect library created by user
        user.libraries_selected.append(library)
        DBSession.add(library)
        # flush because we need an ID
        DBSession.flush()
        return {
            "library": library.serialize(),
            "message": "ok",
            "code": 200,
        }
    except ValueError as e:
        raise ValidationError(str(e))
def login_user(request):
    """Log a user in via a Firebase token, creating the account on first login.

    Side effects include: verifying the Firebase token (except for the
    'oice-dev' shortcut outside production), creating the User row on first
    login, forking the sample story/oice for non-anonymous users, trial
    bookkeeping, avatar import from ``photoURL``, Google-account binding for
    anonymous users, and Kafka action logging throughout.

    :param request: Pyramid request carrying the JSON login payload.
    :returns: a 200 JSON Response with the serialized user and a session
        cookie set via ``remember``.
    :raises ValidationError: on bad parameters or failed token verification.
    """
    try:
        email = request.json_body.get('email')
        firebase_token = request.json_body.get('firebaseToken')
        is_anonymous = request.json_body.get('isAnonymous')
        firebase_user_id = request.json_body.get('firebaseUserId')
        google_token = request.json_body.get('googleToken')
        branch_data = request.json_body.get('branchData')
        prev_firebase_user_id = request.json_body.get('prevFirebaseUserId')
    except ValueError:
        raise ValidationError('ERR_INVALID_AUTH_PARAM')
    # 'oice-dev' bypasses token verification outside production.
    if get_is_production() or email != 'oice-dev':
        try:
            auth.verify_id_token(firebase_token)
        except ValueError:
            raise ValidationError('ERR_FIREBASE_AUTH_ERROR')
        except AppIdentityError:
            raise ValidationError('ERR_INVALID_FIREBASE_TOKEN')
    old_auth_id = authenticated_userid(request)
    # Anonymous users are keyed by firebase uid instead of email.
    fetch_username = email
    if is_anonymous and firebase_user_id:
        fetch_username = firebase_user_id
    # Init these bools here to avoid scope issue
    is_first_login = False
    is_trial_ended = False
    log_dict = {
        'topic': 'actionUser',
        'isAnonymous': 'true' if is_anonymous else 'false',
        'isDeeplink': 'false',
    }
    if branch_data:
        # Enrich the action log with Branch deep-link attribution.
        log_dict.update({
            'channel': dict_get_value(branch_data, ['~channel'], 'direct'),
            'isDeeplink': 'true',
        })
        log_dict = set_basic_info_referrer_log(
            dict_get_value(branch_data, ['+referrer'], 'none'),
            dict_get_value(branch_data, ['referrer2'], 'none'),
            log_dict)
        oice_source = OiceQuery(DBSession).get_by_uuid(
            dict_get_value(branch_data, ['uuid']))
        if oice_source:
            log_dict = set_basic_info_oice_source_log(
                oice_source.story.users[0], oice_source, log_dict)
    try:
        user = UserQuery(DBSession).fetch_user_by_email(
            email=fetch_username).one()
    except NoResultFound:
        # First login: create the account.
        user = User(email=fetch_username, is_anonymous=is_anonymous)
        if firebase_user_id:
            user.display_name = firebase_user_id
        DBSession.add(user)
        # Flush so user.id exists for the log/serialize calls below.
        DBSession.flush()
        is_first_login = True
        is_trial_ended = False
        # log
        log_dict.update({'action': 'createUser'})
        log_dict = set_basic_info_user_log(user, log_dict)
        log_dict = set_basic_info_log(request, log_dict)
        log_message(KAFKA_TOPIC_USER, log_dict)
    else:
        user.last_login_at = datetime.datetime.utcnow()
        if not user.is_anonymous:
            # Ensure the user owns a fork of the sample story/oice.
            sample_story = StoryQuery(DBSession).get_sample_story(
                user.language)
            story = next((user_story
                          for user_story in user.stories
                          if sample_story.id == user_story.fork_of), None)
            if not story:
                story = fork_story(DBSession, sample_story)
                sample_oice = OiceQuery(DBSession).get_sample_oice(
                    language=user.language)
                oice = fork_oice(DBSession, story, sample_oice)
                user.stories.append(story)
            if user.is_trial:
                # Expired paid trial drops the user back to 'user' role.
                if user.is_paid() and user.expire_date < datetime.datetime.utcnow():
                    user.role = 'user'
                    update_user_mailchimp_stage(user=user, stage=5)
                if user.is_free():
                    user.is_trial = False
                    is_trial_ended = True
            else:
                # if user.is_free() and not user.expire_date:
                # Disabled trial due to business request
                # UserOperations.start_trial(user)
                is_trial_ended = False
        is_first_login = False
        if not old_auth_id or request.headers.get('x-oice-app-version'):
            # log
            # A changed firebase uid means the anonymous account is being
            # redeemed into a real one.
            is_redeem_account = prev_firebase_user_id and firebase_user_id != prev_firebase_user_id
            log_dict.update({
                'action': 'redeemAccount' if is_redeem_account else 'login',
            })
            log_dict = set_basic_info_user_log(user, log_dict)
            log_dict = set_basic_info_log(request, log_dict)
            log_message(KAFKA_TOPIC_USER, log_dict)
            if is_redeem_account:
                handle_anonymous_user_app_story_progress(is_existing_user=True, \
                                                         prev_user_email=prev_firebase_user_id, \
                                                         new_user=user)
    # Import the social avatar once, only if none is stored yet.
    photo_url = request.json_body.get('photoURL', None)
    if photo_url and user.avatar_storage is None:
        r = requests.get(photo_url)
        avatar = BytesIO(r.content)
        factory = pyramid_safile.get_factory()
        handle = factory.create_handle('avatar.png', avatar)
        user.import_handle(handle)
    language = request.json_body.get('language', None)
    normalized_language = None
    if language and user.language is None:
        normalized_language = normalize_language(language)
        if normalized_language:
            user.language = normalized_language
            # derive ui_language when creating user
            user.ui_language = normalize_ui_language(normalized_language)
    if (is_first_login or user.is_anonymous) and google_token:
        # Bind the Google account: set real email/display name, fork the
        # sample content, and provision default libraries.
        display_name = request.json_body.get('displayName', None)
        if email:
            user.email = email
            if not display_name:
                display_name = email.split('@')[0]
        if display_name:
            user.display_name = display_name
        sample_story = StoryQuery(DBSession).get_sample_story(
            normalized_language)
        story = fork_story(DBSession, sample_story)
        sample_oice = OiceQuery(DBSession).get_sample_oice(
            language=normalized_language)
        oice = fork_oice(DBSession, story, sample_oice)
        # open a public library for new user
        library = create_user_public_library(DBSession, user.display_name)
        user.stories.append(story)
        user.libraries.append(library)
        user.libraries_selected.append(library)
        # pre-select default libraries for new user
        default_libs = LibraryQuery(DBSession).fetch_default_libs()
        user.libraries_purchased.extend(default_libs)
        user.libraries_selected.extend(default_libs)
        # Disabled trial due to business request
        # UserOperations.start_trial(user)
        user.last_login_at = datetime.datetime.utcnow()
        subscribe_mailchimp(google_token, user, language=language)
        # update elastic search when create user
        update_elastic_search_user(user.display_name, email)
        if is_first_login and request.headers.get('x-oice-app-version'):
            # log
            log_dict.update({'action': 'bindAccount'})
            log_dict = set_basic_info_user_log(user, log_dict)
            log_dict = set_basic_info_log(request, log_dict)
            log_message(KAFKA_TOPIC_USER, log_dict)
        handle_anonymous_user_app_story_progress(is_existing_user=False, \
                                                 prev_user_email=prev_firebase_user_id, \
                                                 new_user=user)
        user.is_anonymous = False
    serialize_user = user.serialize()
    serialize_user['isFirstLogin'] = is_first_login
    serialize_user['isTrialEnded'] = is_trial_ended
    # HMAC identity hash required by Intercom's identity verification.
    serialize_user['intercomUserHash'] = hmac.new(
        bytes(get_intercom_secret_key().encode('utf-8')),
        bytes(str(user.id).encode('utf-8')),
        digestmod=hashlib.sha256).hexdigest()
    response = Response()
    response.status_code = 200
    response.headers = remember(request, user.email)
    response.content_type = 'application/json'
    response.charset = 'UTF-8'
    response.text = json.dumps({'code': 200, 'user': serialize_user})
    return response
def update_profile(request):
    """Update the authenticated user's profile and log every field change.

    Accepts a JSON ``meta`` field and an optional ``avatar`` upload in
    ``request.POST``.  Each mutation is recorded as a before/after entry in
    the Kafka action log.

    :returns: dict with the serialized user, message and code.
    :raises ValidationError: on malformed JSON/meta values.
    """
    try:
        email = request.authenticated_userid
        user = UserQuery(DBSession).fetch_user_by_email(email=email).one()
        log_dict = {
            'change': [],
            'action': 'changeSetting',
        }

        def record_change(field, before, after):
            # One audit entry per mutated field, in mutation order.
            log_dict['change'].append({
                'whichFieldChange': field,
                'beforeChange': before,
                'afterChange': after,
            })

        if 'meta' in request.POST:
            meta = json.loads(request.POST['meta'])
            if 'displayName' in meta:
                record_change('user', user.display_name, meta['displayName'])
                user.display_name = meta['displayName']
                # update elastic search when update profile
                update_elastic_search_user(user.display_name, email)
            if 'username' in meta:
                record_change('username', user.username, meta['username'])
                user.username = meta['username']
            if 'description' in meta:
                record_change('description', user.description, meta['description'])
                user.description = meta['description']
            if 'seekingSubscriptionMessage' in meta:
                message = meta['seekingSubscriptionMessage']
                record_change('seekingSubscriptionMessage',
                              user.seeking_subscription_message, message)
                user.seeking_subscription_message = message
            if 'language' in meta:
                language = normalize_language(meta['language'])
                record_change('language', user.language, language)
                user.language = language
            if 'uiLanguage' in meta:
                record_change('ui_language', user.ui_language, meta['uiLanguage'])
                user.ui_language = meta['uiLanguage']
            if 'tutorialState' in meta:
                # Pack the list of booleans into a single bitmask integer.
                packed_state = int(
                    "".join(str(int(flag)) for flag in meta['tutorialState']), 2)
                record_change('tutorialState', user.tutorial_state, packed_state)
                user.tutorial_state = packed_state
            if 'likeCoinId' in meta:
                record_change('likeCoinId', user.like_coin_id, meta['likeCoinId'])
                user.like_coin_id = meta['likeCoinId']
        if 'avatar' in request.POST:
            avatar_file = request.POST['avatar']
            factory = pyramid_safile.get_factory()
            ext = os.path.splitext(avatar_file.filename)[1]
            handle = factory.create_handle('avatar' + ext, avatar_file.file)
            record_change(
                'avatar',
                json.dumps(user.avatar_storage.descriptor) if user.avatar_storage else None,
                json.dumps(handle.descriptor) if handle else None,
            )
            user.import_handle(handle)
        update_mailchimp_field(user=user)
        log_dict = set_basic_info_user_log(user, log_dict)
        log_dict = set_basic_info_log(request, log_dict)
        log_message(KAFKA_TOPIC_USER, log_dict)
        return {"user": user.serialize(), "message": "ok", "code": 200}
    except ValueError as e:
        raise ValidationError(str(e))