def it_logs_args_if_development_and_no_google_config_when_upload_from_base64string(self, app, mocker):
    mocker.patch.dict('app.storage.utils.current_app.config', {
        'ENVIRONMENT': 'development',
        'GOOGLE_APPLICATION_CREDENTIALS': '',
    })
    mock_logger = mocker.patch("app.storage.utils.current_app.logger.info")

    store = Storage('test-store')
    store.upload_blob_from_base64string('test.png', '2019/new_test.png', self.base64img)

    assert mock_logger.called
def it_logs_args_if_development_and_no_google_config_when_blob_exists(self, app, mocker):
    mocker.patch.dict('app.storage.utils.current_app.config', {
        'ENVIRONMENT': 'development',
        'GOOGLE_APPLICATION_CREDENTIALS': '',
    })
    mock_logger = mocker.patch("app.storage.utils.current_app.logger.info")

    store = Storage('test-store')
    store.blob_exists('prefix', 'delimiter')

    assert mock_logger.called
def it_uploads_blob_from_base64string(self, app, mocker):
    mocker.patch.dict('os.environ', {
        'GOOGLE_APPLICATION_CREDENTIALS': 'path/to/creds'
    })
    mocker.patch("google.cloud.storage.Client", MockStorageClient)
    mocker.patch("google.auth.compute_engine.Credentials")

    store = Storage('test-store')
    store.upload_blob_from_base64string('test.png', '2019/new_test.png', self.base64img)

    assert store.bucket.blob.source_string == base64.b64decode(self.base64img)
def it_logs_args_if_development_and_no_google_config_when_upload_file(self, app, mocker):
    mocker.patch.dict('app.storage.utils.current_app.config', {
        'ENVIRONMENT': 'development',
        'GOOGLE_APPLICATION_CREDENTIALS': '',
    })
    mock_logger = mocker.patch("app.storage.utils.current_app.logger.info")

    store = Storage('test-store')
    store.upload_blob('source', 'destination')

    assert mock_logger.called
def it_checks_blob_exists(self, app, mocker):
    mocker.patch.dict('os.environ', {
        'GOOGLE_APPLICATION_CREDENTIALS': 'path/to/creds'
    })
    mocker.patch("google.cloud.storage.Client", MockStorageClient)
    mocker.patch("google.auth.compute_engine.Credentials")

    store = Storage('test-store')
    res = store.blob_exists('prefix', 'delimiter')

    assert store.bucket.prefix == 'prefix'
    assert store.bucket.delimiter == 'delimiter'
    assert res
def it_uploads_a_file(self, app, mocker):
    mocker.patch.dict('os.environ', {
        'GOOGLE_APPLICATION_CREDENTIALS': 'path/to/creds'
    })
    mocker.patch("google.cloud.storage.Client", MockStorageClient)
    mocker.patch("google.auth.compute_engine.Credentials")

    store = Storage('test-store')
    store.upload_blob('source', 'destination')

    assert store.bucket.destination_filename == 'destination'
    assert store.bucket.blob.source_filename == 'source'
    assert store.bucket.blob.public
def it_uses_api_key_for_storage_client_with_google_creds_envs(self, app, mocker):
    mocker.patch.dict('os.environ', {
        'GOOGLE_APPLICATION_CREDENTIALS': 'path/to/creds'
    })
    mock_google_creds = mocker.patch("google.auth.compute_engine.Credentials")
    mocker.patch("google.cloud.storage.Client", MockEmptyStorageClient)

    store = Storage('test-store')

    assert not mock_google_creds.called
    assert store.storage_client.list_buckets() == ['test-store']
def it_doesnt_create_the_bucket_if_it_exists(self, app, mocker):
    mocker.patch.dict('os.environ', {
        'GOOGLE_APPLICATION_CREDENTIALS': 'path/to/creds'
    })
    mocker.patch("google.cloud.storage.Client", MockStorageClient)
    mock_google_creds = mocker.patch("google.auth.compute_engine.Credentials")

    store = Storage('test-store')

    assert not mock_google_creds.called
    assert not store.bucket.bucket_created
    assert store.bucket.name == 'test-store'
def it_uses_gce_creds_for_storage_client_without_google_creds_envs(self, app, mocker):
    mocker.patch.dict('os.environ', {
        'GOOGLE_APPLICATION_CREDENTIALS': '',
        'PROJECT': 'test-project'
    })
    mock_google_creds = mocker.patch(
        "google.auth.compute_engine.Credentials", return_value='google-credentials')
    mocker.patch("google.cloud.storage.Client", MockEmptyStorageClient)

    store = Storage('test-store')

    assert mock_google_creds.called
    assert store.storage_client.credentials == 'google-credentials'
    assert store.storage_client.project == 'test-project'
    assert store.storage_client.list_buckets() == ['test-store']
def paypal_ipn():
    VERIFY_URL = current_app.config['PAYPAL_VERIFY_URL']

    params = request.form.to_dict(flat=False)
    current_app.logger.info('IPN params: %r', params)

    # Echo the notification back to PayPal for verification
    params['cmd'] = '_notify-validate'
    headers = {
        'content-type': 'application/x-www-form-urlencoded',
        'user-agent': 'Python-IPN-Verification-Script'
    }
    r = requests.post(VERIFY_URL, params=params, headers=headers, verify=True)
    r.raise_for_status()

    # Check return message and take action as needed
    if r.text == 'VERIFIED':
        current_app.logger.info('VERIFIED: %s', params['txn_id'])

        # The form dict holds a list per field; flatten to single values
        data = {}
        for key in params.keys():
            if isinstance(params[key], list):
                data[key] = params[key][0]
            else:
                data[key] = params[key]

        order_data, tickets, events = parse_ipn(data)

        if not order_data:
            return 'Paypal IPN no order created'

        order_data['params'] = json.dumps(params)
        order = Order(**order_data)
        dao_create_record(order)

        for i, _ticket in enumerate(tickets):
            _ticket['order_id'] = order.id
            ticket = Ticket(**_ticket)
            dao_create_record(ticket)
            tickets[i]['ticket_id'] = ticket.id

        storage = Storage(current_app.config['STORAGE'])

        # Build the confirmation email with a QR code per ticket
        message = "<p>Thank you for your order:</p>"
        for i, event in enumerate(events):
            link_to_post = '{}{}'.format(
                current_app.config['API_BASE_URL'],
                url_for('.use_ticket', ticket_id=tickets[i]['ticket_id']))
            img = pyqrcode.create(link_to_post)
            buffer = io.BytesIO()
            img.png(buffer, scale=2)
            img_b64 = base64.b64encode(buffer.getvalue())

            target_image_filename = '{}{}'.format('qr_codes', str(tickets[i]['ticket_id']))
            storage.upload_blob_from_base64string('qr.code', target_image_filename, img_b64)

            message += '<div><span><img src="{}/{}"></span>'.format(
                current_app.config['IMAGES_URL'], target_image_filename)

            event_date = dao_get_event_date_by_id(tickets[i]['eventdate_id'])
            minutes = ':%M' if event_date.event_datetime.minute > 0 else ''
            message += "<span>{} on {}</span></div>".format(
                event.title,
                event_date.event_datetime.strftime('%-d %b at %-I{}%p'.format(minutes)))

        send_email(order.email_address, 'New Acropolis Event Tickets', message)
    elif r.text == 'INVALID':
        current_app.logger.info('INVALID %r', params['txn_id'])
    else:
        current_app.logger.info('UNKNOWN response %r', params['txn_id'])

    return 'Paypal IPN'
def create_event():
    data = request.get_json(force=True)
    event_year = None

    validate(data, post_create_event_schema)

    try:
        dao_get_event_type_by_id(data['event_type_id'])
    except NoResultFound:
        raise InvalidRequest('event type not found: {}'.format(data['event_type_id']), 400)

    try:
        dao_get_venue_by_id(data['venue_id'])
    except NoResultFound:
        raise InvalidRequest('venue not found: {}'.format(data['venue_id']), 400)

    event = Event(
        event_type_id=data['event_type_id'],
        title=data['title'],
        sub_title=data.get('sub_title'),
        description=data['description'],
        booking_code='',
        fee=data.get('fee'),
        conc_fee=data.get('conc_fee'),
        multi_day_fee=data.get('multi_day_fee'),
        multi_day_conc_fee=data.get('multi_day_conc_fee'),
        venue_id=data.get('venue_id'),
        event_state=data.get('event_state', DRAFT))

    for event_date in data.get('event_dates'):
        if not event_year:
            event_year = event_date['event_date'].split('-')[0]

        speakers = []
        for s in event_date.get('speakers', []):
            speaker = dao_get_speaker_by_id(s['speaker_id'])
            speakers.append(speaker)

        e = EventDate(
            event_datetime=event_date['event_date'],
            end_time=event_date.get('end_time'),
            speakers=speakers)
        dao_create_event_date(e)

        event.event_dates.append(e)

    dao_create_event(event)

    # Paid events get a PayPal buy-now button; store its booking code on the event
    if event.fee:
        event_type = dao_get_event_type_by_id(event.event_type_id)
        p = PayPal()
        booking_code = p.create_update_paypal_button(
            str(event.id), event.title,
            event.fee, event.conc_fee, event.multi_day_fee, event.multi_day_conc_fee,
            True if event_type.event_type == 'Talk' else False)
        dao_update_event(event.id, booking_code=booking_code)

    image_filename = data.get('image_filename')
    image_data = data.get('image_data')

    # New image data is uploaded under <year>/<event id>; a bare filename must already exist in storage
    storage = Storage(current_app.config['STORAGE'])
    if image_data:
        target_image_filename = '{}/{}'.format(event_year, str(event.id))
        storage.upload_blob_from_base64string(image_filename, target_image_filename, image_data)
        image_filename = target_image_filename
    elif image_filename:
        if not storage.blob_exists(image_filename):
            raise InvalidRequest('{} does not exist'.format(image_filename), 400)

    event.image_filename = image_filename
    dao_update_event(event.id, image_filename=image_filename)

    return jsonify(event.serialize()), 201
def import_events():
    data = request.get_json(force=True)

    validate(data, post_import_events_schema)

    errors = []
    events = []
    for item in data:
        err = ''
        event = Event.query.filter(Event.old_id == item['id']).first()
        if not event:
            speakers = []
            event_type = dao_get_event_type_by_old_id(item['Type'])
            if not event_type:
                err = '{} event type not found: {}'.format(item['id'], item['Type'])
                current_app.logger.info(err)
                errors.append(err)
            if item['Speaker']:
                for s in re.split(r' and | & ', item['Speaker']):
                    speaker = dao_get_speaker_by_name(s)
                    if not speaker:
                        err = '{} speaker not found: {}'.format(item['id'], item['Speaker'])
                        current_app.logger.info(err)
                        errors.append(err)
                    else:
                        speakers.append(speaker)
            venue = dao_get_venue_by_old_id(item['venue'])
            if not venue:
                err = '{} venue not found: {}'.format(item['id'], item['venue'])
                current_app.logger.info(err)
                errors.append(err)

            # Skip items with any unresolved lookups
            if err:
                continue

            event = Event(
                old_id=item['id'],
                event_type_id=event_type.id,
                title=item['Title'],
                sub_title=item['SubTitle'],
                description=item['Description'],
                booking_code=item['BookingCode'],
                image_filename=item['ImageFilename'],
                fee=item['Fee'],
                conc_fee=item['ConcFee'],
                multi_day_fee=item['MultiDayFee'],
                multi_day_conc_fee=item['MultiDayConcFee'],
                duration=item['Duration'],
                venue_id=venue.id,
                event_state=APPROVED)

            def add_event_date(event_datetime):
                event_date = EventDate(
                    event_datetime=event_datetime,
                    duration=item['Duration'],
                    fee=item['Fee'],
                    conc_fee=item['ConcFee'],
                    multi_day_fee=item['MultiDayFee'],
                    multi_day_conc_fee=item['MultiDayConcFee'],
                    venue_id=venue.id)

                dao_create_event_date(event_date, speakers)
                event.event_dates.append(event_date)

            add_event_date(item['StartDate'])
            for i in range(2, 5):
                if item['StartDate{}'.format(i)] > '0000-00-00 00:00:00':
                    add_event_date(item['StartDate{}'.format(i)])

            events.append(event)
            dao_create_event(event)
            current_app.logger.info(u'added event {} - {}'.format(event.old_id, event.title))
        else:
            err = u'event already exists: {} - {}'.format(event.old_id, event.title)
            current_app.logger.info(err)
            errors.append(err)

        # When running locally, upload the legacy image file if it is not already in storage
        if is_running_locally() and item['ImageFilename'] and item['ImageFilename'] != '../spacer.gif':
            storage = Storage(current_app.config['STORAGE'])
            if not storage.blob_exists(item['ImageFilename']):
                fname = "./data/events/{}".format(item['ImageFilename'])
                if os.path.isfile(fname):
                    storage.upload_blob(fname, item['ImageFilename'])
                else:
                    err = '{} not found for {}'.format(fname, event.old_id)
                    current_app.logger.info(err)
                    errors.append(err)
            else:
                current_app.logger.info('{} found'.format(item['ImageFilename']))

    res = {"events": [e.serialize() for e in events]}

    if errors:
        res['errors'] = errors

    return jsonify(res), 201 if events else 400 if errors else 200
def update_event(event_id):
    data = request.get_json(force=True)
    current_app.logger.info('Update event: {}'.format(data))

    validate(data, post_update_event_schema)

    try:
        event = dao_get_event_by_id(event_id)
    except NoResultFound:
        raise InvalidRequest('event not found: {}'.format(event_id), 400)

    errs = []
    event_dates = []
    event_data = {}

    if data.get('event_state') == REJECTED:
        new_rejects = [r for r in data.get('reject_reasons') if not r.get('id')]
        if not new_rejects:
            raise InvalidRequest('rejected event requires new reject reason', 400)
    elif data.get('event_state') == APPROVED:
        if data.get('reject_reasons'):
            rejects = [r for r in data.get('reject_reasons') if not r.get('resolved')]
            if rejects:
                raise InvalidRequest('approved event should not have any reject reasons', 400)

    data_event_dates = data.get('event_dates')
    if data_event_dates:
        serialized_event_dates = event.serialize_event_dates()

        data_event_dates__dates = [e['event_date'] for e in data_event_dates]
        serialized_event_dates__dates = [e['event_datetime'] for e in serialized_event_dates]

        # Dates only in the payload are added; dates in both are updated in place
        diff_add = set(data_event_dates__dates).difference(serialized_event_dates__dates)
        intersect = set(data_event_dates__dates).intersection(serialized_event_dates__dates)

        dates_to_add = [e for e in data_event_dates if e['event_date'] in diff_add]
        dates_to_update = [e for e in data_event_dates if e['event_date'] in intersect]

        for _date in dates_to_add:
            speakers = []
            for s in _date.get('speakers', []):
                speaker = dao_get_speaker_by_id(s['speaker_id'])
                speakers.append(speaker)

            e = EventDate(
                event_id=event_id,
                event_datetime=_date['event_date'],
                end_time=_date.get('end_time'),
                speakers=speakers)

            current_app.logger.info('Adding event date: {}'.format(_date['event_date']))
            dao_create_event_date(e)

            if _date['event_date'] not in [_e.event_datetime for _e in event_dates]:
                event_dates.append(e)

        for _date in sorted(dates_to_update, key=lambda k: k['event_date']):
            speakers = []
            for s in _date['speakers']:
                speaker = dao_get_speaker_by_id(s['speaker_id'])
                speakers.append(speaker)

            db_event_date = [
                e for e in event.event_dates
                if e.event_datetime.strftime('%Y-%m-%d %H:%M') == _date['event_date']
            ][0]
            db_event_date.speakers = speakers

            if _date['event_date'] not in [_e.event_datetime for _e in event_dates]:
                event_dates.append(db_event_date)

    if data.get('reject_reasons'):
        for reject_reason in data.get('reject_reasons'):
            if reject_reason.get('id'):
                reject_data = {
                    'reason': reject_reason['reason'],
                    'resolved': reject_reason.get('resolved') or False
                }
                dao_update_reject_reason(reject_reason.get('id'), **reject_data)
            else:
                rr = RejectReason(
                    event_id=event_id,
                    reason=reject_reason['reason'],
                    resolved=reject_reason.get('resolved') or False,
                    created_by=reject_reason.get('created_by'))
                dao_create_reject_reason(rr)

    # Copy only fields that exist on the Event model, excluding reject_reasons
    event_data = {}
    for k in data.keys():
        if hasattr(Event, k) and k not in ['reject_reasons']:
            event_data[k] = data[k]

    if event_dates:
        event_data['event_dates'] = event_dates
    elif data_event_dates == []:
        raise InvalidRequest('{} needs an event date'.format(event_id), 400)

    if event_data.get('fee'):
        update_data = {
            'fee': event_data.get('fee'),
            'conc_fee': event_data.get('conc_fee'),
            'multi_day_fee': event_data.get('multi_day_fee') or 0,
            'multi_day_conc_fee': event_data.get('multi_day_conc_fee') or 0,
            'event_type_id': event_data.get('event_type_id'),
        }
        db_data = {
            'fee': event.fee,
            'conc_fee': event.conc_fee,
            'multi_day_fee': event.multi_day_fee,
            'multi_day_conc_fee': event.multi_day_conc_fee,
            'event_type_id': str(event.event_type.id),
        }

        # Only regenerate the PayPal button if fee-related fields have changed
        if update_data != db_data:
            event_type = dao_get_event_type_by_id(event_data.get('event_type_id'))
            p = PayPal()
            try:
                event_data['booking_code'] = p.create_update_paypal_button(
                    event_id, event_data.get('title'),
                    event_data.get('fee'), event_data.get('conc_fee'),
                    event_data.get('multi_day_fee'), event_data.get('multi_day_conc_fee'),
                    True if event_type.event_type == 'Talk' else False,
                    booking_code=event_data.get('booking_code'))
            except PaypalException as e:
                current_app.logger.error(e)
                errs.append(str(e))

    res = dao_update_event(event_id, **event_data)
    if res:
        image_data = data.get('image_data')
        image_filename = data.get('image_filename')

        storage = Storage(current_app.config['STORAGE'])
        if image_data:
            event_year = str(event.event_dates[0].event_datetime).split('-')[0]
            target_image_filename = '{}/{}'.format(event_year, str(event_id))
            storage.upload_blob_from_base64string(image_filename, target_image_filename, image_data)
            # Append a timestamp as a cache buster
            unix_time = time.time()
            image_filename = '{}?{}'.format(target_image_filename, unix_time)
        elif image_filename:
            image_filename_without_cache_buster = image_filename.split('?')[0]
            if not storage.blob_exists(image_filename_without_cache_buster):
                raise InvalidRequest(
                    '{} does not exist'.format(image_filename_without_cache_buster), 400)

        event.image_filename = image_filename
        dao_update_event(event.id, image_filename=image_filename)

        json_event = event.serialize()
        json_event['errors'] = errs

        if data.get('event_state') == READY:
            emails_to = [admin.email for admin in dao_get_admin_users()]
            message = 'Please review this event for publishing <a href="{}">{}</a>'.format(
                '{}/events/{}'.format(current_app.config['FRONTEND_ADMIN_URL'], event_id),
                event.title)
            send_email(emails_to, '{} is ready for review'.format(event.title), message)
        elif data.get('event_state') == REJECTED:
            emails_to = [user.email for user in dao_get_users()]
            message = '<div>Please correct this event <a href="{}">{}</a></div>'.format(
                '{}/events/{}'.format(current_app.config['FRONTEND_ADMIN_URL'], event_id),
                event.title)
            message += '<ol>'
            for reject_reason in [
                rr for rr in json_event.get('reject_reasons') if not rr.get('resolved')
            ]:
                message += '<li>{}</li>'.format(reject_reason['reason'])
            message += '</ol>'
            send_email(emails_to, '{} event needs to be corrected'.format(event.title), message)

        return jsonify(json_event), 200

    raise InvalidRequest('{} did not update event'.format(event_id), 400)