def import_event_json(task_handle, zip_path, creator_id):
    """Import and create an event (plus its sub-services) from a JSON zip archive.

    Extracts the zip into the import upload directory, builds the core
    Event from the ``event`` JSON file, assigns the importing user as
    OWNER, then imports every service listed in IMPORT_SERIES. Media
    uploads queued during import are run at the end.

    Args:
        task_handle: handle used to report progress via update_state().
        zip_path: path to the uploaded zip containing an ``event`` file
            and one JSON file per entry in IMPORT_SERIES.
        creator_id: id of the User who becomes the event's OWNER.

    Returns:
        The newly created Event instance.

    Raises:
        NotFoundError: if a required service file is missing from the zip.
        Exception: whatever make_error() constructs when the event core or
            a service fails to import (the partially created event is
            deleted first).
    """
    global CUR_ID, UPLOAD_QUEUE
    UPLOAD_QUEUE = []
    update_state(task_handle, 'Started')
    with app.app_context():
        path = app.config['BASE_DIR'] + '/static/uploads/import_event'
        # delete existing files
        if os.path.isdir(path):
            shutil.rmtree(path, ignore_errors=True)
        # extract files from zip
        with zipfile.ZipFile(zip_path, "r") as z:
            z.extractall(path)
        # create event
        try:
            update_state(task_handle, 'Importing event core')
            # context manager so the file handle is closed, not leaked
            with open(path + '/event', 'r') as event_file:
                data = json.loads(event_file.read())
            _, data = _trim_id(data)
            srv = ('event', Event)
            data = _delete_fields(srv, data)
            new_event = Event(**data)
            save_to_db(new_event)
            # make the importing user the OWNER of the new event
            role = Role.query.filter_by(name=OWNER).first()
            user = User.query.filter_by(id=creator_id).first()
            uer = UsersEventsRoles(user_id=user.id, event_id=new_event.id,
                                   role_id=role.id)
            save_to_db(uer, 'Event Saved')
            write_file(
                path + '/social_links',
                json.dumps(data.get('social_links', [])).encode('utf-8')
            )  # save social_links
            _upload_media_queue(srv, new_event)
        except Exception as e:
            raise make_error('event', er=e)
        # create other services
        try:
            service_ids = {}
            for item in IMPORT_SERIES:
                item[1].is_importing = True
                # context manager so the file handle is closed, not leaked
                with open(path + '/%s' % item[0], 'r') as service_file:
                    data = service_file.read()
                dic = json.loads(data)
                changed_ids = create_service_from_json(
                    task_handle, dic, item, new_event.id, service_ids)
                service_ids[item[0]] = changed_ids.copy()
                CUR_ID = None
                item[1].is_importing = False
        except IOError:
            # roll back the partially imported event before reporting
            db.session.delete(new_event)
            db.session.commit()
            raise NotFoundError('File %s missing in event zip' % item[0])
        except ValueError:
            db.session.delete(new_event)
            db.session.commit()
            raise make_error(item[0],
                             er=ServerError(source='Zip Upload',
                                            detail='Invalid json'))
        except Exception:
            print(traceback.format_exc())
            db.session.delete(new_event)
            db.session.commit()
            raise make_error(item[0], id_=CUR_ID)
        # run uploads
        _upload_media(task_handle, new_event.id, path)
        # return
        return new_event
def import_event_json(task_handle, zip_path):
    """Import and create an event (plus its sub-services) from a JSON zip archive.

    Extracts the zip into the import upload directory, builds the core
    Event from the ``event`` JSON file, then imports every service listed
    in IMPORT_SERIES. Media uploads queued during import are run at the end.

    Args:
        task_handle: handle used to report progress via update_state().
        zip_path: path to the uploaded zip containing an ``event`` file
            and one JSON file per entry in IMPORT_SERIES.

    Returns:
        The newly created Event instance.

    Raises:
        NotFoundError: if a required service file is missing from the zip.
        Exception: whatever make_error() constructs when the event core or
            a service fails to import (the partially created event is
            deleted first).
    """
    global CUR_ID, UPLOAD_QUEUE
    UPLOAD_QUEUE = []
    update_state(task_handle, 'Started')
    with app.app_context():
        path = app.config['BASE_DIR'] + '/static/uploads/import_event'
        # delete existing files
        if os.path.isdir(path):
            shutil.rmtree(path, ignore_errors=True)
        # extract files from zip
        with zipfile.ZipFile(zip_path, "r") as z:
            z.extractall(path)
        # create event
        try:
            update_state(task_handle, 'Importing event core')
            # context manager so the file handle is closed, not leaked
            with open(path + '/event', 'r') as event_file:
                data = json.loads(event_file.read())
            _, data = _trim_id(data)
            srv = ('event', Event)
            data = _delete_fields(srv, data)
            new_event = Event(**data)
            db.session.add(new_event)
            db.session.commit()
            write_file(
                path + '/social_links',
                json.dumps(data.get('social_links', []))
            )  # save social_links
            _upload_media_queue(srv, new_event)
        except Exception as e:
            raise make_error('event', er=e)
        # create other services
        try:
            service_ids = {}
            for item in IMPORT_SERIES:
                item[1].is_importing = True
                # context manager so the file handle is closed, not leaked
                with open(path + '/%s' % item[0], 'r') as service_file:
                    data = service_file.read()
                dic = json.loads(data)
                changed_ids = create_service_from_json(
                    task_handle, dic, item, new_event.id, service_ids)
                service_ids[item[0]] = changed_ids.copy()
                CUR_ID = None
                item[1].is_importing = False
        except IOError:
            # roll back the partially imported event before reporting
            db.session.delete(new_event)
            db.session.commit()
            raise NotFoundError('File %s missing in event zip' % item[0])
        except ValueError:
            db.session.delete(new_event)
            db.session.commit()
            raise make_error(item[0],
                             er=ServerError(source='Zip Upload',
                                            detail='Invalid json'))
        except Exception:
            print(traceback.format_exc())
            db.session.delete(new_event)
            db.session.commit()
            raise make_error(item[0], id_=CUR_ID)
        # run uploads
        _upload_media(task_handle, new_event.id, path)
        # return
        return new_event