def create_service_from_json(task_handle, data, srv, event_id, service_ids=None):
    """
    Given :data as json, create the service on server.

    :param task_handle: handle used to report progress via update_state
    :param data: list of service dicts decoded from JSON
    :param srv: (service_name, DAO) pair for the service being imported
    :param event_id: id of the event the created objects belong to
    :param service_ids: mapping of ids of already created services;
        used for mapping old ids to new ones
    :return: dict mapping old object ids -> newly created object ids
    """
    # BUG FIX: the default was a mutable `{}`, which Python evaluates once
    # at def time — state leaked between calls. Use the None sentinel.
    if service_ids is None:
        service_ids = {}
    global CUR_ID
    # sort by id so objects are created in a deterministic order
    data.sort(key=lambda k: k['id'])
    ids = {}
    ct = 0
    total = len(data)
    # start creating
    for obj in data:
        # update status
        ct += 1
        update_state(task_handle, 'Importing %s (%d/%d)' % (srv[0], ct, total))
        # trim id field; remember the old id for error reporting
        old_id, obj = _trim_id(obj)
        CUR_ID = old_id
        # delete not needed fields
        obj = _delete_fields(srv, obj)
        # remap references to already-imported services
        obj = _fix_related_fields(srv, obj, service_ids)
        # create object
        new_obj = srv[1].create(event_id, obj, 'dont')[0]
        ids[old_id] = new_obj.id
        # add uploads to queue
        _upload_media_queue(srv, new_obj)
    return ids
def create_service_from_json(task_handle, data, srv, event_id, service_ids=None):
    """
    Given :data as json, create the service on server.
    :service_ids are the mapping of ids of already created services.
    Used for mapping old ids to new.
    Returns a dict of old id -> newly created id.
    """
    service_ids = {} if service_ids is None else service_ids
    global CUR_ID
    name, dao = srv[0], srv[1]
    # deterministic creation order: ascending by original id
    data.sort(key=lambda record: record['id'])
    total = len(data)
    id_map = {}
    for position, record in enumerate(data, start=1):
        # report progress for this item
        update_state(task_handle,
                     'Importing %s (%d/%d)' % (name, position, total))
        # strip the exported id, keeping it for the old->new mapping
        old_id, record = _trim_id(record)
        CUR_ID = old_id
        # drop fields the create endpoint does not accept
        record = _delete_fields(srv, record)
        # rewrite references to services imported earlier
        record = _fix_related_fields(srv, record, service_ids)
        # create the object and record its fresh id
        created = dao.create(event_id, record, 'dont')[0]
        id_map[old_id] = created.id
        # queue any media this object carries for later upload
        _upload_media_queue(srv, created)
    return id_map
def import_event_json(task_handle, zip_path):
    """
    Imports and creates event from json zip.

    :param task_handle: handle used to report progress via update_state
    :param zip_path: path of the zip archive containing the exported json
    :return: the newly created event
    :raises: errors produced by make_error on malformed data, and
        NotFoundError when a required file is missing from the zip
    """
    global CUR_ID, UPLOAD_QUEUE
    UPLOAD_QUEUE = []
    update_state(task_handle, 'IMPORTING')

    with app.app_context():
        path = app.config['BASE_DIR'] + '/static/temp/import_event'
        # delete existing files
        if os.path.isdir(path):
            shutil.rmtree(path, ignore_errors=True)
        # extract files from zip
        with zipfile.ZipFile(zip_path, "r") as z:
            z.extractall(path)
        # create event
        try:
            # context manager avoids leaking the file handle
            with open(path + '/event', 'r') as f:
                data = json.loads(f.read())
            _, data = _trim_id(data)
            srv = ('event', EventDAO)
            data = _delete_fields(srv, data)
            new_event = EventDAO.create(data, 'dont')[0]
            version_data = data.get('version', {})
            _upload_media_queue(srv, new_event)
        except BaseError as e:
            raise make_error('event', er=e)
        except Exception:
            raise make_error('event')
        # create other services
        try:
            service_ids = {}
            for item in IMPORT_SERIES:
                with open(path + '/%s' % item[0], 'r') as f:
                    dic = json.loads(f.read())
                # BUG FIX: task_handle was missing from this call, so the
                # decoded json was bound to the task_handle parameter and
                # the real arguments all shifted by one.
                changed_ids = create_service_from_json(
                    task_handle, dic, item, new_event.id, service_ids)
                service_ids[item[0]] = changed_ids.copy()
                CUR_ID = None
        except BaseError as e:
            # roll back the partially imported event
            EventDAO.delete(new_event.id)
            raise make_error(item[0], er=e, id_=CUR_ID)
        except IOError:
            EventDAO.delete(new_event.id)
            raise NotFoundError('File %s missing in event zip' % item[0])
        except ValueError:
            EventDAO.delete(new_event.id)
            raise make_error(item[0], er=ServerError('Invalid json'))
        except Exception:
            EventDAO.delete(new_event.id)
            raise make_error(item[0], id_=CUR_ID)
        # run uploads
        _upload_media(task_handle, new_event.id, path)
        # set version
        VersionUpdater(False, new_event.id, '').set(version_data)
        # return
        return new_event
def import_event_json(task_handle, zip_path):
    """
    Imports and creates event from json zip.

    :param task_handle: handle used to report progress via update_state
    :param zip_path: path of the zip archive containing the exported json
    :return: the newly created event
    :raises: errors produced by make_error on malformed data, and
        NotFoundError when a required file is missing from the zip
    """
    global CUR_ID, UPLOAD_QUEUE
    UPLOAD_QUEUE = []
    update_state(task_handle, 'Started')

    with app.app_context():
        path = app.config['BASE_DIR'] + '/static/uploads/import_event'
        # delete existing files
        if os.path.isdir(path):
            shutil.rmtree(path, ignore_errors=True)
        # extract files from zip
        with zipfile.ZipFile(zip_path, "r") as z:
            z.extractall(path)
        # create event
        try:
            update_state(task_handle, 'Importing event core')
            # context manager avoids leaking the file handle
            with open(path + '/event', 'r') as f:
                data = json.loads(f.read())
            _, data = _trim_id(data)
            srv = ('event', EventDAO)
            data = _delete_fields(srv, data)
            new_event = EventDAO.create(data, 'dont')[0]
            version_data = data.get('version', {})
            # save social_links for a later import step
            write_file(path + '/social_links',
                       json.dumps(data.get('social_links', [])))
            _upload_media_queue(srv, new_event)
        except BaseError as e:
            raise make_error('event', er=e)
        except Exception as e:
            raise make_error('event', er=e)
        # create other services
        try:
            service_ids = {}
            for item in IMPORT_SERIES:
                # NOTE(review): is_importing stays True if an exception
                # escapes before the reset below — presumably acceptable
                # since the event is deleted, but confirm these DAOs are
                # not shared across concurrent imports.
                item[1].is_importing = True
                with open(path + '/%s' % item[0], 'r') as f:
                    dic = json.loads(f.read())
                changed_ids = create_service_from_json(
                    task_handle, dic, item, new_event.id, service_ids)
                service_ids[item[0]] = changed_ids.copy()
                CUR_ID = None
                item[1].is_importing = False
        except BaseError as e:
            # roll back the partially imported event
            EventDAO.delete(new_event.id)
            raise make_error(item[0], er=e, id_=CUR_ID)
        except IOError:
            EventDAO.delete(new_event.id)
            raise NotFoundError('File %s missing in event zip' % item[0])
        except ValueError:
            EventDAO.delete(new_event.id)
            raise make_error(item[0], er=ServerError('Invalid json'))
        except Exception:
            # print() call form works on both Python 2 and 3
            print(traceback.format_exc())
            EventDAO.delete(new_event.id)
            raise make_error(item[0], id_=CUR_ID)
        # run uploads
        _upload_media(task_handle, new_event.id, path)
        # set version
        VersionUpdater(False, new_event.id, '').set(version_data)
        # return
        return new_event
def _upload_media(task_handle, event_id, base_path):
    """
    Actually uploads the resources queued in the global UPLOAD_QUEUE.

    :param task_handle: handle used to report progress via update_state
    :param event_id: id of the event the media belong to
    :param base_path: directory relative media paths are resolved against

    Side effects: rewrites each queued item's media field to its uploaded
    url (or None on failure) and clears UPLOAD_QUEUE when done.
    """
    global UPLOAD_QUEUE
    total = len(UPLOAD_QUEUE)
    ct = 0
    for entry in UPLOAD_QUEUE:
        # update progress
        ct += 1
        update_state(task_handle, 'Uploading media (%d/%d)' % (ct, total))
        # get upload infos
        name, dao = entry['srv']
        id_ = entry['id']
        if name == 'event':
            item = dao.get(event_id)
        else:
            item = dao.get(event_id, id_)
        # get cur file
        field = entry['field']
        path = getattr(item, field)
        if path.startswith('/'):
            # relative files shipped inside the zip
            path = base_path + path
            if os.path.isfile(path):
                filename = path.rsplit('/', 1)[1]
                # renamed from `file` to avoid shadowing the builtin
                file_obj = UploadedFile(path, filename)
            else:
                file_obj = ''  # sentinel: remove current file setting
        else:
            # absolute links: try to download the remote resource
            try:
                filename = UPLOAD_PATHS[name][field].rsplit('/', 1)[1]
                if is_downloadable(path):
                    r = requests.get(path, allow_redirects=True)
                    file_obj = UploadedMemory(r.content, filename)
                else:
                    file_obj = None
            except Exception:
                # BUG FIX: narrowed from a bare `except:` so SystemExit and
                # KeyboardInterrupt still propagate; None means "don't
                # update the current file setting"
                file_obj = None
        if file_obj is None:
            continue
        # upload
        try:
            if file_obj == '':
                # force the except path so the field is cleared below
                raise Exception()
            key = UPLOAD_PATHS[name][field]
            if name == 'event':
                key = key.format(event_id=event_id)
            else:
                key = key.format(event_id=event_id, id=id_)
            # print() call form works on both Python 2 and 3
            print(key)
            new_url = upload(file_obj, key)
        except Exception:
            print(traceback.format_exc())
            new_url = None
        setattr(item, field, new_url)
        save_to_db(item, msg='Url updated')
    # clear queue
    UPLOAD_QUEUE = []
    return
def update_status(task_handle, status):
    """Forward *status* to update_state, skipping falsy handles or statuses."""
    # guard clause: nothing to report without both a handle and a message
    if not task_handle or not status:
        return
    update_state(task_handle, status)
def _upload_media(task_handle, event_id, base_path):
    """
    Actually uploads the resources queued in the global UPLOAD_QUEUE.

    :param task_handle: handle used to report progress via update_state
    :param event_id: id of the event the media belong to
    :param base_path: directory relative media paths are resolved against

    Side effects: rewrites each queued item's media field to its uploaded
    url (or None on failure) and clears UPLOAD_QUEUE when done.
    """
    global UPLOAD_QUEUE
    total = len(UPLOAD_QUEUE)
    ct = 0
    for entry in UPLOAD_QUEUE:
        # update progress
        ct += 1
        update_state(task_handle, 'UPLOADING MEDIA (%d/%d)' % (ct, total))
        # get upload infos
        name, dao = entry['srv']
        id_ = entry['id']
        if name == 'event':
            item = dao.get(event_id)
        else:
            item = dao.get(event_id, id_)
        # get cur file
        field = entry['field']
        path = getattr(item, field)
        if path.startswith('/'):
            # relative files shipped inside the zip
            path = base_path + path
            if os.path.isfile(path):
                filename = path.rsplit('/', 1)[1]
                # renamed from `file` to avoid shadowing the builtin
                file_obj = UploadedFile(path, filename)
            else:
                file_obj = ''  # sentinel: remove current file setting
        else:
            # absolute links: try to download the remote resource
            try:
                filename = UPLOAD_PATHS[name][field].rsplit('/', 1)[1]
                if is_downloadable(path):
                    r = requests.get(path, allow_redirects=True)
                    file_obj = UploadedMemory(r.content, filename)
                else:
                    file_obj = None
            except Exception:
                # BUG FIX: narrowed from a bare `except:` so SystemExit and
                # KeyboardInterrupt still propagate; None means "don't
                # update the current file setting"
                file_obj = None
        if file_obj is None:
            continue
        # upload
        try:
            if file_obj == '':
                # force the except path so the field is cleared below
                raise Exception()
            key = UPLOAD_PATHS[name][field]
            if name == 'event':
                key = key.format(event_id=event_id)
            else:
                key = key.format(event_id=event_id, id=id_)
            # print() call form works on both Python 2 and 3
            print(key)
            new_url = upload(file_obj, key)
        except Exception:
            print(traceback.format_exc())
            new_url = None
        setattr(item, field, new_url)
        save_to_db(item, msg='Url updated')
    # clear queue
    UPLOAD_QUEUE = []
    return