def rebuild_elastic_index():
    """Rebuild the content-API elastic index.

    Creates a timestamped clone index, copies the mapping, reindexes all
    documents into it, drops the old index, and finally points the original
    index name at the clone via an alias.
    """
    index_name = app.config['CONTENTAPI_ELASTICSEARCH_INDEX']
    print('Starting index rebuilding for index: {}'.format(index_name))
    try:
        es_client = get_es(app.config['CONTENTAPI_ELASTICSEARCH_URL'])
        # Random suffix keeps the clone name unique per rebuild run.
        clone_name = index_name + '-' + get_random_string()
        print('Creating index: ', clone_name)
        app.data.elastic.create_index(clone_name, app.config['ELASTICSEARCH_SETTINGS'])
        real_name = app.data.elastic.get_index_by_alias(clone_name)
        print('Putting mapping for index: ', clone_name)
        app.data.elastic.put_mapping(app, clone_name)
        print('Starting index rebuilding.')
        reindex(es_client, index_name, clone_name)
        print('Finished index rebuilding.')
        print('Deleting index: ', index_name)
        get_indices(es_client).delete(index_name)
        print('Creating alias: ', index_name)
        get_indices(es_client).put_alias(index=real_name, name=index_name)
        print('Alias created.')
        print('Deleting clone name alias')
        get_indices(es_client).delete_alias(name=clone_name, index=real_name)
        print('Deleted clone name alias')
    except elasticsearch.exceptions.NotFoundError as not_found:
        # Best-effort: a missing index/alias is reported but not fatal.
        print(not_found)
    print('Index {0} rebuilt successfully.'.format(index_name))
def run(self, index_name=None):
    """Rebuild one elastic index, defaulting to the configured one.

    :param index_name: name of the index to rebuild; when None the
        ``ELASTICSEARCH_INDEX`` setting is used
    :raises Exception: when the requested index is not configured
    """
    # if no index name is passed then use the configured one
    index_name = index_name if index_name else superdesk.app.config['ELASTICSEARCH_INDEX']
    print('Starting index rebuilding for index: {}'.format(index_name))
    if index_name not in self._get_available_indexes():
        raise Exception("Index {} is not configured".format(index_name))
    try:
        es_client = get_es(superdesk.app.config['ELASTICSEARCH_URL'])
        # Unique clone name so concurrent/repeated rebuilds never collide.
        clone_name = index_name + '-' + get_random_string()
        print('Creating index: ', clone_name)
        superdesk.app.data.elastic.create_index(clone_name, superdesk.app.config['ELASTICSEARCH_SETTINGS'])
        real_name = superdesk.app.data.elastic.get_index_by_alias(clone_name)
        print('Putting mapping for index: ', clone_name)
        superdesk.app.data.elastic.put_mapping(superdesk.app, clone_name)
        print('Starting index rebuilding.')
        reindex(es_client, index_name, clone_name)
        print('Finished index rebuilding.')
        print('Deleting index: ', index_name)
        get_indices(es_client).delete(index_name)
        print('Creating alias: ', index_name)
        get_indices(es_client).put_alias(index=real_name, name=index_name)
        print('Alias created.')
        print('Deleting clone name alias')
        get_indices(es_client).delete_alias(name=clone_name, index=real_name)
        print('Deleted clone name alias')
    except elasticsearch.exceptions.NotFoundError as not_found:
        # Missing index/alias is logged but does not abort the command.
        print(not_found)
    print('Index {0} rebuilt successfully.'.format(index_name))
def create(self, docs, **kwargs):
    """Crop each doc's original rendition and store the result in media storage.

    Pops ``item`` and ``crop`` from each doc, produces the cropped image,
    uploads it with its metadata and writes href/width/height/metadata back
    onto the doc.

    :return: list of stored media ids (one per successful crop)
    """
    media_ids = []
    for doc in docs:
        item = doc.pop("item")
        crop = doc.pop("crop")
        original = item["renditions"]["original"]
        # 800x600 are fallback dimensions when the rendition lacks them.
        size = get_crop_size(crop, original.get("width", 800), original.get("height", 600))
        source_file = get_file(original, item)
        filename = get_random_string()
        success, cropped = crop_image(source_file, filename, crop, size)
        if success:
            metadata = encode_metadata(process_image(source_file))
            metadata.update({"length": json.dumps(len(cropped.getvalue()))})
            cropped = fix_orientation(cropped)
            media = app.media.put(
                cropped, filename=filename, content_type=original["mimetype"], metadata=metadata
            )
            doc["href"] = app.media.url_for_media(media, original["mimetype"])
            doc["width"] = cropped.width
            doc["height"] = cropped.height
            doc["metadata"] = metadata
            media_ids.append(media)
    return media_ids
def run(self, index_name=None):
    """Rebuild one or all configured elastic indexes.

    :param index_name: rebuild only this index when given; otherwise every
        configured index is rebuilt in turn
    :raises Exception: when a requested index is not configured
    """
    # if no index name is passed then use the configured one
    indexes = list(current_app.data.elastic._get_indexes().keys())
    if index_name and index_name in indexes:
        indexes = [index_name]
    elif index_name:
        raise Exception("Index {} is not configured".format(index_name))
    for index_name in indexes:
        try:
            print('Starting index rebuilding for index: {}'.format(index_name))
            es_client = get_es(superdesk.app.config['ELASTICSEARCH_URL'])
            # Random suffix keeps the clone unique per rebuild run.
            clone_name = index_name + '-' + get_random_string()
            print('Creating index: ', clone_name)
            superdesk.app.data.elastic.create_index(
                clone_name, superdesk.app.config['ELASTICSEARCH_SETTINGS'])
            real_name = superdesk.app.data.elastic.get_index_by_alias(clone_name)
            print('Putting mapping for index: ', clone_name)
            superdesk.app.data.elastic.put_mapping(superdesk.app, clone_name)
            print('Starting index rebuilding.')
            reindex(es_client, index_name, clone_name)
            print('Finished index rebuilding.')
            print('Deleting index: ', index_name)
            get_indices(es_client).delete(index_name)
            print('Creating alias: ', index_name)
            get_indices(es_client).put_alias(index=real_name, name=index_name)
            print('Alias created.')
            print('Deleting clone name alias')
            get_indices(es_client).delete_alias(name=clone_name, index=real_name)
            print('Deleted clone name alias')
        except elasticsearch.exceptions.NotFoundError as not_found:
            # A missing index/alias is reported; the loop moves on.
            print(not_found)
        print('Index {0} rebuilt successfully.'.format(index_name))
def store_reset_password_token(self, doc, email, days_alive, user_id):
    """Persist a password-reset token document for the given user.

    :param doc: document to populate and store
    :param email: user email (kept for interface compatibility)
    :param days_alive: number of days before the token expires
    :param user_id: id of the user requesting the reset
    :return: ids of the created document(s)
    """
    now = utcnow()
    doc[app.config['DATE_CREATED']] = now
    doc[app.config['LAST_UPDATED']] = now
    doc['expire_time'] = now + timedelta(days=days_alive)
    doc['user'] = user_id
    doc['token'] = get_random_string()
    return super().create([doc])
def on_create(self, docs):
    """Authenticate each session doc and attach a fresh token.

    Rejects inactive users and strips the plaintext password from the doc
    before it is stored.

    :raises UserInactiveError: when the authenticated user is not active
    """
    for doc in docs:
        authenticated_user = self.authenticate(doc)
        if not authenticated_user.get('is_active', False):
            raise UserInactiveError()
        doc['user'] = authenticated_user['_id']
        doc['token'] = utils.get_random_string(40)
        # Never persist the plaintext password.
        del doc['password']
def store_reset_password_token(self, doc, email, days_alive, user_id):
    """Persist a password-reset token document for the given user.

    The token lifetime comes from the ``RESET_PASSWORD_TOKEN_TIME_TO_LIVE``
    setting.

    NOTE(review): the ``days_alive`` parameter is not used here — the config
    value takes precedence; kept for interface compatibility.

    :return: ids of the created document(s)
    """
    token_ttl = app.config['RESET_PASSWORD_TOKEN_TIME_TO_LIVE']
    now = utcnow()
    doc[app.config['DATE_CREATED']] = now
    doc[app.config['LAST_UPDATED']] = now
    doc['expire_time'] = now + timedelta(days=token_ttl)
    doc['user'] = user_id
    doc['token'] = get_random_string()
    return super().create([doc])
def on_create(self, docs):
    """Authenticate each session doc and attach a fresh token.

    Users whose status is 'inactive' are rejected; the plaintext password is
    removed from the doc before storage.

    :raises UserInactiveError: when the user's status is 'inactive'
    """
    for doc in docs:
        authenticated_user = self.authenticate(doc)
        # Missing status defaults to 'active' (only explicit inactive is blocked).
        if authenticated_user.get('status', 'active') == 'inactive':
            raise UserInactiveError()
        doc['user'] = authenticated_user['_id']
        doc['token'] = utils.get_random_string(40)
        # Never persist the plaintext password.
        del doc['password']
def initialize_reset_password(self, doc, email):
    """Create a reset-password token for the user with the given email and mail it.

    :param doc: reset request document to populate and store
    :param email: email address the reset was requested for
    :return: ids of the created document(s)
    :raises superdesk.SuperdeskError: with a 201 status for unknown emails,
        so the response does not reveal whether the address exists
    """
    user = app.data.find_one('users', req=None, email=email)
    if not user:
        logger.warning('User password reset triggered with invalid email: %s' % email)
        # Deliberately reply "Created" to avoid leaking which emails are registered.
        raise superdesk.SuperdeskError(status_code=201, message='Created')
    doc[app.config['DATE_CREATED']] = utcnow()
    doc[app.config['LAST_UPDATED']] = utcnow()
    doc['user'] = user['_id']
    doc['token'] = get_random_string()
    created_ids = super().create([doc])
    send_reset_password_email(doc)
    # Strip the token/secret fields from the doc returned to the client.
    self.remove_private_data(doc)
    return created_ids
def create(self, docs, **kwargs):
    """Export the requested archive items into an in-memory zip.

    Each item is rendered by the selected formatter and written as one file
    in the zip; the zip is stored in media storage and its download URL and
    failure count are written back onto the doc.

    :raises SuperdeskApiError: on any unexpected failure while building the zip
    """
    doc = docs[0]
    formatter = self._validate_and_get_formatter(doc)
    validate = doc.get("validate", False)
    archive_service = get_resource_service("archive")
    unsuccessful_exports = 0
    try:
        in_memory_zip = BytesIO()
        with ZipFile(in_memory_zip, "a") as zip:
            for item_id in doc.get("item_ids"):
                item = archive_service.find_one(req=None, _id=item_id)
                if not item:
                    unsuccessful_exports += 1
                    continue
                try:
                    if validate:
                        self._validate_for_publish(item)
                    contents = formatter.export(item)
                    # Remove invalid filename chars (for windows OS) and create the filename
                    safe_slugline = re.sub(r'[\\/*?:"<>|]', "", item.get("slugline", ""))
                    filename = safe_slugline + "_" + str(item.get("unique_id")) + ".txt"
                    zip.writestr(filename, contents.encode("UTF-8"))
                except ValidationError:
                    unsuccessful_exports += 1
        url = None
        # Store the zip file on media_storage
        # only if at least one item is formatted successfully
        if unsuccessful_exports < len(doc.get("item_ids")):
            zip_id = app.media.put(
                in_memory_zip.getvalue(),
                filename="export_{}.zip".format(get_random_string()),
                content_type="application/zip",
                folder="temp",
            )
            url = app.media.url_for_download(zip_id, "application/zip")
        doc["url"] = url
        doc["failures"] = unsuccessful_exports
        return [len(docs)]
    except Exception as ex:
        raise SuperdeskApiError.badRequestError(
            _("Error creating export zip file. Try again please."), exception=ex
        )
def __init__(self, url, callback):
    """Create consumer.

    :param string url: Broker URL
    :param callback: callback function to call on message arrival
    """
    super().__init__(url)
    self.callback = callback
    # Unique queue per consumer instance so broadcasts reach every socket server.
    self.queue_name = 'socket_consumer_{}'.format(get_random_string())
    # expire message after 10 seconds and queue after 60 seconds
    self.queue = Queue(
        self.queue_name,
        exchange=self.socket_exchange,
        channel=self.channel,
        queue_arguments={'x-message-ttl': 10000, 'x-expires': 60000},
    )
def __init__(self, url, callback, exchange_name=None):
    """Create consumer.

    :param string url: Broker URL
    :param callback: callback function to call on message arrival
    :param string exchange_name: optional exchange to bind to
    """
    super().__init__(url, exchange_name)
    self.callback = callback
    # Unique queue per consumer instance so broadcasts reach every socket server.
    self.queue_name = 'socket_consumer_{}'.format(get_random_string())
    # expire message after 10 seconds and queue after 60 seconds
    self.queue = Queue(
        self.queue_name,
        exchange=self.socket_exchange,
        channel=self.channel,
        queue_arguments={'x-message-ttl': 10000, 'x-expires': 60000},
    )
def create(self, docs, **kwargs):
    """Crop each doc's original rendition and store the result in media storage.

    Pops ``item`` and ``crop`` from each doc, produces the cropped image and
    writes href/width/height back onto the doc.

    :return: list of stored media ids (one per successful crop)
    """
    media_ids = []
    for doc in docs:
        item = doc.pop('item')
        crop = doc.pop('crop')
        size = get_crop_size(crop)
        original = item['renditions']['original']
        source_file = get_file(original, item)
        filename = get_random_string()
        success, cropped = crop_image(source_file, filename, crop, size)
        if success:
            media = app.media.put(cropped, filename, original['mimetype'])
            doc['href'] = app.media.url_for_media(media, original['mimetype'])
            doc['width'] = cropped.width
            doc['height'] = cropped.height
            media_ids.append(media)
    return media_ids
def create(self, docs, **kwargs):
    """Export the requested archive items into an in-memory zip.

    Each item is rendered by the selected formatter and written as one file
    in the zip; the zip is stored in media storage and its download URL and
    failure count are written back onto the doc.

    :raises SuperdeskApiError: on any unexpected failure while building the zip
    """
    doc = docs[0]
    formatter = self._validate_and_get_formatter(doc)
    validate = doc.get('validate', False)
    archive_service = get_resource_service('archive')
    unsuccessful_exports = 0
    try:
        in_memory_zip = BytesIO()
        with ZipFile(in_memory_zip, 'a') as zip:
            for item_id in doc.get('item_ids'):
                item = archive_service.find_one(req=None, _id=item_id)
                if item:
                    try:
                        if validate:
                            self._validate_for_publish(item)
                        contents = formatter.export(item)
                        # Remove invalid filename chars (for windows OS) and create the filename
                        filename = re.sub(r'[\\/*?:"<>|]', '', item.get('slugline', '')) +\
                            '_' + str(item.get('unique_id')) + '.txt'
                        zip.writestr(filename, contents.encode("UTF-8"))
                    # Was a bare `except:`, which also swallowed SystemExit and
                    # KeyboardInterrupt; narrowed to Exception so the item is
                    # still counted as failed without masking interpreter exits.
                    except Exception:
                        unsuccessful_exports += 1
                else:
                    unsuccessful_exports += 1
        url = None
        # Store the zip file on media_storage
        # only if at least one item is formatted successfully
        if unsuccessful_exports < len(doc.get('item_ids')):
            zip_id = app.media.put(
                in_memory_zip.getvalue(),
                filename='export_{}.zip'.format(get_random_string()),
                content_type='application/zip',
                folder='temp'
            )
            url = app.media.url_for_download(zip_id, 'application/zip')
        doc['url'] = url
        doc['failures'] = unsuccessful_exports
        return [len(docs)]
    except Exception as ex:
        raise SuperdeskApiError.badRequestError('Error creating export zip file. Try again please.',
                                                exception=ex)
def create(self, docs, **kwargs):
    """Crop each doc's original rendition and store the result with metadata.

    Pops ``item`` and ``crop`` from each doc, crops the original image,
    fixes orientation, uploads it with metadata, and writes
    href/width/height back onto the doc.

    :return: list of stored media ids (one per successful crop)
    """
    media_ids = []
    for doc in docs:
        item = doc.pop('item')
        crop = doc.pop('crop')
        size = get_crop_size(crop)
        original = item['renditions']['original']
        source_file = get_file(original, item)
        filename = get_random_string()
        success, cropped = crop_image(source_file, filename, crop, size)
        if success:
            metadata = encode_metadata(process_image(source_file))
            metadata.update({'length': json.dumps(len(cropped.getvalue()))})
            cropped = fix_orientation(cropped)
            media = app.media.put(cropped, filename, original['mimetype'], metadata=metadata)
            doc['href'] = app.media.url_for_media(media, original['mimetype'])
            doc['width'] = cropped.width
            doc['height'] = cropped.height
            media_ids.append(media)
    return media_ids
def create(self, docs, **kwargs):
    """Crop each doc's original rendition and store the result with metadata.

    Uses the original rendition's dimensions (falling back to 800x600) to
    compute the crop size, fixes orientation, uploads with metadata, and
    writes href/width/height back onto the doc.

    :return: list of stored media ids (one per successful crop)
    """
    media_ids = []
    for doc in docs:
        item = doc.pop('item')
        crop = doc.pop('crop')
        original = item['renditions']['original']
        # 800x600 are fallback dimensions when the rendition lacks them.
        size = get_crop_size(crop, original.get('width', 800), original.get('height', 600))
        source_file = get_file(original, item)
        filename = get_random_string()
        success, cropped = crop_image(source_file, filename, crop, size)
        if success:
            metadata = encode_metadata(process_image(source_file))
            metadata.update({'length': json.dumps(len(cropped.getvalue()))})
            cropped = fix_orientation(cropped)
            media = app.media.put(cropped, filename, original['mimetype'], metadata=metadata)
            doc['href'] = app.media.url_for_media(media, original['mimetype'])
            doc['width'] = cropped.width
            doc['height'] = cropped.height
            media_ids.append(media)
    return media_ids
def run(self):
    """Rebuild the configured elastic index via a clone + alias swap."""
    index_name = superdesk.app.config['ELASTICSEARCH_INDEX']
    print('Starting index rebuilding for index: ', index_name)
    try:
        es_client = get_es(superdesk.app.config['ELASTICSEARCH_URL'])
        # Random suffix keeps the clone name unique per rebuild run.
        clone_name = index_name + '-' + get_random_string()
        print('Creating index: ', clone_name)
        superdesk.app.data.elastic.create_index(clone_name, superdesk.app.config['ELASTICSEARCH_SETTINGS'])
        print('Putting mapping for index: ', clone_name)
        superdesk.app.data.elastic.put_mapping(superdesk.app, clone_name)
        print('Starting index rebuilding.')
        reindex(es_client, index_name, clone_name)
        print('Finished index rebuilding.')
        print('Deleting index: ', index_name)
        get_indices(es_client).delete(index_name)
        print('Creating alias: ', index_name)
        get_indices(es_client).put_alias(index=clone_name, name=index_name)
        print('Alias created.')
    except elasticsearch.exceptions.NotFoundError as not_found:
        # Missing index/alias is logged but not fatal.
        print(not_found)
    print('Index {0} rebuilt successfully.'.format(index_name))
def run(self):
    """Rebuild the configured elastic index via a clone + alias swap.

    Creates a clone index, reindexes all documents into it, deletes the old
    index, and points the original index name at the clone with an alias.
    """
    index_name = superdesk.app.config['ELASTICSEARCH_INDEX']
    print('Starting index rebuilding for index: ', index_name)
    try:
        es = get_es(superdesk.app.config['ELASTICSEARCH_URL'])
        # Random suffix keeps the clone name unique per rebuild run.
        clone_name = index_name + '-' + get_random_string()
        print('Creating index: ', clone_name)
        get_indices(es).create(clone_name)
        print('Putting mapping for index: ', clone_name)
        superdesk.app.data.elastic.put_mapping(superdesk.app, clone_name)
        print('Starting index rebuilding.')
        reindex(es, index_name, clone_name)
        print('Finished index rebuilding.')
        print('Deleting index: ', index_name)
        get_indices(es).delete(index_name)
        print('Creating alias: ', index_name)
        # BUG FIX: put_alias(index_name, clone_name) passed the just-deleted
        # index as `index` and the clone as the alias name (put_alias takes
        # (index, name)), which raised NotFoundError and left no alias.
        # The alias named `index_name` must point at the clone index.
        get_indices(es).put_alias(index=clone_name, name=index_name)
        print('Alias created.')
    except elasticsearch.exceptions.NotFoundError as nfe:
        # Missing index/alias is logged but not fatal.
        print(nfe)
    print('Index {0} rebuilt successfully.'.format(index_name))
def set_auth_default(self, doc, user_id):
    """Attach the user id and a fresh 40-char session token to the auth doc.

    Removes the plaintext password (if present) so it is never persisted.
    """
    doc["user"] = user_id
    doc["token"] = utils.get_random_string(40)
    # pop with default: no error when the password key is absent.
    doc.pop("password", None)
def set_auth_default(self, doc, user_id):
    """Attach the user id and a fresh 40-char session token to the auth doc.

    Removes the plaintext password (if present) so it is never persisted.
    """
    doc['user'] = user_id
    doc['token'] = utils.get_random_string(40)
    # pop with default: no error when the password key is absent.
    doc.pop('password', None)
def set_auth_default(self, doc, user_id):
    """Attach the user id and a fresh 40-char session token to the auth doc."""
    doc['user'] = user_id
    doc['token'] = utils.get_random_string(40)
def set_auth_default(self, doc, user_id):
    """Attach the user id and a fresh 40-char session token to the auth doc."""
    doc["user"] = user_id
    doc["token"] = utils.get_random_string(40)
def on_create(self, docs):
    """Authenticate each session doc and attach a fresh token.

    Uses the module-level ``authenticate`` helper against ``app.data`` and
    strips the plaintext password from each doc before it is stored.
    """
    for doc in docs:
        authenticated_user = authenticate(doc, app.data)
        doc['user'] = authenticated_user['_id']
        doc['token'] = utils.get_random_string(40)
        # Never persist the plaintext password.
        del doc['password']