def cleanErrItem(item_id, count):
    """Best-effort cleanup of a failed item: S3 objects, search index, DB row.

    Runs three independent stages; a failure in any one stage is swallowed
    so the remaining stages still run (this is deliberate best-effort
    cleanup, not transactional deletion).

    :param item_id: string identifier of the item to remove
    :param count: number of stored .jp2 images for the item; the first is
                  stored flat as ``<item_id>.jp2``, the rest under the
                  ``<item_id>/`` prefix as ``<item_id>/<i>.jp2``
    :returns: None
    """
    # Stage 1: remove the image objects (and the prefix key) from S3.
    try:
        bucket = getBucket()
        for i in range(count):
            if i == 0:
                filename = '%s.jp2' % item_id
            else:
                filename = '%s/%s.jp2' % (item_id, i)
            bucket.delete_key(S3_DEFAULT_FOLDER + filename)
        if count > 1:
            # Multi-image items also have a pseudo-directory key to drop.
            filename = '%s/' % item_id
            bucket.delete_key(S3_DEFAULT_FOLDER + filename)
    except Exception:
        # Narrowed from bare except: still best-effort, but no longer
        # swallows KeyboardInterrupt/SystemExit.
        pass
    # Stage 2: remove the item's document from the CloudSearch domain.
    # NOTE(review): document id is sha512 of the raw item_id string,
    # truncated to 128 hex chars — presumably matching the indexing path;
    # confirm against the code that inserts documents.
    try:
        cloudsearch = getCloudSearch(CLOUDSEARCH_ITEM_DOMAIN, 'document')
        cloudsearch.delete(hashlib.sha512(item_id).hexdigest()[:128])
        cloudsearch.commit()
    except Exception:
        pass
    # Stage 3: remove the item record from the database.
    try:
        Item(item_id).delete()
    except Exception:
        pass
ERR_MESSAGE_HTTP = 4 ERR_MESSAGE_IMAGE = 3 ERR_MESSAGE_S3 = 2 ERR_MESSAGE_OTHER = 1 ERR_MESSAGE_NONE = 0 @task_queue.task def ingestQueue(batch_id, item_id, task_id): try: task = Task(batch_id, item_id, task_id) except NoItemInDb, ErrorItemImport: return -1 try: if S3_HOST is not None and S3_DEFAULT_BUCKET is not None: bucket = getBucket() else: # local storage only bucket = None if task.type == 'del': try: item = Item(item_id) if task.url_order > 0: filename = '%s/%s.jp2' % (item_id, task.url_order) else: filename = '%s.jp2' % item_id if bucket is not None: