Example #1
def meta(self):
    self._meta = self._meta or {}
    self._meta['content_hash'] = self.content_hash
    self._meta['foreign_id'] = self.foreign_id
    self._meta['crawler'] = self.crawler
    self._meta['crawler_run'] = self.crawler_run
    return Metadata.from_data(self._meta or {})
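All of the snippets on this page follow the same basic pattern: collect plain, JSON-friendly values in a dict and hand it to Metadata.from_data, which returns a Metadata object whose fields can then be read and set as attributes. A minimal sketch of that round trip, using only field names that appear in these examples; the import path and the concrete values are assumptions, not taken from the source:

from aleph.metadata import Metadata  # import path is an assumption

# Build a plain dict, as the meta() method above does, then convert it.
data = {
    'content_hash': 'e3b0c44298fc1c149afbf4c8996fb924',  # illustrative value
    'foreign_id': 'https://example.org/report.pdf',       # illustrative value
    'crawler': 'example_crawler',
    'crawler_run': 'run-0001',
}
meta = Metadata.from_data(data)

# Fields set in the dict come back as attributes on the Metadata object.
print(meta.foreign_id, meta.crawler_run)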
Example #2
def ingest_url(self, collection_id, metadata, url):
    meta = Metadata.from_data(metadata)
    tmp_path = make_tempfile(meta.file_name, suffix=meta.extension)
    try:
        log.info("Ingesting URL: %s", url)
        res = requests.get(url, stream=True)
        if res.status_code == 404:
            log.info("HTTP not found: %s", url)
            return
        if res.status_code >= 399:
            countdown = 3600 ** self.request.retries
            self.retry(countdown=countdown)
        with open(tmp_path, 'w') as fh:
            for chunk in res.iter_content(chunk_size=1024):
                if chunk:
                    fh.write(chunk)
        if not meta.has('source_url'):
            meta.source_url = res.url
        meta.headers = res.headers
        meta = get_archive().archive_file(tmp_path, meta, move=True)
        Ingestor.dispatch(collection_id, meta)
    except IOError as ioe:
        log.info("IO Failure: %r", ioe)
        countdown = 3600 ** self.request.retries
        self.retry(countdown=countdown)
    except Exception as ex:
        Ingestor.handle_exception(meta, collection_id, ex)
    finally:
        db.session.remove()
        remove_tempfile(tmp_path)
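The ingest_url variants on this page read like bound Celery tasks: they call self.retry and inspect self.request.retries for their back-off. A hedged sketch of how such a task is typically declared and queued; the app object, the task options and the argument values are assumptions, not taken from the snippets:

from celery import Celery

celery = Celery('ingest')  # app name is illustrative

@celery.task(bind=True, max_retries=3)  # bind=True provides the self used by self.retry
def ingest_url(self, collection_id, metadata, url):
    pass  # body as in the examples above

# The caller passes a plain dict; the task rebuilds it with Metadata.from_data.
ingest_url.delay(1, {'file_name': 'report.pdf'}, 'https://example.org/report.pdf')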
Example #3
def ingest_upload(collection_id):
    collection = obj_or_404(Collection.by_id(collection_id))
    authz.require(authz.collection_write(collection.id))
    log_event(request)
    try:
        meta = json.loads(request.form.get('meta', '{}'))
    except Exception as ex:
        raise BadRequest(unicode(ex))

    metas = []
    for storage in request.files.values():
        file_meta = meta.copy()
        file_meta['mime_type'] = storage.mimetype
        file_meta['file_name'] = storage.filename
        validate(file_meta, 'metadata.json#')
        file_meta = Metadata.from_data(file_meta)
        file_meta.crawler_id = 'user_upload:%s' % request.auth_role.id
        file_meta.crawler_run = make_textid()
        sec_fn = os.path.join(get_upload_folder(),
                              secure_filename(storage.filename))
        storage.save(sec_fn)
        ingest_file(collection.id, file_meta, sec_fn, move=True,
                    queue=USER_QUEUE, routing_key=USER_ROUTING_KEY)
        metas.append(file_meta)
    return jsonify({'status': 'ok', 'metadata': metas})
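For the ingest_upload views on this page, a client sends a multipart request: a 'meta' form field holding a JSON object plus one or more file parts, which the view validates and turns into Metadata with Metadata.from_data. A sketch of such a request with the requests library; the URL path is hypothetical, only the form field and file layout follow from the view code:

import json
import requests

# One file part plus a JSON 'meta' form field, mirroring request.files and
# request.form['meta'] in the view above.
files = {'file': ('report.pdf', open('report.pdf', 'rb'), 'application/pdf')}
data = {'meta': json.dumps({'title': 'Quarterly report', 'languages': ['en']})}

# The endpoint path is an assumption; authentication is required by the view
# (it checks write access to the collection) but is omitted here.
res = requests.post('https://example.org/api/collections/1/ingest',
                    data=data, files=files)
print(res.json())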
Example #4
def ingest_upload(collection_id):
    collection = obj_or_404(Collection.by_id(collection_id))
    request.authz.require(request.authz.collection_write(collection.id))
    log_event(request)
    try:
        meta = json.loads(request.form.get('meta', '{}'))
        meta['crawler_id'] = 'user_upload:%s' % request.authz.role.id
        meta['crawler_run'] = make_textid()

    except Exception as ex:
        raise BadRequest(unicode(ex))

    metas = []
    for storage in request.files.values():
        file_meta = meta.copy()
        file_meta['mime_type'] = storage.mimetype
        file_meta['file_name'] = storage.filename
        file_meta['source_path'] = storage.filename
        validate(file_meta, 'metadata.json#')
        file_meta = Metadata.from_data(file_meta)
        sec_fn = os.path.join(upload_folder, secure_filename(storage.filename))
        storage.save(sec_fn)
        ingest_file(collection_id, file_meta, sec_fn, move=True,
                    queue=USER_QUEUE, routing_key=USER_ROUTING_KEY)
        metas.append(file_meta)
    return jsonify({'status': 'ok', 'metadata': metas})
Example #5
def test_normalize(self):
    for number in PHONE_NUMBERS:
        meta = Metadata.from_data({'countries': ['de']})
        analyzer = PhoneNumberAnalyzer(None, meta)
        analyzer.prepare()
        analyzer.on_text(number)
        analyzer.finalize()
Example #6
def ingest_url(self, collection_id, metadata, url):
    meta = Metadata.from_data(metadata)
    if meta.foreign_id is None:
        meta.foreign_id = url
    tmp_path = make_tempfile(meta.file_name, suffix=meta.extension)
    try:
        log.info("Ingesting URL: %s", url)
        res = requests.get(url, stream=True)
        if res.status_code == 404:
            log.info("HTTP not found: %s", url)
            return
        if res.status_code >= 399:
            countdown = 3600**self.request.retries
            self.retry(countdown=countdown)
        with open(tmp_path, 'w') as fh:
            for chunk in res.iter_content(chunk_size=1024):
                if chunk:
                    fh.write(chunk)
        if not meta.has('source_url'):
            meta.source_url = res.url
        meta.headers = res.headers
        meta = archive.archive_file(tmp_path, meta, move=True)
        Ingestor.dispatch(collection_id, meta)
    except IOError as ioe:
        log.info("IO Failure: %r", ioe)
        countdown = 3600**self.request.retries
        self.retry(countdown=countdown)
    except Exception as ex:
        Ingestor.handle_exception(meta, collection_id, ex)
    finally:
        db.session.remove()
        remove_tempfile(tmp_path)
Example #7
def test_basic_functions(self):
    meta = Metadata.from_data({
        'file_name': 'foo.doc',
        'title': '  ',
        'languages': ['en', 'xx']
    })
    assert meta.file_name == 'foo.doc', meta.file_name
    assert meta.title == 'foo.doc', meta.title
    assert not len(meta.countries), meta.countries
Example #8
def ingest_url(collection_id, metadata, url):
    meta = Metadata.from_data(metadata)
    tmp_path = make_tempfile(meta.file_name, suffix=meta.extension)
    try:
        log.info("Ingesting URL: %r", url)
        res = requests.get(url, stream=True, timeout=120)
        if res.status_code >= 400:
            msg = "HTTP Error %r: %r" % (url, res.status_code)
            raise IngestorException(msg)
        with open(tmp_path, 'w') as fh:
            for chunk in res.iter_content(chunk_size=1024):
                if chunk:
                    fh.write(chunk)
        if not meta.has('source_url'):
            meta.source_url = res.url
        meta.headers = res.headers
        meta = get_archive().archive_file(tmp_path, meta, move=True)
        Ingestor.dispatch(collection_id, meta)
    except Exception as ex:
        Ingestor.handle_exception(meta, collection_id, ex)
    finally:
        db.session.remove()
        remove_tempfile(tmp_path)
Example #9
def ingest_upload(collection_id):
    collection = obj_or_404(Collection.by_id(collection_id))
    authz.require(authz.collection_write(collection.id))
    log_event(request)
    try:
        meta = json.loads(request.form.get("meta", "{}"))
    except Exception as ex:
        raise BadRequest(unicode(ex))

    metas = []
    for storage in request.files.values():
        file_meta = meta.copy()
        file_meta["mime_type"] = storage.mimetype
        file_meta["file_name"] = storage.filename
        validate(file_meta, "metadata.json#")
        file_meta = Metadata.from_data(file_meta)
        file_meta.crawler_id = "user_upload:%s" % request.auth_role.id
        file_meta.crawler_run = make_textid()
        sec_fn = os.path.join(get_upload_folder(), secure_filename(storage.filename))
        storage.save(sec_fn)
        ingest_file(collection.id, file_meta, sec_fn, move=True)
        metas.append(file_meta)
    return jsonify({"status": "ok", "metadata": metas})
Example #10
def make_meta(self, data={}):
    data = json.loads(json.dumps(data))
    meta = Metadata.from_data(data)
    meta.crawler = self.get_id()
    meta.crawler_run = self.crawler_run
    return meta
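The json.loads(json.dumps(data)) round trip in make_meta is worth noting: it produces an independent copy of the caller's dict and fails early if the data is not JSON-serializable, so the crawler fields set afterwards never leak back into the argument (which is also a shared mutable default). A small illustration of that behaviour:

import json

original = {'languages': ['en'], 'title': 'Report'}
copied = json.loads(json.dumps(original))

copied['languages'].append('de')        # mutate the copy only
assert original['languages'] == ['en']  # the caller's dict is untouched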
Example #11
def ingest(collection_id, metadata):
    meta = Metadata.from_data(metadata)
    Ingestor.dispatch(collection_id, meta)
Example #12
def meta(self):
    self._meta = self._meta or {}
    self._meta['content_hash'] = self.content_hash
    self._meta['foreign_id'] = self.foreign_id
    return Metadata.from_data(self._meta or {})