def issue_query_pid(context, data_dict):
    """Issue a persistent identifier (PID) for the described query.

    Reads the query description (resource id, filter statement, optional
    full-text term, projection and sort) from *data_dict* and delegates
    PID creation to the datastore controller.
    """
    controller = VersionedDataStoreController.get_instance()

    resource_id = data_dict.get('resource_id', '')
    statement = data_dict.get('statement', {})
    fulltext = data_dict.get('q', None)
    projection = data_dict.get('projection', [])
    sort = data_dict.get('sort', [])

    return controller.issue_pid(resource_id, statement, projection, sort,
                                fulltext)
def querystore_resolve(context, data_dict):
    """Re-execute the stored query identified by ``data_dict['id']``.

    Optional ``offset``/``limit`` (may arrive as strings from the API
    layer) page the result; ``include_data`` ('true'/'false', default
    'true') controls whether record data is returned alongside metadata.
    """
    cntr = VersionedDataStoreController.get_instance()

    # Renamed from 'id' — don't shadow the builtin.
    query_id = data_dict.get('id')

    skip = data_dict.get('offset', 0)
    limit = data_dict.get('limit', 0)
    include_data = data_dict.get('include_data', 'true').lower() == 'true'

    # Coerce only when set; falsy values (0, '', None) keep the default 0.
    if skip:
        skip = int(skip)
    if limit:
        limit = int(limit)

    return cntr.execute_stored_query(query_id,
                                     offset=skip,
                                     limit=limit,
                                     include_data=include_data)
def nv_query(context, data_dict):
    """Run a non-versioned query against a resource's datastore.

    Dispatches to a full-text search when ``q`` is given, otherwise to a
    filter query built from the JSON-encoded ``filters`` parameter. The
    page size is clamped into [MIN_LIMIT, MAX_LIMIT] and the effective
    offset/limit are echoed back in the result.
    """
    controller = VersionedDataStoreController.get_instance()

    resource_id = data_dict.get('resource_id')
    fulltext = data_dict.get('q', None)
    fields = data_dict.get('fields', [])
    sort = data_dict.get('sort', None)
    offset = int(data_dict.get('offset', 0))
    limit = int(data_dict.get('limit', 0))
    filters = json.loads(data_dict.get('filters', '{}'))

    # Clamp the requested page size into the allowed bounds.
    limit = min(max(limit, MIN_LIMIT), MAX_LIMIT)

    if fulltext:
        result = controller.query_by_fulltext(resource_id, fulltext, fields,
                                              sort, offset, limit, True,
                                              none_versioned=True)
    else:
        result = controller.query_by_filters(resource_id, filters, fields,
                                             sort, offset, limit, True, False,
                                             none_versioned=True)

    result['offset'] = offset
    result['limit'] = limit

    return result
def mongodatastore_check_integrity(config=None):
    """Verify every stored query PID still resolves to a valid result set.

    Iterates all PIDs known to the query store, re-executes each stored
    query, and prints a per-query verdict plus a final summary with the
    elapsed time and the list of failing PIDs. Intended as a CLI command,
    hence the ``print`` reporting.
    """
    print(config)
    cntr = VersionedDataStoreController.get_instance()
    error_list = []

    start = datetime.utcnow()
    # get_cursor_on_ids() yields rows whose first column is the PID;
    # 'row' avoids shadowing the builtin 'id'.
    for row in cntr.querystore.get_cursor_on_ids():
        internal_id = int(row[0])
        if cntr.execute_stored_query(internal_id, 0, 0, True):
            print('query {0} is valid!'.format(internal_id))
        else:
            error_list.append(internal_id)
            print('query {0} is not valid!'.format(internal_id))

    stop = datetime.utcnow()

    print('integrity check stopped after {0} seconds'.format(
        (stop - start).total_seconds()))
    print('{0} problems detected'.format(len(error_list)))
    if error_list:
        print('The following PIDs do not retrieve a valid result set:')
        for internal_id in error_list:
            print(internal_id)
def dump_query(internal_id):
    """Stream the result set of stored query *internal_id* as a download.

    The export format comes from the request (?format=csv|xml|json,
    default json); records are fetched in CHUNK_SIZE pages so arbitrarily
    large result sets stream without being buffered in memory. Aborts
    with 405 for unsupported formats.
    """
    datastore_cntr = VersionedDataStoreController.get_instance()

    def fetch_chunk(index):
        # One page of the stored query, starting at offset *index*.
        return datastore_cntr.execute_stored_query(internal_id,
                                                   index,
                                                   CHUNK_SIZE,
                                                   include_data=True)

    def convert_csv_field(value):
        # Strip newlines: they would break the one-record-per-line layout.
        if value:
            return str(value).replace('\n', '')
        return ''

    def to_csv():
        index = 0
        result = fetch_chunk(index)
        records = list(result['records'])

        fields = [field['id'] for field in result['fields']]
        if csv_include_header:
            yield generate_header(fields, csv_delimiter) + '\n'

        while records:
            for record in records:
                yield csv_delimiter.join(
                    convert_csv_field(record[f]) for f in fields) + '\n'
            index += CHUNK_SIZE
            records = list(fetch_chunk(index)['records'])

    def to_json():
        # Separators are emitted *between* records: the previous version
        # yielded a trailing ', ' before ']', producing invalid JSON.
        index = 0
        records = list(fetch_chunk(index)['records'])

        yield '[\n'
        first = True
        while records:
            for record in records:
                if first:
                    first = False
                else:
                    yield ',\n'
                yield json.dumps(record)
            index += CHUNK_SIZE
            records = list(fetch_chunk(index)['records'])
        yield '\n]'

    def to_xml():
        index = 0
        records = list(fetch_chunk(index)['records'])
        yield '<records>'
        while records:
            for record in records:
                yield xmltodict.unparse({'record': record},
                                        full_document=False) + '\n'
            index += CHUNK_SIZE
            records = list(fetch_chunk(index)['records'])
        yield '</records>'

    export_format = request.args.get('format', 'json')
    csv_delimiter = request.args.get('csvDelimiter', ';')
    csv_include_header = request.args.get('includeHeader',
                                          'true').lower() == 'true'

    # Format -> (streaming generator, mimetype). Filenames reuse the
    # format string as the extension (csv/xml/json), as before.
    exporters = {
        'csv': (to_csv, 'text/csv'),
        'xml': (to_xml, 'text/xml'),
        'json': (to_json, 'text/json'),
    }

    if export_format not in exporters:
        abort(405, 'Export format "{0}" not supported'.format(export_format))

    generator, mimetype = exporters[export_format]
    r = Response(generator(),
                 mimetype=mimetype,
                 content_type='application/octet-datadump')
    r.headers.set('Content-Disposition',
                  'attachment',
                  filename='{0}.{1}'.format(internal_id, export_format))
    return r
# Example #6 (score: 0)
 def configure(self, cfg):
     """Apply *cfg* to the datastore controller singleton and return it."""
     VersionedDataStoreController.reload_config(cfg)
     return cfg
# Example #7 (score: 0)
 def __init__(self):
     # Shared singleton controller used for all datastore operations.
     self.mongo_cntr = VersionedDataStoreController.get_instance()
     # SQL-style search is on by default; presumably toggled elsewhere —
     # TODO(review): confirm against callers.
     self.enable_sql_search = True