Example 1
def save_default_model(datasetname):

    dataset = get_dataset(datasetname)

    if not dataset.mapping or not dataset.source:
        return jsonify({"errors":["No mapping for this dataset"]})

    if not dataset.dataorg:
        return jsonify({"errors":['Has no dataorg']})



    #get the OR instructions from dataset
    ORinstructions = dataset.source.getORInstructions()

    #get the OR instructions from dataset
    mapping = dataset.mapping

    dataorg = dataset.dataorg

    dataorg.ORTemplate = {"data": ORinstructions}
    dataorg.mappingTemplate = mapping

    db.session.commit()

    return jsonify({"success":True})
Example 2
def field(datasetname):
    """
    get the column names and any existing info for them
    - TODO: add a check for when the source name does not exist
    """
    dataset = get_dataset(datasetname)

    if dataset.mapping:
        #we have a model.  Get the model info
        modeler = dataset.mapping['mapping']
        refineproj = dataset.source.get_or_create_ORProject()
        columns = refineproj.refineproj.columns
        return jsonify({
            "columns": columns,
            "modeler": modeler
        },
                       headers={'Cache-Control': 'no-cache'})
    else:
        refineproj = dataset.source.get_or_create_ORProject()
        headers = {'Cache-Control': 'no-cache'}

        basemodeler = DEFAULT_SOURCE_MAPPING

        return jsonify(
            {
                "columns": refineproj.refineproj.columns,
                'modeler': basemodeler
            },
            headers=headers)
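All of these views resolve their dataset through get_dataset. A minimal sketch of that helper, assuming a Dataset model with a by_name lookup plus the obj_or_404 and require helpers that appear in later examples (a hypothetical reconstruction, not the project's actual code):

def get_dataset(name):
    # look the dataset up by its unique name; raise a 404 when it is missing
    dataset = obj_or_404(Dataset.by_name(name))
    # hypothetical read-permission gate, mirroring require.dataset.update below
    require.dataset.read(dataset)
    return dataset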
Example 3
def field_polling_post(datasetname, columnkey):
    """
    POST to verify that the column is valid
    """

    #print request.get_json().get('columnval', None)
    ORcolumn = request.get_json().get('columnval', None)
    if not ORcolumn:
        return jsonify({"errors":["could not find the column name"]})

    dataset = get_dataset(datasetname)

    if not require.dataset.update(dataset):
        return jsonify({"errors":["Permission denied"]})

    try:
        columnsettings = api_form_data()

        #use this later if async run is necessary
        #runop = Run(columnsettings['columnval'], dataset, source)
        #db.session.add(runop)
        #db.session.commit()

        #check_column.apply_async(args=[source.id, columnkey, columnsettings['columnval'], runop.id], countdown=1)
        resultval = check_column(dataset.source.id, columnkey, columnsettings['columnval'])

        if len(resultval['errors']) == 0:
            return jsonify({"success":True})
        else:
            return jsonify(resultval)
    except Exception, e:
        print "here is my error", e
        return jsonify({"errors":['Unknown Error has occurred']})
Example 4
def field_polling_post(datasetname, columnkey):
    """
    POST to verify that the column is valid
    """

    #print request.get_json().get('columnval', None)
    ORcolumn = request.get_json().get('columnval', None)
    if not ORcolumn:
        return jsonify({"errors": ["could not find the column name"]})

    dataset = get_dataset(datasetname)

    if not require.dataset.update(dataset):
        return jsonify({"errors": ["Permission denied"]})

    try:
        columnsettings = api_form_data()

        #use this later if async run is necessary
        #runop = Run(columnsettings['columnval'], dataset, source)
        #db.session.add(runop)
        #db.session.commit()

        #check_column.apply_async(args=[source.id, columnkey, columnsettings['columnval'], runop.id], countdown=1)
        resultval = check_column(dataset.source.id, columnkey,
                                 columnsettings['columnval'])

        if len(resultval['errors']) == 0:
            return jsonify({"success": True})
        else:
            return jsonify(resultval)
    except Exception, e:
        print "here is my error", e
        return jsonify({"errors": ['Unknown Error has occurred']})
Example 5
def ORoperations(datasetname):
    try:
        dataset = get_dataset(datasetname)

        ORinstructions = dataset.source.getORInstructions()
        return jsonify(ORinstructions, headers={'Cache-Control': 'no-cache'})
    except Exception, e:
        return jsonify({"error": "Could not fetch the ORinstructions: " + str(e)})
Example 6
def view(name):
    """
    Get the dataset info to populate a form
    """

    dataset = get_dataset(name)
    outputdict = dataset.detailed_dict()
    return jsonify(outputdict, headers={'Cache-Control': 'no-cache'})
Example 7
def view(name):
    """
    Get the dataset info to populate a form
    """

    dataset = get_dataset(name)
    outputdict = dataset.detailed_dict()
    return jsonify(outputdict, headers={'Cache-Control': 'no-cache'})
Example 8
def update_model(datasetname):

    #we just got everything now let's save it
    sourcemeta = request.get_json().get("meta", None)
    sourcemodeler = request.get_json().get("modeler", None)
    #validate that we have everything here

    r = {"mapping":sourcemodeler}

    #let's handle the compounds
    for item in r['mapping'].values():
        if item['type'] in ("compound", "geometry"):
            for attitem in item['attributes'].values():
                if attitem['column'] == 'countryid':
                    pass  # no-op today: every attribute below inherits the parent column
                attitem['column'] = item['column']

    #if not hasattr(r['mapping'], 'theid'):
    r['mapping']['theid'] = {
                              "default_value": "",
                              "description": "Unique ID",
                              "datatype": "string",
                              "key": True,
                              "label": "UniqueID",
                              "column": "uniqueid",
                              "type": "attribute",
                              "form": {
                                "label": "Unique Identifier"
                                }
                            }

    r['mapping']['geom_time_id'] = {
                              "default_value": "",
                              "description": "Geometry Time ID",
                              "datatype": "integer",
                              "label": "Geometry Time ID",
                              "column": "geom_time_id",
                              "type": "geom_time_id",
                              "form": {
                                "label": "Geometry-Time ID"
                                }
                            }

    dataset = get_dataset(datasetname)
    dataset.mapping = r
    dataset.ORoperations = {'data': dataset.source.getORInstructions()}
    dataset.source.addData(r)
    db.session.commit()

    load_source(dataset.source.id)
    cache.clear()
    #add async request to load data

    return jsonify({"success":True})
Example 9
def ORoperations(datasetname):
    try:
        dataset = get_dataset(datasetname)

        ORinstructions = dataset.source.getORInstructions()
        return jsonify(ORinstructions, headers={'Cache-Control': 'no-cache'})
    except Exception, e:
        return jsonify(
            {"error": "Could not fetch the ORinstructions" + str(e)})
Example 10
def get_run(dataset, source, id):
    dataset = get_dataset(dataset)
    source = obj_or_404(Source.by_id(source))
    if source.dataset != dataset:
        raise BadRequest("There was no source")
    run = obj_or_404(Run.by_id(id))
    if run.source != source:
        raise BadRequest("There is no run %s" % str(id))
    return dataset, source, run
Example 11
def get_run(dataset, source, id):
    dataset = get_dataset(dataset)
    require.dataset.update(dataset)
    source = obj_or_404(Source.by_id(source))
    if source.dataset != dataset:
        raise BadRequest("There was no source")
    run = obj_or_404(Run.by_id(id))
    if run.source != source:
        raise BadRequest("There is no run '" + str(id) + '")
    return dataset, source, run
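get_run turns missing records into HTTP 404s via obj_or_404. A plausible sketch of that helper, assuming the project raises werkzeug's NotFound:

from werkzeug.exceptions import NotFound

def obj_or_404(obj):
    # raise a 404 instead of handing None back to the view
    if obj is None:
        raise NotFound()
    return obj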
Example 12
def update_model(datasetname):

    #we just got everything now let's save it
    sourcemeta = request.get_json().get("meta", None)
    sourcemodeler = request.get_json().get("modeler", None)
    #validate that we have everything here

    r = {"mapping": sourcemodeler}

    #let's handle the compounds
    for item in r['mapping'].values():
        if item['type'] in ("compound", "geometry"):
            for attitem in item['attributes'].values():
                if attitem['column'] == 'countryid':
                    pass  # no-op today: every attribute below inherits the parent column
                attitem['column'] = item['column']

    #if not hasattr(r['mapping'], 'theid'):
    r['mapping']['theid'] = {
        "default_value": "",
        "description": "Unique ID",
        "datatype": "string",
        "key": True,
        "label": "UniqueID",
        "column": "uniqueid",
        "type": "attribute",
        "form": {
            "label": "Unique Identifier"
        }
    }

    r['mapping']['geom_time_id'] = {
        "default_value": "",
        "description": "Geometry Time ID",
        "datatype": "integer",
        "label": "Geometry Time ID",
        "column": "geom_time_id",
        "type": "geom_time_id",
        "form": {
            "label": "Geometry-Time ID"
        }
    }

    dataset = get_dataset(datasetname)
    dataset.mapping = r
    dataset.ORoperations = {'data': dataset.source.getORInstructions()}
    dataset.source.addData(r)
    db.session.commit()

    load_source(dataset.source.id)
    cache.clear()
    #add async request to load data

    return jsonify({"success": True})
Example 13
def field_polling_check(datasetname, columnname):
    """
    GET to check if the run is complete
    """
    dataset = get_dataset(datasetname)

    if dataset.data:
        #we have a model.  Get the model info
        return jsonify({"error":"not yet implemented"})
    else:
        refineproj = dataset.source.get_or_create_ORProject()
        # this is awkward.  the class should be extended
        return jsonify(refineproj.refineproj.columns)
Example 14
def delete(datasetname):
    try:
        dataset = get_dataset(datasetname)

        db.session.delete(dataset.source)
        db.session.commit()
        clear_index_cache()

        #drop solr index
        #solr.drop_index(source.name)
        return jsonify(True)
    except Exception, e:
        return jsonify({"errors": [e]})
Example 15
def field_polling_check(datasetname, columnname):
    """
    GET to check if the run is complete
    """
    dataset = get_dataset(datasetname)

    if dataset.data:
        #we have a model.  Get the model info
        return jsonify({"error": "not yet implemented"})
    else:
        refineproj = dataset.source.get_or_create_ORProject()
        # this is awkward.  the class should be extended
        return jsonify(refineproj.refineproj.columns)
Example 16
def delete(datasetname):
    try:
        dataset = get_dataset(datasetname)
        require.dataset.update(dataset)

        db.session.delete(dataset.source)
        db.session.commit()
        clear_index_cache()

        #drop solr index
        #solr.drop_index(source.name)
        return jsonify(True)
    except Exception, e:
        return jsonify({"errors":[e]})
Example 17
def model(datasetname):
    #if not sourcename then we are saving the defaults for dataset
    
    dataset = get_dataset(datasetname)
    if not dataset.source:
        #then create one
        dataset_source = Source.by_source_name(dataset.name)
        if not dataset_source:
            dataset_source = Source(name=dataset.name, dataset=dataset)
            db.session.add(dataset_source)
        else:
            dataset_source.dataset = dataset
        db.session.commit()

        #figure out what they need over there?
    return jsonify(dataset.source)
Example 18
def update(name):
    """
    Update a dataset with a json object and name from the dataset form
    """
    try:
        dataset = get_dataset(name)
        schema = dataset_schema(ValidationState(dataset))
        data = schema.deserialize(api_form_data())

        dataset.update(data)
        db.session.commit()
        #clear_index_cache()
        return jsonify({"success": True})
    except Exception, e:
        print e
        return jsonify({"errors": ['Unknown Error has occurred']})
Example 19
def model(datasetname):
    #if not sourcename then we are saving the defaults for dataset

    dataset = get_dataset(datasetname)
    if not dataset.source:
        #then create one
        dataset_source = Source.by_source_name(dataset.name)
        if not dataset_source:
            dataset_source = Source(name=dataset.name, dataset=dataset)
            db.session.add(dataset_source)
        else:
            dataset_source.dataset = dataset
        db.session.commit()

        #figure out what they need over there?
    return jsonify(dataset.source)
Example 20
def update(name):
    """
    Update a dataset with a json object and name from the dataset form
    """
    try:
        dataset = get_dataset(name)
        require.dataset.update(dataset)
        schema = dataset_schema(ValidationState(dataset))
        data = schema.deserialize(api_form_data())

        dataset.update(data)
        db.session.commit()
        #clear_index_cache()
        return jsonify({"success":True})
    except Exception, e:
        print e
        return jsonify({"errors":['Unknown Error has occurred']}) 
Example 21
def slicer_model():
    # options
    # get dataset info
    results = {"models": {}, "options": {}}
    cubesarg = request.args.get("cubes", [])
    cubes = cubesarg.split("|")
    for cube in cubes:
        dataset = get_dataset(cube)
        if dataset:
            results["models"][cube] = dataset.detailed_dict()

    results["options"] = GEO_MAPPING

    results["formats"] = FORMATOPTS

    resp = Response(response=to_json(results), status=200, mimetype="application/json")
    return resp
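slicer_model expects a pipe-delimited list of cube names in the "cubes" query parameter, e.g. ?cubes=budget2014|budget2015 (hypothetical names); the split yields one get_dataset lookup per cube:

cubesarg = "budget2014|budget2015"   # hypothetical value of request.args["cubes"]
print(cubesarg.split("|"))           # ['budget2014', 'budget2015']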
Example 22
def apply_default_model(datasetname):

    dataset = get_dataset(datasetname)

    if not dataset or not dataset.dataorg:
        return jsonify({"errors": ["Invalid URL.  Could not find dataorg"]})

    dataorg = dataset.dataorg

    if not dataorg.ORTemplate or not dataorg.mappingTemplate:
        return jsonify({"errors": ["Dataorg has no template"]})

    dataset.source.applyORInstructions(dataorg.ORTemplate)

    dataset.mapping = dataorg.mappingTemplate

    db.session.commit()

    return jsonify(dataset.source, headers={'Cache-Control': 'no-cache'})
Example 23
    def get_json_result(self):
        results = self._getcache()
        resultmodel = {"cells": results}

        tempmodel = {}
        for dataset in self.cubes:
            tempmodel[dataset] = get_dataset(dataset).detailed_dict()

        resultmodel['models'] = tempmodel

        resultmodel['attributes'] = self.drilldowntables

        if self.clusterparams['cluster']:
            resultmodel['cluster'] = self.get_clusters(resultmodel['cells'])

        resp = Response(response=to_json(resultmodel),
                        status=200,
                        mimetype="application/json")
        return resp
Example 24
def update_model_createnew(datasetname):
    #refactor to include the update

    dataset = get_dataset(datasetname)

    #source will have name and URL
    sourceapi = api_form_data()

    if not sourceapi['name']:
        sourceapi['name'] = dataset.name
        #return jsonify({"errors":["You must enter a data name " + str(e)]})

    #verify that name is unique and URL is real
    #model = {'source': source}
    schema = source_schema(ValidationState(sourceapi))
    try:
        data = schema.deserialize(sourceapi)
    except Invalid, e:
        #print message in the future
        return jsonify({"errors": ["Invalid field " + str(e)]})
Example 25
def slicer_model():
    #options
    #get dataset info
    results = {"models": {}, "options": {}}
    cubesarg = request.args.get("cubes", [])
    cubes = cubesarg.split("|")
    for cube in cubes:
        dataset = get_dataset(cube)
        if dataset:
            results['models'][cube] = dataset.detailed_dict()

    results['options'] = GEO_MAPPING

    results['formats'] = FORMATOPTS

    resp = Response(response=to_json(results),
                    status=200,
                    mimetype="application/json")
    return resp
Example 26
def field(datasetname):
    """
    get the column names and any existing info for them
    - TODO: add a check for when the source name does not exist
    """
    dataset = get_dataset(datasetname)

    if dataset.mapping:
        #we have a model.  Get the model info
        modeler = dataset.mapping['mapping']
        refineproj = dataset.source.get_or_create_ORProject()
        columns = refineproj.refineproj.columns
        return jsonify({"columns":columns, "modeler":modeler}, headers= {'Cache-Control' : 'no-cache'})
    else:
        refineproj = dataset.source.get_or_create_ORProject()
        headers = {'Cache-Control': 'no-cache'}

        basemodeler = DEFAULT_SOURCE_MAPPING

        return jsonify({"columns": refineproj.refineproj.columns, 'modeler':basemodeler}, headers=headers)
Example 27
def update_model_createnew(datasetname):
    #refactor to include the update

    dataset = get_dataset(datasetname)


    #source will have name and URL
    sourceapi = api_form_data()

    if not sourceapi['name']:
        sourceapi['name'] = dataset.name
        #return jsonify({"errors":["You must enter a data name " + str(e)]})

    #verify that name is unique and URL is real
    #model = {'source': source}
    schema = source_schema(ValidationState(sourceapi))
    try:
        data = schema.deserialize(sourceapi)
    except Invalid, e:
        #print message in the future
        return jsonify({"errors":["Invalid field " + str(e)]})
Example 28
    def get_json_result(self):
        results = self._getcache()
        resultmodel = {
            "cells": results
        }

        tempmodel = {}
        for dataset in self.cubes:
            tempmodel[dataset] = get_dataset(dataset).detailed_dict()

        resultmodel['models'] = tempmodel

        resultmodel['attributes'] = self.drilldowntables

        if self.clusterparams['cluster']:
            resultmodel['cluster'] = self.get_clusters(resultmodel['cells'])

        resp = Response(response=to_json(resultmodel),
                        status=200,
                        mimetype="application/json")
        return resp
Example 29
def apply_default_model(datasetname):

    dataset = get_dataset(datasetname)

    if not dataset or not dataset.dataorg:
        return jsonify({"errors":["Invalid URL.  Could not find dataorg"]})

    dataorg = dataset.dataorg

    if not dataorg.ORTemplate or not dataorg.mappingTemplate:
        return jsonify({"errors":["Dataorg has no template"]})

    dataset.source.applyORInstructions(dataorg.ORTemplate)

    dataset.mapping = dataorg.mappingTemplate

    db.session.commit()

    return jsonify(dataset.source, headers={'Cache-Control': 'no-cache'})
Example 30
def save_default_model(datasetname):

    dataset = get_dataset(datasetname)

    if not dataset.mapping or not dataset.source:
        return jsonify({"errors": ["No mapping for this dataset"]})

    if not dataset.dataorg:
        return jsonify({"errors": ['Has no dataorg']})

    #get the OR instructions from dataset
    ORinstructions = dataset.source.getORInstructions()

    #get the mapping from dataset
    mapping = dataset.mapping

    dataorg = dataset.dataorg

    dataorg.ORTemplate = {"data": ORinstructions}
    dataorg.mappingTemplate = mapping

    db.session.commit()

    return jsonify({"success": True})
Example 31
    def cube(self, name, locale=None, namespace=None, metaonly=None):

        if name == "geometry":
            return getGeomCube(self, metaonly)

        dataset = get_dataset(name)
        if dataset is None:
            raise NoSuchCubeError("Unknown dataset %s" % name, name)

        mappings = {}
        joins = []
        fact_table = dataset.source.model.table.name

        aggregates = [MeasureAggregate('num_entries',
                                       label='Number of entries',
                                       function='count')]
        measures = []
    #         "wma": partial(_window_function_factory, window_function=weighted_moving_average, label='Weighted Moving Avg. of {measure}'),
    # "sma": partial(_window_function_factory, window_function=simple_moving_average, label='Simple Moving Avg. of {measure}'),
    # "sms": partial(_window_function_factory, window_function=simple_moving_sum, label='Simple Moving Sum of {measure}'),
        aggregation_funcs = ["sum", "min", "max", "avg"]

        for measure in dataset.source.model.measures:
            cubes_measure = Measure(measure.name, label=measure.label)
            measures.append(cubes_measure)
            for agg_func in aggregation_funcs:
                aggregate = MeasureAggregate(measure.name + "_" + agg_func,
                                             label=measure.label + agg_func,
                                             measure=measure.name,
                                             function=agg_func)
                aggregates.append(aggregate)

        dimensions = []
        for dim in dataset.source.model.dimensions:
            meta = dim.to_cubes(mappings, joins)
            meta.update({'name': dim.name, 'label': dim.label})
            dimensions.append(Dimension.from_metadata(meta))

        cube_meta = {"name":dataset.name,
                                "fact":fact_table,
                                "aggregates":aggregates,
                                "measures":measures,
                                #change these when they get added to the model
                                "label":dataset.label,
                                "description":"non null description",
                                "dimensions":dimensions,
                                "store":self.store,
                                "mappings":mappings,
                                "joins":joins}


        if metaonly:
            return cube_meta
        else:
            return Cube(name=cube_meta['name'],
                            fact=cube_meta['fact'],
                            aggregates=cube_meta['aggregates'],
                            measures=cube_meta['measures'],
                            label=cube_meta['label'],
                            description=cube_meta['description'],
                            dimensions=cube_meta['dimensions'],
                            store=cube_meta['store'],
                            mappings=cube_meta['mappings'],
                            joins=cube_meta['joins'])
Example 32
    def cube(self, name, locale=None, namespace=None, metaonly=None):

        if name == "geometry":
            return getGeomCube(self, metaonly)

        dataset = get_dataset(name)
        if dataset is None:
            raise NoSuchCubeError("Unknown dataset %s" % name, name)

        mappings = {}
        joins = []
        fact_table = dataset.source.model.table.name

        aggregates = [
            MeasureAggregate('num_entries',
                             label='Number of entries',
                             function='count')
        ]
        measures = []
        #         "wma": partial(_window_function_factory, window_function=weighted_moving_average, label='Weighted Moving Avg. of {measure}'),
        # "sma": partial(_window_function_factory, window_function=simple_moving_average, label='Simple Moving Avg. of {measure}'),
        # "sms": partial(_window_function_factory, window_function=simple_moving_sum, label='Simple Moving Sum of {measure}'),
        aggregation_funcs = ["sum", "min", "max", "avg"]

        for measure in dataset.source.model.measures:
            cubes_measure = Measure(measure.name, label=measure.label)
            measures.append(cubes_measure)
            for agg_func in aggregation_funcs:
                aggregate = MeasureAggregate(measure.name + "_" + agg_func,
                                             label=measure.label + agg_func,
                                             measure=measure.name,
                                             function=agg_func)
                aggregates.append(aggregate)

        dimensions = []
        for dim in dataset.source.model.dimensions:
            meta = dim.to_cubes(mappings, joins)
            meta.update({'name': dim.name, 'label': dim.label})
            dimensions.append(Dimension.from_metadata(meta))

        cube_meta = {
            "name": dataset.name,
            "fact": fact_table,
            "aggregates": aggregates,
            "measures": measures,
            #change these when they get added to the model
            "label": dataset.label,
            "description": "non null description",
            "dimensions": dimensions,
            "store": self.store,
            "mappings": mappings,
            "joins": joins
        }

        if metaonly:
            return cube_meta
        else:
            return Cube(name=cube_meta['name'],
                        fact=cube_meta['fact'],
                        aggregates=cube_meta['aggregates'],
                        measures=cube_meta['measures'],
                        label=cube_meta['label'],
                        description=cube_meta['description'],
                        dimensions=cube_meta['dimensions'],
                        store=cube_meta['store'],
                        mappings=cube_meta['mappings'],
                        joins=cube_meta['joins'])
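A minimal sketch of how the Cube built by this provider is typically consumed through the cubes library's Workspace/browser API; the slicer.ini config, the cube name, and the drilldown dimension are all assumptions:

from cubes import Workspace

workspace = Workspace(config="slicer.ini")  # hypothetical config wiring in this provider
browser = workspace.browser("mydataset")    # dispatches to the provider's cube() above
result = browser.aggregate(drilldown=["geometry"])  # "geometry" is an assumed dimension
print(result.summary)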