Code example #1 (score: 0)
def _dashify(value):
    """Return *value* with '/' replaced by '-'; falsy values pass through unchanged."""
    return value.replace('/', '-') if value else value


def update_data():
    """Fetch the remote cases feed and upsert every diagnosed case into db.casos.

    Reads the JSON document at ``app.cases_data_source_url``, normalises the
    date fields from ``Y/m/d`` to ``Y-m-d`` form and keys each record on the
    feed's ``id`` (stored locally as ``codigo``).  Commits and returns True.
    """
    data = loads_json(fetch(configuration.get('app.cases_data_source_url')))
    dias = data['casos']['dias']

    for dia, info in dias.items():
        # days without a 'diagnosticados' list carry no case records
        if 'diagnosticados' not in info:
            continue
        fecha = _dashify(info['fecha'])
        for diagnosticado in info['diagnosticados']:
            diagnosticado['arribo_a_cuba_foco'] = _dashify(diagnosticado['arribo_a_cuba_foco'])
            diagnosticado['consulta_medico'] = _dashify(diagnosticado['consulta_medico'])
            # the feed's 'id' becomes the local natural key 'codigo'
            diagnosticado['codigo'] = diagnosticado.pop('id')
            diagnosticado['fecha'] = fecha
            diagnosticado['dia'] = dia
            db.casos.update_or_insert(db.casos.codigo == diagnosticado['codigo'],
                                      **diagnosticado)
    db.commit()
    return True
Code example #2 (score: 0)
File: default.py — Project: tazjel/w2pgis
def get_geojson():
    """Return every geostuff record as a GeoJSON FeatureCollection response."""
    geom_json = db.geostuff.geometry.st_asgeojson()
    records = db(db.geostuff).select(db.geostuff.name, geom_json)

    feature_list = []
    for record in records:
        feature_list.append({
            "type": "Feature",
            "properties": {"popupContent": record[db.geostuff.name]},
            "geometry": loads_json(record[geom_json]),
        })

    return response.json({"type": "FeatureCollection", 'features': feature_list})
コード例 #3
0
def get_geojson():
    """Serve all geostuff rows as a GeoJSON FeatureCollection."""
    # hoist the geojson column expression so select and row lookup share it
    geojson_col = db.geostuff.geometry.st_asgeojson()
    features = [
        {
            "type": "Feature",
            "properties": {
                "popupContent": row[db.geostuff.name]
            },
            "geometry": loads_json(row[geojson_col]),
        }
        for row in db(db.geostuff).select(db.geostuff.name, geojson_col)
    ]
    return response.json({"type": "FeatureCollection", 'features': features})
コード例 #4
0
def get_geojson3():
    """Return all ippos records, ordered by id, as a GeoJSON FeatureCollection."""
    geojson_col = dbpg.ippos.pos.st_asgeojson()
    records = dbpg().select(dbpg.ippos.ip,
                            dbpg.ippos.descricao,
                            geojson_col,
                            orderby=dbpg.ippos.id)
    features = []
    for record in records:
        features.append({
            "type": "Feature",
            "properties": {"popupContent": record[dbpg.ippos.ip]},
            "geometry": loads_json(record[geojson_col]),
        })
    return response.json(dict(type="FeatureCollection", features=features))
コード例 #5
0
def insert_temp_geom(geojson_dict):
    """Insert each feature of a client-submitted GeoJSON document into tempgeom.

    ``geojson_dict`` arrives from jQuery with the JSON document as its single
    key.  Every feature's geometry is stored under one freshly allocated
    ``tempid``, which is returned on success.  Returns False on insert error
    and an explanatory string when the document holds no features.
    """
    # db must be resolved per call: a module-level reference would outlive
    # the pooled thread it was created on while db still refers to it.
    db = current.db

    import json
    from gluon.serializers import loads_json

    # loads_json converts a string of json to a python dict;
    # the jQuery payload carries the document as the dict's only key.
    json_dict = loads_json(list(geojson_dict.keys())[0])

    # if an empty geojson is submitted
    if len(json_dict['features']) == 0:
        return 'No feature submitted'

    # get a unique id
    # NOTE(review): len(select)+1 is racy under concurrent requests — two
    # callers can allocate the same tempid.  A DB sequence would be safer.
    tempid = len(db().select(db.tempgeom.id)) + 1

    try:
        # for each geojson feature do a db insert
        for feature in json_dict['features']:

            # each feature is a dict of geometry, type and properties,
            # of which only geometry is stored
            geometry = feature['geometry']

            # ensure crs is included, by default WGS84
            geometry['crs'] = {'type': 'name',
                               'properties': {'name': 'EPSG:4326'}}

            # placeholders keep the client-supplied JSON out of the SQL
            # string — the previous .format()-built statement was injectable
            db.executesql(
                "INSERT INTO tempgeom (tempid, geom) "
                "VALUES (%s, st_geomfromgeojson(%s));",
                placeholders=[tempid, json.dumps(geometry)])

    except Exception as e:
        print(e)
        return False
    else:
        return tempid
Code example #6 (score: 0)
File: procedure.py — Project: diogommartins/api
def index():
    """Handle POST requests for ``procedure/*`` endpoints.

    The request body must contain:
    "API_KEY" -> key granting access to the data.
    "data" -> list of dicts, each one used as the argument set for one
    execution of the procedure.

    Optionally it may contain:

    'async' -> whether the request is processed asynchronously.
    'fields' -> fields to be returned.

    """
    params = loads_json(request.body.read())
    procedure_name = APIRequest.procedure_for_path(request.env.PATH_INFO)

    try:
        procedure = Procedure(procedure_name, datasource)
    except UndefinedProcedureException as e:
        raise HTTP(http.NOT_FOUND, e.msg)

    api_key = APIKey(db, params['API_KEY'])
    if not api_key.auth:
        raise HTTP(http.UNAUTHORIZED, "API Key inválida ou inativa")

    if not APIProcedurePermissions(api_key,
                                   procedure_name).can_perform_api_call():
        raise HTTP(http.UNAUTHORIZED, "Nao pode.")

    try:
        # construct the validator once (it was previously built twice)
        validator = ProcedureDatasetValidator(procedure)
        valid_datasets = tuple(dataset for dataset in params['data']
                               if validator.is_valid_dataset(dataset))
    except ValueError as e:
        raise HTTP(http.BAD_REQUEST, e.message)  # Invalid dataset

    response.view = 'generic.json'

    for dataset in valid_datasets:
        if params['async']:
            _async(dataset, params, procedure_name)
        else:
            _sync(dataset, params, procedure)
コード例 #7
0
 def GET(*args, **vars):
     """Return every ippos record as a GeoJSON FeatureCollection."""
     geojson_col = dbpg.ippos.pos.st_asgeojson()
     records = dbpg().select(dbpg.ippos.ip,
                             dbpg.ippos.descricao,
                             geojson_col,
                             orderby=dbpg.ippos.id)
     features = []
     for rec in records:
         # popup shows the IP and the description on separate lines
         popup = rec[dbpg.ippos.ip] + "</br>" + rec[dbpg.ippos.descricao]
         features.append({
             "type": "Feature",
             "properties": {"popupContent": popup},
             "geometry": loads_json(rec[geojson_col]),
         })
     return response.json({
         "type": "FeatureCollection",
         'features': features
     })
コード例 #8
0
    def export(self):
        """Serialise self.rows as a GeoJSON FeatureCollection string.

        Returns the empty string when there are no rows to export.
        """
        if not self.rows:
            return ''

        # collect the ids of the rows being exported
        row_dicts = list(self.rows.as_dict().values())
        ids = [entry.pop('id') for entry in row_dicts]

        # Re-query the gazetteer for those ids.  with_alias() lifts id and
        # geojson outside the nested 'gazetteer' table dictionary, so each
        # result row maps directly onto a geojson Feature entry.
        locations = db(db.gazetteer.id.belongs(ids)).select(
            db.gazetteer.id.with_alias('id'), db.gazetteer.location,
            db.gazetteer.type, db.gazetteer.parent, db.gazetteer.region,
            db.gazetteer.plot_size, db.gazetteer.fractal_order,
            db.gazetteer.transect_order,
            db.gazetteer.wkt_wgs84.st_asgeojson().with_alias(
                'geojson')).as_list()

        # postgres returns ST_AsGeoJSON as a string, so parse it back
        # into a dictionary while assembling the features list
        features = []
        for loc in locations:
            features.append({
                'type': "Feature",
                'id': loc['id'],
                'properties': loc['gazetteer'],
                'geometry': loads_json(loc['geojson']),
            })

        # embed the features in a FeatureCollection with an explicit CRS
        feature_collection = {
            "type": "FeatureCollection",
            "crs": {
                "type": "name",
                "properties": {
                    "name": "urn:ogc:def:crs:OGC:1.3:CRS84"
                }
            },
            "features": features
        }

        return simplejson.dumps(feature_collection)
Code example #9 (score: 0)
File: procedure.py — Project: diogommartins/api
def index():
    """Handle POST requests for ``procedure/*`` endpoints.

    The request body must contain:
    "API_KEY" -> key granting access to the data.
    "data" -> list of dicts, each one used as the argument set for one
    procedure execution.

    Optionally it may contain:

    'async' -> whether the request is processed asynchronously.
    'fields' -> fields to be returned.

    """
    params = loads_json(request.body.read())
    procedure_name = APIRequest.procedure_for_path(request.env.PATH_INFO)

    try:
        procedure = Procedure(procedure_name, datasource)
    except UndefinedProcedureException as e:
        raise HTTP(http.NOT_FOUND, e.msg)

    api_key = APIKey(db, params['API_KEY'])
    if not api_key.auth:
        raise HTTP(http.UNAUTHORIZED, "API Key inválida ou inativa")

    if not APIProcedurePermissions(api_key, procedure_name).can_perform_api_call():
        raise HTTP(http.UNAUTHORIZED, "Nao pode.")

    try:
        # construct the validator once (it was previously built twice)
        validator = ProcedureDatasetValidator(procedure)
        valid_datasets = tuple(dataset for dataset in params['data'] if validator.is_valid_dataset(dataset))
    except ValueError as e:
        raise HTTP(http.BAD_REQUEST, e.message)  # Invalid dataset

    response.view = 'generic.json'

    for dataset in valid_datasets:
        if params['async']:
            _async(dataset, params, procedure_name)
        else:
            _sync(dataset, params, procedure)
コード例 #10
0
def get_geom_as_json_by_uid(uid, tab_geom=TEMPGEOM):
    """Build a GeoJSON FeatureCollection for the geometries matching *uid*.

    Returns False when *tab_geom* is not one of the recognised geometry
    tables.
    """
    from gluon.serializers import loads_json

    db = current.db

    # each geometry table keys its rows on a differently named id column
    if tab_geom == TEMPGEOM:
        id_column = 'tempid'
    elif tab_geom == REALMBIOME_GEOM:
        id_column = 'bid'
    elif tab_geom == WH_GEOM:
        id_column = 'wdpaid'
    else:
        return False

    # fetch the matching geometries as geojson strings
    geom_alias = db[tab_geom].geom.st_asgeojson().with_alias('geom')
    matching = db(db[tab_geom][id_column] == uid).select(geom_alias)

    # the geojson FeatureCollection has to be constructed by hand
    features = []
    for record in matching:
        features.append({"type": "Feature",
                         "geometry": loads_json(record['geom'])})

    return {"type": "FeatureCollection", "features": features}
Code example #11 (score: 0)
File: default.py — Project: Carzeh/web2py-book
def rebuild_sources():
    """GitHub webhook: touch the rebuild marker on commits by mdipierro."""
    github_cidrs = ["204.232.175.64/27", "192.30.252.0/22"]
    check_cidr = CIDRConv(cidrs=github_cidrs)
    # reject callers outside GitHub's published address ranges
    if not check_cidr.valid_ip(request.env.remote_addr):
        raise HTTP(404)
    payload = request.post_vars.payload
    if not payload:
        raise HTTP(404)
    payload = loads_json(payload)
    # rebuild only on massimo's commits
    rebuild = any(
        commit.get("author", {"name": ""})["name"] == "mdipierro"
        for commit in payload.get("commits", []))
    if not rebuild:
        raise HTTP(200)
    dest = os.path.join(request.folder, "private", "rebuild_me")
    with open(dest, "w") as g:
        g.write("ok")
    return "ok"
コード例 #12
0
def rebuild_sources():
    """Webhook endpoint: flag the book sources for rebuilding."""
    allowed = CIDRConv(cidrs=['204.232.175.64/27', '192.30.252.0/22'])
    caller_ip = request.env.remote_addr
    # only GitHub's published address ranges may call this endpoint
    if not allowed.valid_ip(caller_ip):
        raise HTTP(404)
    raw_payload = request.post_vars.payload
    if not raw_payload:
        raise HTTP(404)
    data = loads_json(raw_payload)
    # only massimo's commits trigger a rebuild
    for commit in data.get('commits', []):
        if commit.get('author', {'name': ''})['name'] == 'mdipierro':
            break
    else:
        raise HTTP(200)
    marker = os.path.join(request.folder, 'private', 'rebuild_me')
    with open(marker, 'w') as g:
        g.write('ok')
    return 'ok'
Code example #13 (score: 0)
File: default.py — Project: m8ram/web2py-book
def rebuild_sources():
    """Handle GitHub push webhooks and drop the rebuild marker file."""
    cidr_check = CIDRConv(cidrs=['204.232.175.64/27', '192.30.252.0/22'])
    if not cidr_check.valid_ip(request.env.remote_addr):
        raise HTTP(404)  # request did not originate from GitHub
    payload = request.post_vars.payload
    if not payload:
        raise HTTP(404)
    parsed = loads_json(payload)
    commits = parsed.get('commits', [])
    # rebuild only on massimo's commits
    authored_by_massimo = [
        c for c in commits
        if c.get('author', {'name': ''})['name'] == 'mdipierro'
    ]
    if not authored_by_massimo:
        raise HTTP(200)
    marker_path = os.path.join(request.folder, 'private', 'rebuild_me')
    with open(marker_path, 'w') as marker:
        marker.write('ok')
    return 'ok'
Code example #14 (score: 0)
File: turk.py — Project: jbragg/utility
def store_get(key):
    """Look up *key* in db.store and deserialize its JSON value.

    Returns the falsy lookup result itself (None) when the key is absent.
    """
    record = db(db.store.key == key).select().first()
    if not record:
        return record
    return serializers.loads_json(record.value)
コード例 #15
0
def api():
    """
    Action args:
    (index/value)
    1: DAL instance name
    3: One of setup/form/query
    5: Tablename
    7: Record id

    WARNING: this api exposes all validator options to the client
    For fine-grained access control, consider applying default
    validator filters

    NOTE: multiple db CRUD is not supported unless you
    write your own validation because it is not
    implemented in Auth.

    WARNING: applying some queries without sanitization might
    expose the system to code injections. You should inspect
    each validator and any other environment object sent
    by the client.

    Only json queries supported.
    TODO: read xml and yaml queries

    Note: Query dicts do not implement .select(args)

    Test requirements
    Logged-in auth user
    This is an example of w2p dict query
    {'second': 0, 'ignore_common_filters': false, 'optional_args': {},
     'first': {'fieldname': 'id', 'tablename': 'auth_user'},
     'op': 'GT'}
     It's equivalent to the server query expression:
     db.auth_user.id > 0

    """

    # CORS basic setup
    if myclientapi.settings.origin:
        response.headers['Access-Control-Allow-Origin'] = \
            myclientapi.settings.origin
        response.headers['Access-Control-Allow-Credentials'] = \
            myclientapi.settings.credentials or 'false'
        response.headers['Access-Control-Allow-Methods'] = \
            myclientapi.settings.methods or ''

    # handle logging (run the configured logger at most once)
    if myclientapi.settings.log and (not myclientapi.settings.logged):
        myclientapi.settings.log()
        myclientapi.settings.logged = True

    # the client marks which request vars are JSON-serialized via the
    # "_serialized" list; decode each of those vars back into objects
    if "_serialized" in request.vars:
        _serialized = request.vars.pop("_serialized")
        try:
            serialized = loads_json(_serialized)
        except (ValueError, TypeError, SyntaxError):
            raise HTTP(500, T("Error retrieving serialized values"))
        for k in request.vars.keys():
            try:
                if k in serialized:
                    request.vars[k] = loads_json(request.vars[k])
            except (ValueError, TypeError, SyntaxError):
                raise HTTP(500, T("Could not parse the %s value") % k)

    # "setup": return the db schemes the caller is allowed to see
    if request.args(3) == "setup":
        result = myclientapi.settings.rbac("setup", None, None, None)
        if result[0]:
            schemes = result[1]
        else:
            raise HTTP(403, T("Access denied (no setup rights)"))
        return dict(schemes=schemes, dbnames=schemes.keys())

    # "form": build and process a create or update SQLFORM for a table
    elif request.args(3) == "form":
        table = myclientapi.database[request.args(5)]
        record = request.args(7)
        if record:
            if myclientapi.settings.rbac("form", "update", table,
                                         record)[0]:
                form = SQLFORM(table, record, deletable=True)
            else:
                raise HTTP(403, T("Access denied (no update rights)"))
        else:
            if myclientapi.settings.rbac("form", "create", table,
                                         None)[0]:
                form = SQLFORM(table)
            else:
                raise HTTP(403, T("Access denied (no create rights)"))
        form.process()
        data = form.as_dict(flat=True)
        response.flash = None
        return dict(form=data)

    # "query": run a client-supplied query after an rbac check
    elif request.args(3) == "query":
        if request.is_gae:
            raise HTTP(500, T("Not implemented"))
        query = myclientapi.database(request.vars.query).query
        result = myclientapi.settings.rbac("query",
                                           None, None, query)
        if result[0]:
            if result[1] is None:
                raise HTTP(500, T("Invalid Query"))
            else:
                # rbac may rewrite the query; run the approved version
                data = myclientapi.database(
                    result[1]).select().as_dict()
        else:
            raise HTTP(403, T("Access denied (no query rights)"))
        return dict(rows=data)

    # "user": bare login/logout session management
    elif request.args(3) == "user":
        if request.args(5) == "login":
            if auth.user_id:
                raise HTTP(500,
                           T("There is an already open user session"))
            else:
                result = auth.login_bare(request.vars.username,
                                         request.vars.password)
                if result:
                    message = T("ok")
                    profile = auth.user.as_dict()
                else:
                    message = T("Login failed")
                    profile = None
                return dict(profile=profile, message=message)
        elif request.args(5) == "logout":
            if not auth.is_logged_in():
                raise HTTP(500, T("There is no user session"))
            session.auth = None
            return dict(profile=None, message=T("ok"))
        else: raise HTTP(500,
                         T("Not implemented: %s") % request.args(5))
        return dict(result=None)
    else:
        raise HTTP(500, T("Invalid operation %s") % request.args(3))
コード例 #16
0
def gazetteer():
    """
    Controller to provide a map view of the gazetteer data and a searchable
    interface with GPX download.

    Returns a dict with the searchable grid (``form``) and the map data
    (``sitedata``) as JSON-encoded GeoJSON-style features.
    """

    # If the grid has set up some search keywords, and the keywords aren't an empty
    # string then use them to select those rows, otherwise get all rows
    sfields = [
        db.gazetteer.location, db.gazetteer.type, db.gazetteer.plot_size,
        db.gazetteer.fractal_order, db.gazetteer.transect_order
    ]

    if 'keywords' in request.get_vars and request.vars.keywords != '':
        qry = SQLFORM.build_query(sfields, keywords=request.vars.keywords)
    else:
        qry = db.gazetteer

    # get the (selected) rows and turn them into geojson, ordering them
    # so that the bottom ones get added to the leaflet map first
    rws = db(qry).select(
        db.gazetteer.ALL,
        db.gazetteer.wkt_wgs84.st_asgeojson().with_alias('geojson'),
        orderby=db.gazetteer.display_order)

    # Need to put together the tooltip for the gazetteer
    # using a subset of the available columns
    loc = ['<B>' + rw.gazetteer['location'] + '</B></BR>' for rw in rws]
    info = [[
        key + ': ' + str(rw.gazetteer[key]) for key in
        ['type', 'plot_size', 'parent', 'fractal_order', 'transect_order']
        if rw.gazetteer[key] is not None
    ] for rw in rws]

    # combine, removing trailing break
    tooltips = [l + '</BR>'.join(i) for l, i in zip(loc, info)]

    # wrap each row and its tooltip as a GeoJSON-style Feature
    rws = [{
        "type": "Feature",
        "tooltip": tl,
        "geometry": loads_json(r.geojson)
    } for r, tl in zip(rws, tooltips)]

    # provide GPX and GeoJSON downloaders and use the magic
    # 'with_hidden_cols' suffix to allow the Exporter to access
    # fields that aren't shown in the table
    export = dict(gpx_with_hidden_cols=(ExporterGPX, 'GPX'),
                  geojson_with_hidden_cols=(ExporterGeoJSON, 'GeoJson'),
                  csv_with_hidden_cols=False,
                  csv=False,
                  xml=False,
                  html=False,
                  json=False,
                  tsv_with_hidden_cols=False,
                  tsv=False)

    # hide display order from search and export
    db.gazetteer.display_order.readable = False

    form = SQLFORM.grid(db.gazetteer,
                        fields=sfields,
                        csv=True,
                        exportclasses=export,
                        maxtextlength=250,
                        deletable=False,
                        editable=False,
                        create=False,
                        details=False)

    # format the HTML to move the export button into the search console
    # get a button themed link. Check to make sure there is an export menu
    # as it will be missing if a search returns no rows
    exp_menu = form.element('.w2p_export_menu')
    if exp_menu is not None:
        # exp_menu children: index 1 is the GeoJSON link, index 2 the GPX link
        exp_gpx = A("Export GPX",
                    _class="btn btn-default",
                    _href=exp_menu[2].attributes['_href'],
                    _style='padding:6px 12px;line-height:20px')
        exp_geojson = A("Export GeoJSON",
                        _class="btn btn-default",
                        _href=exp_menu[1].attributes['_href'],
                        _style='padding:6px 12px;line-height:20px')
        console = form.element('.web2py_console form')
        console.insert(len(console), CAT(exp_gpx, exp_geojson))

        # get the existing export menu index (a DIV within FORM) and delete it
        export_menu_idx = [x.attributes['_class']
                           for x in form].index('w2p_export_menu')
        del form[export_menu_idx]

    return dict(form=form, sitedata=json(rws))
コード例 #17
0
def api():
    """
    Action args:
    (index/value)
    1: DAL instance name
    3: One of setup/form/query
    5: Tablename
    7: Record id

    WARNING: this api exposes all validator options to the client
    For fine-grained access control, consider applying default
    validator filters

    NOTE: multiple db CRUD is not supported unless you
    write your own validation because it is not
    implemented in Auth.

    WARNING: applying some queries without sanitization might
    expose the system to code injections. You should inspect
    each validator and any other environment object sent
    by the client.

    Only json queries supported.
    TODO: read xml and yaml queries

    Note: Query dicts do not implement .select(args)

    Test requirements
    Logged-in auth user
    This is an example of w2p dict query
    {'second': 0, 'ignore_common_filters': false, 'optional_args': {},
     'first': {'fieldname': 'id', 'tablename': 'auth_user'},
     'op': 'GT'}
     It's equivalent to the server query expression:
     db.auth_user.id > 0

    """

    # CORS basic setup
    if myclientapi.settings.origin:
        response.headers['Access-Control-Allow-Origin'] = \
            myclientapi.settings.origin
        response.headers['Access-Control-Allow-Credentials'] = \
            myclientapi.settings.credentials or 'false'
        response.headers['Access-Control-Allow-Methods'] = \
            myclientapi.settings.methods or ''

    # handle logging (run the configured logger at most once)
    if myclientapi.settings.log and (not myclientapi.settings.logged):
        myclientapi.settings.log()
        myclientapi.settings.logged = True

    # the client marks which request vars are JSON-serialized via the
    # "_serialized" list; decode each of those vars back into objects
    if "_serialized" in request.vars:
        _serialized = request.vars.pop("_serialized")
        try:
            serialized = loads_json(_serialized)
        except (ValueError, TypeError, SyntaxError):
            raise HTTP(500, T("Error retrieving serialized values"))
        for k in request.vars.keys():
            try:
                if k in serialized:
                    request.vars[k] = loads_json(request.vars[k])
            except (ValueError, TypeError, SyntaxError):
                raise HTTP(500, T("Could not parse the %s value") % k)

    # "setup": return the db schemes the caller is allowed to see
    if request.args(3) == "setup":
        result = myclientapi.settings.rbac("setup", None, None, None)
        if result[0]:
            schemes = result[1]
        else:
            raise HTTP(403, T("Access denied (no setup rights)"))
        return dict(schemes=schemes, dbnames=schemes.keys())

    # "form": build and process a create or update SQLFORM for a table
    elif request.args(3) == "form":
        table = myclientapi.database[request.args(5)]
        record = request.args(7)
        if record:
            if myclientapi.settings.rbac("form", "update", table, record)[0]:
                form = SQLFORM(table, record, deletable=True)
            else:
                raise HTTP(403, T("Access denied (no update rights)"))
        else:
            if myclientapi.settings.rbac("form", "create", table, None)[0]:
                form = SQLFORM(table)
            else:
                raise HTTP(403, T("Access denied (no create rights)"))
        form.process()
        data = form.as_dict(flat=True)
        response.flash = None
        return dict(form=data)

    # "query": run a client-supplied query after an rbac check
    elif request.args(3) == "query":
        if request.is_gae:
            raise HTTP(500, T("Not implemented"))
        query = myclientapi.database(request.vars.query).query
        result = myclientapi.settings.rbac("query", None, None, query)
        if result[0]:
            if result[1] is None:
                raise HTTP(500, T("Invalid Query"))
            else:
                # rbac may rewrite the query; run the approved version
                data = myclientapi.database(result[1]).select().as_dict()
        else:
            raise HTTP(403, T("Access denied (no query rights)"))
        return dict(rows=data)

    # "user": bare login/logout session management
    elif request.args(3) == "user":
        if request.args(5) == "login":
            if auth.user_id:
                raise HTTP(500, T("There is an already open user session"))
            else:
                result = auth.login_bare(request.vars.username,
                                         request.vars.password)
                if result:
                    message = T("ok")
                    profile = auth.user.as_dict()
                else:
                    message = T("Login failed")
                    profile = None
                return dict(profile=profile, message=message)
        elif request.args(5) == "logout":
            if not auth.is_logged_in():
                raise HTTP(500, T("There is no user session"))
            session.auth = None
            return dict(profile=None, message=T("ok"))
        else:
            raise HTTP(500, T("Not implemented: %s") % request.args(5))
        return dict(result=None)
    else:
        raise HTTP(500, T("Invalid operation %s") % request.args(3))
Code example #18 (score: 0)
File: scheduler.py — Project: MSCAOps/parapet
def serverTask(accountId, appId, devPhase, region, kvCheck, pbPath=None):
    """Select EC2 hosts matching the given filters and run an Ansible playbook
    against them.

    accountId: account number; 1 means "all accounts".
    appId: application name, or "All Applications" for a wildcard match.
    devPhase / region: exact match when longer than one character, else
        wildcard.
    kvCheck: JSON string of nested key/value checks applied against each
        host's stored notes; an ec2_state=='running' check is always added.
    pbPath: URL of a YAML playbook to download and run; the playbook's first
        play determines the inventory host group.

    Returns 0 after the playbook run completes.

    NOTE(review): this function uses Python 2-only constructs throughout
    (dict.has_key, print statements, 0600 octal literal, urllib2).
    """
    # TODO: it would be nice to store these in a nice protected dict
    # and then write out the key to disk only for running the playbook...
    # We could put all of them in and identify a default key. Then if we
    # have an entry for the ssh key identified by the host, use that....
    sshKeyFilePath = "/home/ec2-user/.ssh/msca-devops.pem"

    # Directory to write out inventories and playbooks...
    runtimeDir = "/data/parapet/"

    grid = {}
    grid['validHosts'] = {}
    logger.debug("Task UUID: {0}".format(W2P_TASK.uuid))
    logger.debug("Account ID: {0}".format(accountId))
    if int(accountId) == 1:
        logger.debug("Setting account Query to all accounts")
        accountQuery = db.hostInfo.accountNumber > 1
    else:
        accountQuery = db.hostInfo.accountNumber == accountId

    logger.debug("Application: '{0}'".format(appId))
    if appId == "All Applications":
        appQuery = db.hostInfo.app.like('%')
    else:
        appQuery = db.hostInfo.app == appId

    logger.debug("DevPhase: {0}".format(devPhase))
    if len(devPhase) > 1:
        devQuery = db.hostInfo.devPhase == devPhase
    else:
        logger.debug("Setting devPhase to %")
        devQuery = db.hostInfo.devPhase.like('%')

    logger.debug("Region: {0}".format(region))
    if len(region) > 1:
        regionQuery = db.hostInfo.region == region
    else:
        logger.debug("Setting region to %")
        regionQuery = db.hostInfo.region.like('%')

    logger.debug("hostFilter: {0}".format(kvCheck))
    hostFilter = json.loads(kvCheck)
    # always require running instances, creating the awsInfo level if absent
    try:
        hostFilter['awsInfo']['ec2_state'] = 'running'
    except KeyError:
        hostFilter['awsInfo'] = {}
        hostFilter['awsInfo']['ec2_state'] = 'running'
    logger.debug("HF: {0}".format(hostFilter))

    # Get the hosts that match the base query
    dbQuery = ((accountQuery)&(appQuery)&(devQuery)&(regionQuery))
    s = db(dbQuery)
    rows = s.select()

    # Iterate through the core hosts and apply the hostFilter
    for row in rows:
        # Get the host data from the notes field
        hostNotes = json.loads(row['notes'])
        # Verify that all of the things in the hostFilter are true
        for key in hostFilter.keys():
            if hostNotes.has_key(key):
                for check in hostFilter[key].keys():
                    try:
                        if hostFilter[key][check] == hostNotes[key][check]:
                            if grid['validHosts'].has_key(row['instance_id']) is False:
                                # Passes the test, set the AWS instanceID to the databaseID
                                grid['validHosts'][row['instance_id']] = row['id']
                            # If this host has already failed a prior test, don't add it now
                            elif grid['validHosts'][row['instance_id']] is None:
                                pass
                        else:
                            # Host fails the test, set it to None (clean it up later)
                            grid['validHosts'][row['instance_id']] = None
                    except KeyError:
                        # If the host doesn't have a matching key, then it doesn't match the filter
                        grid['validHosts'][row['instance_id']] = None


    # Get rid of the hosts that don't match the hostFilter
    # (safe while deleting in Python 2 because .keys() returns a list copy)
    for key in grid['validHosts'].keys():
        if grid['validHosts'][key] is None:
            del grid['validHosts'][key]

    logger.debug("HostIDs: {0}".format(grid['validHosts'].values()))
    logger.debug("This search found {0} hosts".format(len(grid['validHosts'])))

    # Download and parse playbook file here... write it out as:
    #  runtimeDir/Task_UUID.yml
    # use serializers.loads_yaml()
    if pbPath:
        pbData = serializers.loads_yaml(urllib2.urlopen(pbPath).read())
        hostGroup = pbData[0]['hosts']
        fileutils.write_file(os.path.join(runtimeDir,W2P_TASK.uuid+".yml"),serializers.yaml(pbData))



    # Generate inventory file
    #  runtimeDir/Task_UUID.inv
    # Need to parse out teh playbook file first to determine what group we should put the hosts in.
    # NOTE(review): hostGroup is only assigned inside the `if pbPath:` block
    # above — a falsy pbPath raises UnboundLocalError here.  Confirm callers
    # always supply pbPath.
    invHosts = "[{0}]\n".format(hostGroup)
    for row in db(db.hostInfo.id.belongs(grid['validHosts'].values())).select():
        hostNotes = serializers.loads_json(row.notes)
        if utils.is_valid_ip_address(hostNotes['awsInfo']['ec2_ip_address']):
            sshKeyFilePath = os.path.join(runtimeDir,W2P_TASK.uuid+hostNotes['awsInfo']['ec2_ip_address']+".key")
            try:
                # prefer the key named by the host; fall back to the default key
                thisHostKey = serverTask_config.keyData[hostNotes['awsInfo']['ec2_key_name']] 
                sshKeyFilePath = os.path.join(runtimeDir,W2P_TASK.uuid+hostNotes['awsInfo']['ec2_ip_address']+".key")
            except KeyError:
                logger.debug("Unable to find a key named {0} using default".format(hostNotes['awsInfo']['ec2_key_name']))
                thisHostKey = serverTask_config.keyData['default']

            # write the private key with owner-only permissions (0600 is a
            # Python 2 octal literal)
            fileutils.write_file(sshKeyFilePath,thisHostKey)
            os.chmod(sshKeyFilePath,0600)

            thisHostString = "{0} ansible_ssh_host={1} ansible_ssh_private_key_file={2} ansible_ssh_user=ec2-user\n".format(row.instance_id,hostNotes['awsInfo']['ec2_ip_address'],sshKeyFilePath)
            invHosts = "{0} {1}".format(invHosts,thisHostString)
        else:
            logger.warn("{0} is not a valid IP address".format(hostNotes['awsInfo']['ec2_ip_address']))

    fileutils.write_file(os.path.join(runtimeDir,W2P_TASK.uuid+".inv"),invHosts)

    # Run the task
    cmdLine = "/usr/bin/ansible-playbook -e 'parapetServer=true' -vvv -i {invFile} {pbFile}".format(invFile=os.path.join(runtimeDir,W2P_TASK.uuid+".inv"),pbFile=os.path.join(runtimeDir,W2P_TASK.uuid+".yml"))

    logger.debug("{0}".format(cmdLine))

    #ansibleOut = subprocess.Popen(cmdLine, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0]
    # stream the playbook output (Python 2 print statement with trailing
    # comma to suppress the extra newline)
    ansible = subprocess.Popen(cmdLine,shell=True,cwd=runtimeDir,stdout=subprocess.PIPE)
    while ansible.poll() is None:
        # Only 1 newline??
        print ansible.stdout.readline(),

    #logger.info(ansibleOut)
    #print ansibleOut
    # best-effort cleanup of the per-host key files written above
    # NOTE(review): bare except hides real errors — narrow to OSError/IOError
    try:
        keyFiles = fileutils.listdir(runtimeDir,expression="^"+W2P_TASK.uuid+".*\.key$", drop=False)
        for keyFile in keyFiles:
            logger.debug("Removing: {0}".format(keyFile))
            fileutils.recursive_unlink(keyFile)
    except:
        logger.error("Unable to remove key files")
    return 0