Example #1
 def __set_key(key, value):
     try:
         uwsgi.cache_update(key, value)
         return True
     except Exception:
         pass
     return False
Example #2
def memrefresh(feedurl):
	value = jsonld_from_feedparser(loaded_feed(unquote(feedurl),repo,True).feed)
	if cache_exists(feedurl):
		cache_update(feedurl,value,3600*24)
	else:
		cache_set(feedurl,value,3600*24)
	return value
Example #3
 def test_big_update(self):
     self.assertTrue(uwsgi.cache_set('key1', 'X' * 40, 0, 'items_4_10'))
     self.assertTrue(uwsgi.cache_update('key1', 'X' * 10, 0, 'items_4_10'))
     self.assertTrue(uwsgi.cache_del('key1', 'items_4_10'))
     self.assertIsNone(uwsgi.cache_update('key1', 'X' * 51, 0,
                                          'items_4_10'))
     self.assertTrue(uwsgi.cache_update('key1', 'X' * 50, 0, 'items_4_10'))
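These assertions only hold for specific cache geometries declared in the uWSGI configuration, which the snippet does not show. For reference, named caches are created with the cache2 option; the line below is only a sketch of that syntax with arbitrary sizes (the real tests use small caches whose parameters are presumably encoded in names like items_4_10, but those values are not shown here):

# illustrative uwsgi.ini fragment; sizes are arbitrary, not the test suite's
[uwsgi]
cache2 = name=mycache,items=100,blocksize=64

uwsgi.cache_update() returns True on success and None on failure, which is why the oversized writes above are asserted with assertIsNone.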
Example #4
 def set(self, key, value, expires=0):
     key = str(key)
     value = pickle.dumps(value)
     if uwsgi.cache_exists(key, self.cachename):
         uwsgi.cache_update(key, value, expires, self.cachename)
     else:
         uwsgi.cache_set(key, value, expires, self.cachename)
     self._keys.add(key)
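The matching read path is not shown in this example; a minimal sketch of a get method under the same assumptions (pickled values, named cache in self.cachename) might look like:

 def get(self, key, default=None):
     # hypothetical companion to set() above; same cachename and pickling
     value = uwsgi.cache_get(str(key), self.cachename)
     if value is None:
         return default
     return pickle.loads(value)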
Example #5
def application(environ, start_response):
    sr = StartResponseWrapper(start_response)
    if environ['PATH_INFO'].startswith('/search'):
        return gw(environ, sr)
    page = ef(environ, sr)
    if sr.status == 200:
        uwsgi.cache_update(environ['PATH_INFO'], ''.join(page))
    return page
Example #7
def set_value(key, value):
    if UWSGI:
        if uwsgi.cache_exists(key):
            uwsgi.cache_update(key, pickle.dumps(value))
        else:
            uwsgi.cache_set(key, pickle.dumps(value))
    else:
        _cache[key] = value
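The matching getter is implied but not shown; a sketch under the same UWSGI flag and _cache fallback:

def get_value(key, default=None):
    # hypothetical counterpart to set_value() above
    if UWSGI:
        value = uwsgi.cache_get(key)
        return pickle.loads(value) if value is not None else default
    return _cache.get(key, default)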
Example #8
def kill():
    try:
        uwsgi.cache_update('stop_threads', 'True')
        app.logger.info(
            f"Spotify application paused with: {uwsgi.cache_get('stop_threads')}"
        )
    except Exception:
        app.logger.info("Couldn't kill process")
    return redirect(url_for('projects.projects'))
Example #9
 def cache_update(self, key, value, expire=None, cache_server=None):
     '''Proxy for uwsgi.cache_update() with optional expiry and cache name.'''
     # check the combined case first; the original tested it after the
     # expire-only branch, which made it unreachable
     if expire is not None and cache_server is not None:
         return uwsgi.cache_update(key, value, expire, cache_server)
     elif expire is not None:
         return uwsgi.cache_update(key, value, expire)
     elif cache_server is not None:
         # the cache name is the 4th positional argument, so expire must be supplied
         return uwsgi.cache_update(key, value, 0, cache_server)
     else:
         return uwsgi.cache_update(key, value)
Example #10
def reset_debug_level(level):
    import uwsgi
    if level == '-1':
        uwsgi.cache_del("DEBUG")
    else:
        if uwsgi.cache_exists("DEBUG"):
            uwsgi.cache_update("DEBUG", level)
        else:
            uwsgi.cache_set("DEBUG", level)
    return redirect(url_for('tell_me_if_im_going_to_die', lat=39.9708657, lon=-75.1427425, meters=1000))
Example #11
def keep_alive(source_hash):
    data = {"source_hash": source_hash}

    reg_dump = cache_man.cache_get(source_hash)
    if reg_dump:
        cache_man.cache_update(source_hash, reg_dump,
                               settings.DEFAULT_CACHE_TTL)
        return data, 200

    return data, 404
Example #12
def increment_request_count(user_id):
    worker_id = str(uwsgi.worker_id())

    if uwsgi.cache_get(worker_id):
        c = int(uwsgi.cache_get(worker_id))
        c += 1
        uwsgi.cache_update(worker_id, str(c))
    else:
        # count this first request too (the original seeded the counter with '0')
        uwsgi.cache_set(worker_id, '1')

    return f"user_id:{user_id}:worker_id:{worker_id}:request_number:{uwsgi.cache_get(worker_id).decode()}"
Example #13
def getAllTriggersAlarming():
    triggerCached = cache_get('triggerTelao',
                              cache_options['triggerGet']['name'])
    if triggerCached:
        return json.loads(triggerCached)
    elif cache_get('updatingCache',
                   cache_options['updates']['name']) == 'True':
        # another worker is refreshing the cache; wait until it finishes
        while cache_get('updatingCache',
                        cache_options['updates']['name']) == 'True':
            time.sleep(0.3)
        # 'triggerTelao' lives in the triggerGet cache (where it is set below),
        # not in the updates cache the original read from
        return json.loads(
            cache_get('triggerTelao', cache_options['triggerGet']['name']))
    else:
        if cache_exists('updatingCache', cache_options['updates']['name']):
            cache_update('updatingCache', 'True',
                         cache_options['updates']['expiration_time'],
                         cache_options['updates']['name'])
        else:
            cache_set('updatingCache', 'True',
                      cache_options['updates']['expiration_time'],
                      cache_options['updates']['name'])

        admin = Admin()
        zbx_admin_token = admin.auth()

        triggers = fowardZ.sendToZabbix(method='trigger.get',
                                        params={
                                            'selectHosts': ["name"],
                                            'selectGroups': ['groups'],
                                            'selectLastEvent':
                                            ['lastEvent', 'acknowledged'],
                                            'expandComment':
                                            1,
                                            'expandDescription':
                                            1,
                                            'only_true':
                                            1,
                                            'output':
                                            'extend'
                                        },
                                        auth=zbx_admin_token)

        cache_set('triggerTelao', json.dumps(triggers),
                  cache_options['triggerGet']['expiration_time'],
                  cache_options['triggerGet']['name'])
        cache_update('updatingCache', 'False',
                     cache_options['updates']['expiration_time'],
                     cache_options['updates']['name'])

    return triggers
Example #14
 def sighandler(signum):
     now = int(time.time())
     key = 'scheduler_call_time_signal_' + str(signum)
     uwsgi.lock()
     try:
         updating = uwsgi.cache_get(key)
         if updating is not None:
             updating = int.from_bytes(updating, 'big')
             if now - updating < delay:
                 return
         uwsgi.cache_update(key, now.to_bytes(4, 'big'))
     finally:
         uwsgi.unlock()
     func(*args)
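How sighandler is wired up is not shown (func, args and delay are presumably closed over by a decorator). A sketch of registering such a handler on a uWSGI timer, with the signal number and period as assumptions:

import uwsgi

SIGNUM = 99                          # hypothetical uWSGI signal slot (0-255)
uwsgi.register_signal(SIGNUM, 'worker', sighandler)
uwsgi.add_timer(SIGNUM, 60)          # fires sighandler(SIGNUM) every 60 seconds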
Example #15
def register(source_hash, method_name, packed_data):
    logging.info("{}: {}".format(request.method, request.url))
    data = rpc_data_unpack(request.get_data())
    logging.debug(str(data))

    fn_data = {method_name: data}

    if cache_man.cache_exists(source_hash):
        reg_dump = pickle.loads(cache_man.cache_get(source_hash))
        reg_dump.update(fn_data)
        cache_man.cache_update(source_hash, pickle.dumps(reg_dump),
                               settings.DEFAULT_CACHE_TTL)
    else:
        cache_man.cache_set(source_hash, pickle.dumps(fn_data),
                            settings.DEFAULT_CACHE_TTL)
Example #16
def get_best_score(game_data):

    bestscorecache = uwsgi.cache_get('bestscore')
    if bestscorecache is None:
        logger.debug('Load scores')
        scores = storage.get_top_scores(10)
        uwsgi.cache_update('bestscore', json.dumps(scores).encode('utf-8'))
        bestscorecache = uwsgi.cache_get('bestscore')

    scorehash = hashlib.md5(bestscorecache).hexdigest()
    if scorehash != game_data.get('bestscore', ''):
        logger.debug('Send new score to client')
        game_data['bestscore'] = scorehash
        return json.loads(bestscorecache.decode('utf-8'))

    return None
Example #17
def callback():
    startup.getUserToken(request.args.get('code'))
    if not uwsgi.cache_exists('isRunning'):
        app.logger.info(
            "Creating new thread for refreshing spotify token and user stats.")
        uwsgi.cache_set('isRunning', 'True')
        uwsgi.cache_set('stop_threads', 'False')
        sp_t = spotify_thread(2500, "Thread-spotify")
        sp_t.start()
    try:
        if uwsgi.cache_get('isRunning').decode(
                'utf-8') == 'True' and uwsgi.cache_get('stop_threads').decode(
                    'utf-8') == 'True':
            app.logger.info("Relancement de l'application spotify")
            uwsgi.cache_update('stop_threads', 'False')
    except AttributeError:
        app.logger.error(
            f"The isRunning or stop_threads variable is not initialised, values: ir:{uwsgi.cache_get('isRunning')} and st:{uwsgi.cache_get('stop_threads')}"
        )
    list_time_range = ['short_term', 'medium_term', 'long_term']
    list_type = ['artists', 'tracks']
    dict_index = {
        'short_term_artists': 1,
        'medium_term_artists': 2,
        'long_term_artists': 3,
        'short_term_tracks': 4,
        'medium_term_tracks': 5,
        'long_term_tracks': 6
    }

    for type in list_type:
        for time_range in list_time_range:
            set_analytics_data(
                dict_index[f"{time_range}_{type}"],
                json.dumps(
                    json.loads(
                        get_users_top(
                            startup.getAccessToken()[1],
                            type,
                            time_range,
                        ))), time_range, type)

    app.logger.info(
        f"All the threads are listed below : {[thread.name for thread in threading.enumerate()]}"
    )

    return redirect(url_for('project_spotify.spotify'))
Example #18
    def __call__(self, request):
        if settings.CACHE_FORMATS and UWSGI_ENABLED:
            current_data_version = RootServer.objects.order_by(
                '-last_successful_import')
            current_data_version = current_data_version.values_list(
                'last_successful_import', flat=True)
            current_data_version = str(current_data_version[0])
            cache_data_version = uwsgi.cache_get('cache_data_version')
            if cache_data_version:
                cache_data_version = cache_data_version.decode('utf-8')
            if current_data_version != cache_data_version:
                logger.info(
                    "clearing cache, current_data_version: {}, cache_data_version: {}"
                    .format(current_data_version, cache_data_version))
                cache.clear()
                uwsgi.cache_update('cache_data_version', current_data_version)

        response = self.get_response(request)
        return response
Example #19
def _create_worker_pool(receipt, test_data, max_procs, stop_event):
    """Parse provided test data and ensure that all test options are properly formatted
    before passing off to the worker procs in the pool. Once the tests have been completed,
    update the UWSGI cache-key with the results.

    Args:
        receipt     (str)  : The UWSGI cache-key to append test results to.
        test_data   (dict) : The tests to execute.
        max_procs   (int)  : The maximum number of parallel processes to be used in the worker pool
        stop_event  (class): Threading event class used to stop the daemon upon completion.

    """
    tests = []
    test_status = {"receipt": receipt, "is_running": True, "results": {}}
    for (test_type, test_options) in test_data.items():
        for options in test_options:
            # Ensure that all options are lowercase.
            options = {key.lower(): value for key, value in options.items()}
            tests.append({"type": test_type, "options": options})
            if test_type not in test_status["results"]:
                test_status["results"][test_type] = []
    uwsgi.cache_update(receipt, json.dumps(test_status), 600, "receipts")
    # Execute tests in parallel.
    if len(tests) < max_procs:
        pool = multiprocessing.Pool(len(tests))
    else:
        pool = multiprocessing.Pool(max_procs)
    result = pool.map(_worker, tests)
    # Wait for ALL results before terminating the pool.
    pool.close()
    pool.join()
    # Parse test results and append them to our test status.
    for test in result:
        test_status["results"][test["type"]].append(test["results"])
    test_status["is_running"] = False
    # Update the client's receipt with the current test status including test results.
    uwsgi.cache_update(receipt, json.dumps(test_status), 600, "receipts")
    # Ensure that the daemon is stopped after cache update.
    stop_event.set()
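The read side of the receipt is not part of this example; a minimal status lookup against the same "receipts" cache could be (hypothetical helper):

def get_test_status(receipt):
    # hypothetical reader for the receipt written by _create_worker_pool()
    raw = uwsgi.cache_get(receipt, "receipts")
    if raw is None:
        return None  # receipt expired or never existed
    return json.loads(raw)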
Example #20
 def _cache_add_msg(self, worker_id, data):
     msg_key = None
     for msg_id in range(0, 10):
         msg_key = self.cache_msg_key % (worker_id, msg_id)
         if uwsgi.cache_exists(msg_key, self.cache) is None:
             break
         msg_key = None
     if msg_key is None:
         msg_key = self.cache_msg_key % (worker_id, 0)
         logger.warning(
             'Cached queue for worker %s is full, overwrite data' %
             worker_id)
     logger.debug('Store message from worker %s to %s' %
                  (self.worker_id, msg_key))
     return uwsgi.cache_update(
         msg_key, pickle.dumps(data),
         self.cache_timeout if worker_id else self.cache_fallback_timeout,
         self.cache)
Example #21
def loop():
    while True:
        key = uwsgi.mule_get_msg()
        key = key.decode('utf-8')
        ids = key.split('_')
        uwsgi.cache_set(key, 'inprogress')
        try:
            result = bfs(ids[0], ids[1])
        except Exception:
            uwsgi.cache_update(key, 'fail')
        else:
            if result:
                uwsgi.cache_update(key, 'found')
                print(key)
            else:
                uwsgi.cache_update(key, 'notfound')
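The producer side of this loop is implied: something sends the key to the mule and later polls the shared cache for 'found'/'notfound'/'fail'. A sketch, assuming the same key format:

def request_search(id1, id2):
    # hypothetical producer for the mule loop above
    key = '{}_{}'.format(id1, id2)
    uwsgi.mule_msg(key)  # delivered to uwsgi.mule_get_msg() in the mule
    return key           # callers can poll uwsgi.cache_get(key) for the status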
Example #22
def run(debug, host, port, close, memory, input, output, arango, user, project,
        limit, pictures, summary, relations, corefs, newgraph, documentedges):
    uwsgi.cache_update('busy', b'1')

    if debug:
        logging.basicConfig(level=logging.DEBUG)
        logging.debug("Debug on.")
    else:
        logging.basicConfig(level=logging.INFO)

    nlp_bytes = uwsgi.cache_get('nlp')

    # Set progress bar start parameters
    if nlp_bytes:
        init_time = 2
    else:
        init_time = 10

    if pictures or summary:
        nlp_time = 60
    else:
        nlp_time = 80

    yield "data:1\n\n"

    # If a Stanford CoreNLP server host and port are given, use that; otherwise start a new instance through the Python wrapper
    if host and port:
        if nlp_bytes:
            temp_nlp = pickle.loads(nlp_bytes)
            temp_nlp.close()

        nlp = StanfordCoreNLP(host, port)
        uwsgi.cache_update('nlp', pickle.dumps(nlp))
        logging.debug("nlp to cache: host {}".format(uwsgi.cache_get('nlp')))
    elif nlp_bytes:
        nlp = pickle.loads(nlp_bytes)
        logging.debug("nlp from cache: {}".format(uwsgi.cache_get('nlp')))
    else:
        nlp = StanfordCoreNLP(r'../deps/stanford-corenlp/',
                              memory=memory,
                              timeout=200000,
                              quiet=not debug)
        uwsgi.cache_update('nlp', pickle.dumps(nlp))
        logging.debug("nlp to cache: file {}".format(uwsgi.cache_get('nlp')))

    DOC_CHUNK_SIZE = 10000

    # Initialise corenlp properties, s3 bucket connection, and doc count for progress bar
    data, n_items, properties, s3 = init(input,
                                         output,
                                         nlp,
                                         relations=relations,
                                         corefs=corefs,
                                         chunk_size=DOC_CHUNK_SIZE,
                                         limit=limit)
    logging.debug("items to process: {}".format(n_items))

    logging.debug("Loading CoreNLP models...")

    # Load corenlp models in separate thread to allow to send regular pings to the frontend
    server_init_thread = Thread(target=nlp.annotate, args=("", properties))
    server_init_thread.start()

    while server_init_thread.is_alive():
        time.sleep(30)
        yield "data:1\n\n"
    else:
        server_init_thread.join()
        yield "data:" + str(init_time) + "\n\n"

    # Create or load existing networkx graph object for this project
    graph_path = os.path.join(output, user, project, "nlp_outputs",
                              'graph_temp.pkl')
    if not newgraph:
        if output[:5] == 's3://' and s3.exists(graph_path):
            with s3.open(graph_path, 'rb') as f:
                logging.debug("Reading existing graph...")
                G = nx.read_gpickle(f)
        elif os.path.isfile(graph_path):
            G = nx.read_gpickle(graph_path)
        else:
            G = nx.MultiGraph()
    else:
        if arango:
            r = requests.delete("http://" + arango + "/ingest/" + user + "/" +
                                project + "/")
        G = nx.MultiGraph()

    # Main NLP parsing loop. Run corenlp annotator pipeline, resolve coreferences and extract relations. Then load into networkx graph
    i = 0
    for document in parse_docs(data,
                               input,
                               output,
                               user,
                               project,
                               nlp,
                               properties,
                               chunk_size=DOC_CHUNK_SIZE,
                               limit=limit,
                               s3=s3):
        yield "data:" + str(int(i / n_items * nlp_time) + init_time) + "\n\n"

        if corefs:
            resolve_coreferences(document[1])
            yield "data:" + str(int(i / n_items * nlp_time) +
                                init_time) + "\n\n"

        for r in make_entity_relationships(document[0],
                                           document[1],
                                           document[2],
                                           document[3],
                                           relations=relations,
                                           documentedges=documentedges):
            key_suffix = r.semantic_type or ""
            G.add_edge(r.entity1._key,
                       r.entity2._key,
                       key=r.type + key_suffix,
                       source_file=r.source_file,
                       word_dist=r.word_dist,
                       document_id=r.document_id,
                       document_date=r.document_date,
                       from_char_offset=(r.e1_char_start, r.e1_char_end),
                       to_char_offset=(r.e2_char_start, r.e2_char_end),
                       semantic_type=r.semantic_type,
                       label_first=r.entity1.label_orig,
                       label_second=r.entity2.label_orig)

            nodes = []
            elements1 = r.entity1.__dict__
            nodes.append((r.entity1._key, elements1))
            elements2 = r.entity2.__dict__
            nodes.append((r.entity2._key, elements2))

            G.add_nodes_from(nodes)
        yield "data:" + str(int(i / n_items * nlp_time) + init_time) + "\n\n"
        i += 1

    # Close the NLP server if required. Keep open to avoid model loading next time
    if close:
        nlp.close()
        uwsgi.cache_del('nlp')

    logging.debug("Calculating same sentence centrality...")
    set_type_centrality(G, "same_sentence")

    if documentedges:
        yield "data:" + str(init_time + nlp_time + 2) + "\n\n"
        set_type_centrality(G, "same_document")
        yield "data:" + str(init_time + nlp_time + 5) + "\n\n"
    else:
        yield "data:" + str(init_time + nlp_time + 5) + "\n\n"

    # Write graph object to JSON representation
    out_data = json_graph.node_link_data(G)

    # Serialise and write the graph object for use in next upload
    if output[:5] == 's3://':
        with s3.open(graph_path, 'wb') as f:
            nx.write_gpickle(G, f)
    else:
        nx.write_gpickle(G, graph_path)

    del G

    # remove and rename output variables to fit data api requirements
    out_data.pop('directed')
    out_data.pop('multigraph')

    out_data['vertices'] = out_data.pop('nodes')
    out_data['edges'] = out_data.pop('links')

    # Run wikipedia lookups of thumbnail urls and article summaries
    if pictures or summary:
        processes = []
        with ThreadPoolExecutor(max_workers=None) as executor:
            for idx, v in enumerate(out_data['vertices']):
                v.pop('id')

                if v['_key'].split("_")[-1] not in ('LOCATION', 'MISC',
                                                    'ORGANIZATION', 'PERSON',
                                                    'COREF'):
                    url = 'https://en.wikipedia.org/wiki/' + v['_key']
                    processes.append(
                        executor.submit(getWikiImageSummary, url, pictures,
                                        summary, idx))

            i = 0
            for task in as_completed(processes):
                logging.debug(
                    "Finished processing vertex: {} out of {}".format(
                        i + 1, len(processes)))
                imageurl, summarytext, idx = task.result()
                out_data['vertices'][idx]['image_url'], out_data['vertices'][
                    idx]['summary'] = imageurl, summarytext
                if i % 10 == 0:
                    yield "data:" + str(
                        int(i / len(processes) * (80 - nlp_time)) + nlp_time +
                        init_time + 5) + "\n\n"
                i += 1

    # More renaming to fit data api requirements
    for e in out_data['edges']:
        e['_from'] = "vertices/" + clean_label(e.pop('source'))
        e['_to'] = "vertices/" + clean_label(e.pop('target'))
        e['type'] = e.pop('key')[:13]
        e['_key'] = str(uuid.uuid4())

    yield "data:96\n\n"

    # Either load data into arango db, or save json representation to file system or s3
    LINE_LIMIT = 100000

    if arango:
        logging.debug("sending: {}, {}, {}".format(arango, user, project))

        send_to_arango(out_data,
                       arango,
                       user,
                       project,
                       LINE_LIMIT,
                       doc_type="vertices")
        yield "data:97\n\n"

        send_to_arango(out_data,
                       arango,
                       user,
                       project,
                       LINE_LIMIT,
                       doc_type="same_sentence")

        yield "data:98\n\n"

        if documentedges:
            logging.debug("adding document edges")
            send_to_arango(out_data,
                           arango,
                           user,
                           project,
                           LINE_LIMIT,
                           doc_type="same_document")

    else:
        edges_ss = [
            e for e in out_data['edges'] if e['type'] == "same_sentence"
        ]

        if documentedges:
            edges_sd = [
                e for e in out_data['edges'] if e['type'] == "same_document"
            ]

        write_list_in_chunks(out_data['vertices'], LINE_LIMIT // 10, output,
                             user, project, 'vertices')
        yield "data:97\n\n"
        write_list_in_chunks(edges_ss, LINE_LIMIT, output, user, project,
                             'edges_ss')
        yield "data:98\n\n"
        if documentedges:
            write_list_in_chunks(edges_sd, LINE_LIMIT, output, user, project,
                                 'edges_sd')

    uwsgi.cache_del('busy')
    yield "data:100\n\n"
Example #23
def get_layermetadata(layerids, kmiserver="https://kmi.dbca.wa.gov.au/geoserver", results=None):
    # avoid a shared mutable default argument
    if results is None:
        results = {}
    multiple_layers = True
    if isinstance(layerids,basestring):
        layerids = [layerids]
        multiple_layers = False
    #group layers against layer workspace
    layers = {}
    for layerid in layerids:
        layerid = layerid.strip()
        #check whether it is cached or not
        key = layermetadatakey(layerid)
        if uwsgi.cache_exists(key):
            try:
                metadata = uwsgi.cache_get(key)
                if metadata:
                    if layerid in results:
                        results[layerid].update(json.loads(metadata))
                    else:
                        results[layerid] = json.loads(metadata)
                    #print("Retrieve the metadata from cache for layer ({})".format(layerid))
                    continue
            except:
                pass

        layer = layerid.split(":")

        if len(layer) == 1:
            #no workspace
            layer_ws = ""
            layer = layer[0]
        else:
            layer_ws = layer[0]
            layer = layer[1]

        if layer_ws not in layers:
            layers[layer_ws] = [layer]
        else:
            layers[layer_ws].append(layer)


    if layers:
        session_cookie = settings.get_session_cookie()
        kmiserver = get_kmiserver(kmiserver)
        #find the layer's metadata 
        url = None
        for layer_ws,layers in layers.iteritems():
            if layer_ws:
                url = "{}/{}/wms?service=wms&version=1.1.1&request=GetCapabilities".format(kmiserver,layer_ws)
            else:
                url = "{}/wms?service=wms&version=1.1.1&request=GetCapabilities".format(kmiserver)
    
            res = requests.get(
                url,
                verify=False,
                cookies=session_cookie
            )
            res.raise_for_status()
    
            tree = ET.fromstring(res.content)

            capability = tree.find('Capability')
            if capability is None or not len(capability):
                raise Exception("getCapability failed")
            kmi_layers = capability.findall("Layer")
            while kmi_layers:
                kmi_layer = kmi_layers.pop()
                name = get_child_value(kmi_layer,"Name")
                
                if name:
                    try:
                        index = layers.index(name)
                    except:
                        index = -1
                    if index >= 0:
                        #this layer's metadata is requested by the user
                        if layer_ws:
                            layerid = "{}:{}".format(layer_ws,name)
                        else:
                            layerid = name

                        if layerid in results:
                            result = results[layerid]
                        else:
                            result = {"id":layerid}
                            results[layerid] = result

                        del layers[index]
    
                        result["title"] = get_child_value(kmi_layer,"Title")
                        result["abstract"] = get_child_value(kmi_layer,"Abstract")
                        result["srs"] = get_child_value(kmi_layer,"SRS")
                        bbox = kmi_layer.find("LatLonBoundingBox")
                        if bbox is not None:
                            result["latlonBoundingBox"] = [float(bbox.attrib["miny"]),float(bbox.attrib["minx"]),float(bbox.attrib["maxy"]),float(bbox.attrib["maxx"])]
                        else:
                            result["latlonBoundingBox"] = None
                        for bbox in kmi_layer.findall("BoundingBox"):
                            result["latlonBoundingBox_{}".format(bbox.attrib["SRS"].upper())] = [float(bbox.attrib["miny"]),float(bbox.attrib["minx"]),float(bbox.attrib["maxy"]),float(bbox.attrib["maxx"])]
    
                        #cache it for 6 hours
                        key = layermetadatakey(result["id"])
                        try:
                            if uwsgi.cache_exists(key):
                                uwsgi.cache_update(key, json.dumps(result),6 * 3600)
                            else:
                                uwsgi.cache_set(key, json.dumps(result),6 * 3600)
                        except:
                            pass
                            
                        #print("Retrieve the metadata from kmi for layer ({})".format(result["id"]))
    
                        if len(layers):
                            continue
                        else:
                            #already found metadata for all required layers
                            break
                sub_layers = kmi_layer.findall("Layer")
                if sub_layers:
                    kmi_layers += sub_layers
            
            if len(layers) == 1:
                if layer_ws:
                    raise Exception("The layer({}:{}) Not Found".format(layer_ws,layers[0]))
                else:
                    raise Exception("The layer({}) Not Found".format(layers[0]))
            elif len(layers) > 1:
                if layer_ws:
                    raise Exception("The layers({}) Not Found".format(",".join(["{}:{}".format(layer_ws,l) for l in layers])))
                else:
                    raise Exception("The layers({}) Not Found".format(",".join(layers)))

    if multiple_layers:
        return results
    else:
        return results[layerids[0]]
Example #24
def _get_profile(app):
    # get app profile
    profile = None
    appPath = os.path.join(settings.DIST_PATH, "{}.js".format(app))
    if not os.path.exists(appPath):
        raise Exception("Application({}) Not Found".format(app))
    key = "{}_profile".format(app)
    profileChanged = False
    if uwsgi.cache_exists(key):
        profile = uwsgi.cache_get(key)
    if profile:
        profile = json.loads(profile)
        if repr(
                os.path.getmtime(appPath)
        ) != profile["mtime"] or os.path.getsize(appPath) != profile["size"]:
            profileChanged = True
            profile = None
    if not profile:
        file_data = None
        with open(appPath, "rb") as f:
            file_data = f.read()
        m = profile_re.search(file_data.decode("utf-8"))
        profile = {
            'mtime': repr(os.path.getmtime(appPath)),
            'size': os.path.getsize(appPath),
            'profile': demjson.decode(m.group("profile") if m else "{}")
        }
        m = hashlib.md5()
        m.update(file_data)
        profile['profile']['build']['md5'] = base64.urlsafe_b64encode(
            m.digest()).decode().rstrip("=")
        file_data = None
        if profileChanged:
            uwsgi.cache_update(key, json.dumps(profile))
        else:
            uwsgi.cache_set(key, json.dumps(profile))

    profile["profile"]["dependents"] = {}
    # get vendor md5
    vendorPath = os.path.join(settings.DIST_PATH, "vendor.js")
    if not os.path.exists(vendorPath):
        raise Exception("Vendor library Not Found")
    key = "{}_profile".format("vendor")
    profileChanged = False
    vendorProfile = None
    if uwsgi.cache_exists(key):
        vendorProfile = uwsgi.cache_get(key)
    if vendorProfile:
        vendorProfile = json.loads(vendorProfile)
        if repr(os.path.getmtime(
                vendorPath)) != vendorProfile["mtime"] or os.path.getsize(
                    vendorPath) != vendorProfile["size"]:
            profileChanged = True
            vendorProfile = None
    if not vendorProfile:
        m = hashlib.md5()
        with open(vendorPath, "rb") as f:
            m.update(f.read())
        vendorProfile = {
            'mtime': repr(os.path.getmtime(vendorPath)),
            'size': os.path.getsize(vendorPath),
            'vendorMD5':
            base64.urlsafe_b64encode(m.digest()).decode().rstrip("=")
        }
        if profileChanged:
            uwsgi.cache_update(key, json.dumps(vendorProfile))
        else:
            uwsgi.cache_set(key, json.dumps(vendorProfile))
    profile["profile"]["dependents"]["vendorMD5"] = vendorProfile["vendorMD5"]
    # get env profile
    envPath = os.path.join(settings.BASE_DIST_PATH, 'release', 'static', 'js',
                           "{}.env.js".format(settings.ENV_TYPE))
    if not os.path.exists(envPath):
        raise Exception("'{}.env.js' is missing.".format(settings.ENV_TYPE))
    else:
        key = "{}_{}_profile".format("env", settings.ENV_TYPE)
        profileChanged = False
        envProfile = None
        if uwsgi.cache_exists(key):
            envProfile = uwsgi.cache_get(key)
        if envProfile:
            envProfile = json.loads(envProfile)
            if repr(os.path.getmtime(envPath)) != envProfile[
                    "mtime"] or os.path.getsize(envPath) != envProfile["size"]:
                profileChanged = True
                envProfile = None
        if not envProfile:
            m = hashlib.md5()
            with open(envPath, "rb") as f:
                m.update(f.read())

            envProfile = {
                'mtime': repr(os.path.getmtime(envPath)),
                'size': os.path.getsize(envPath),
                'envMD5':
                base64.urlsafe_b64encode(m.digest()).decode().rstrip("=")
            }
            if profileChanged:
                uwsgi.cache_update(key, json.dumps(envProfile))
            else:
                uwsgi.cache_set(key, json.dumps(envProfile))

        profile["profile"]["dependents"]["envMD5"] = envProfile["envMD5"]
        profile["profile"]["envType"] = settings.ENV_TYPE
    # get style profile
    stylePath = os.path.join(settings.BASE_DIST_PATH, 'release', 'static',
                             'css', "style.css")
    if not os.path.exists(stylePath):
        raise Exception("'style.css' is missing.")
    else:
        key = "style_profile"
        profileChanged = False
        styleProfile = None
        if uwsgi.cache_exists(key):
            styleProfile = uwsgi.cache_get(key)
        if styleProfile:
            styleProfile = json.loads(styleProfile)
            if repr(os.path.getmtime(
                    stylePath)) != styleProfile["mtime"] or os.path.getsize(
                        stylePath) != styleProfile["size"]:
                profileChanged = True
                styleProfile = None
        if not styleProfile:
            m = hashlib.md5()
            with open(stylePath, "rb") as f:
                m.update(f.read())
            styleProfile = {
                'mtime':
                repr(os.path.getmtime(stylePath)),
                'size':
                os.path.getsize(stylePath),
                'styleMD5':
                base64.urlsafe_b64encode(m.digest()).decode().rstrip("=")
            }
            if profileChanged:
                uwsgi.cache_update(key, json.dumps(styleProfile))
            else:
                uwsgi.cache_set(key, json.dumps(styleProfile))
        profile["profile"]["dependents"]["styleMD5"] = styleProfile["styleMD5"]
    return profile["profile"]
Example #25
 def test_big_item(self):
     self.assertIsNone(uwsgi.cache_update('key1', 'HELLOHELLOHELLOHEL', 0, 'items_17'))
     self.assertTrue(uwsgi.cache_update('key1', 'HELLOHELLOHELLOHE', 0, 'items_17'))
Example #26
def hello_timer(num):
    i = int(uwsgi.cache_get('Service2Timer'))
    i += 1
    uwsgi.cache_update('Service2Timer', str(i))
Example #27
 def test_two_items_using_four_blocks(self):
     self.assertTrue(uwsgi.cache_update('key1', 'HE', 0, 'items_2'))
     self.assertTrue(uwsgi.cache_update('key2', 'LL', 0, 'items_2'))
     self.assertTrue(uwsgi.cache_del('key1', 'items_2'))
     self.assertIsNone(uwsgi.cache_update('key1', 'HEL', 0, 'items_2'))
     self.assertTrue(uwsgi.cache_update('key1', 'HE', 0, 'items_2'))
Example #28
def counter(self, cookie, request, response):
    i = int(uwsgi.cache_get('Service2Counter'))
    i += 1
    uwsgi.cache_update('Service2Counter', str(i))
    return "{0} {1}".format(i, uwsgi.cache_get('Service2Timer'))
Example #29
 def __setitem__(self, item, value):
     uwsgi.cache_update(item, value)
Example #30
def get_layerdefinition(layerids, kmiserver=settings.KMI_SERVER, results=None):
    # avoid a shared mutable default argument
    if results is None:
        results = {}
    kmiserver = get_kmiserver(kmiserver)

    multiple_layers = True
    if isinstance(layerids, basestring):
        layerids = [layerids]
        multiple_layers = False
    #group layers against layer workspace
    layers = {}
    for layerid in layerids:
        layerid = layerid.strip()
        #check whether it is cached or not
        key = layerdefinitionkey(layerid)
        if uwsgi.cache_exists(key):
            try:
                definitiondata = uwsgi.cache_get(key)
                if definitiondata:
                    if layerid in results:
                        results[layerid].update(json.loads(definitiondata))
                    else:
                        results[layerid] = json.loads(definitiondata)
                    continue
            except:
                pass

        layer = layerid.split(":")

        if len(layer) == 1:
            #no workspace
            layer_ws = ""
            layer = layer[0]
        else:
            layer_ws = layer[0]
            layer = layer[1]

        if layer_ws not in layers:
            layers[layer_ws] = [layerid]
        else:
            layers[layer_ws].append(layerid)

    if layers:
        kmiserver = get_kmiserver(kmiserver)
        session_cookie = settings.get_session_cookie()

        url = None
        for layer_ws, layers in layers.iteritems():
            if layer_ws:
                url = "{}/{}/wfs?request=DescribeFeatureType&version=2.0.0&service=WFS&outputFormat=application%2Fjson&typeName=".format(
                    kmiserver, layer_ws, ",".join(layers))
            else:
                url = "{}/wfs?request=DescribeFeatureType&version=2.0.0&service=WFS&outputFormat=application%2Fjson&typeName=".format(
                    kmiserver, ",".join(layers))

            res = requests.get(url, verify=False, cookies=session_cookie)
            res.raise_for_status()
            layersdata = res.json()

            for layer in layersdata.get("featureTypes") or []:
                if layer_ws:
                    layerid = "{}:{}".format(layer_ws, layer["typeName"])
                else:
                    layerid = layer["typeName"]
                try:
                    index = layers.index(layerid)
                except:
                    index = -1
                if index >= 0:
                    #this layer's metadata is requested by the user
                    if layerid in results:
                        result = results[layerid]
                    else:
                        result = {"id": layerid}
                        results[layerid] = result

                    result["properties"] = layer["properties"]
                    result["geometry_property"] = None
                    result["geometry_properties"] = []
                    result["geometry_type"] = None
                    result["geometry_property_msg"] = None

                    del layers[index]

                    #find spatial columns
                    for prop in layer["properties"]:
                        if prop["type"].startswith("gml:"):
                            #spatial column
                            result["geometry_properties"].append(prop)

                    if len(result["geometry_properties"]) == 1:
                        result["geometry_property"] = result[
                            "geometry_properties"][0]
                        result["geometry_type"] = result[
                            "geometry_properties"][0]["localType"].lower()
                    elif len(result["geometry_properties"]) > 1:
                        #have more than one geometry property; try to find the right one
                        if layer_ws:
                            url = "{}/{}/ows?service=WFS&version=2.0.0&request=GetFeature&typeName={}&count=1&outputFormat=application%2Fjson".format(
                                kmiserver, layer_ws, layerid)
                        else:
                            url = "{}/ows?service=WFS&version=2.0.0&request=GetFeature&typeName={}&count=1&outputFormat=application%2Fjson".format(
                                kmiserver, layerid)

                        res = requests.get(url,
                                           verify=False,
                                           cookies=session_cookie)
                        res.raise_for_status()
                        featuresdata = res.json()
                        if len(featuresdata["features"]) > 0:
                            feat = featuresdata["features"][0]
                            for prop in result["geometry_properties"]:
                                if prop["name"] == feat["geometry_name"]:
                                    result["geometry_property"] = prop
                                    result["geometry_type"] = prop[
                                        "localType"].lower()
                                    break

                        if not result["geometry_property"]:
                            result[
                                "geometry_property_msg"] = "Layer '{}' has more than one geometry columns, can't identity which column is used as the geometry column.".format(
                                    layerid)
                    else:
                        result[
                            "geometry_property_msg"] = "Layer '{}' is not a spatial layer".format(
                                layerid)

                    if result["geometry_property"]:
                        #found the geometry property, remove it from properties
                        index = len(result["properties"]) - 1
                        while index >= 0:
                            if result["properties"][index] == result[
                                    "geometry_property"]:
                                #this is the geometry property, remove it from properties
                                del result["properties"][index]
                                break
                            index -= 1

                    #cache it for 1 day
                    key = layerdefinitionkey(layerid)
                    try:
                        if uwsgi.cache_exists(key):
                            uwsgi.cache_update(key, json.dumps(result),
                                               24 * 3600)
                        else:
                            uwsgi.cache_set(key, json.dumps(result), 24 * 3600)
                    except:
                        pass

        if len(layers) == 1:
            if layer_ws:
                raise Exception("The layer({}:{}) Not Found".format(
                    layer_ws, layers[0]))
            else:
                raise Exception("The layer({}) Not Found".format(layers[0]))
        elif len(layers) > 1:
            if layer_ws:
                raise Exception("The layers({}) Not Found".format(",".join(
                    ["{}:{}".format(layer_ws, l) for l in layers])))
            else:
                raise Exception("The layers({}) Not Found".format(
                    ",".join(layers)))

    if multiple_layers:
        return results
    else:
        return results[layerids[0]]
Example #31
 def test_failed_by_one(self):
     self.assertIsNone(uwsgi.cache_update('key1', 'HELLO', 0, 'items_1'))
Example #32
def get_layerdefinition(layerids, kmiserver="https://kmi.dbca.wa.gov.au/geoserver", results=None):
    # avoid a shared mutable default argument
    if results is None:
        results = {}
    kmiserver = get_kmiserver(kmiserver)

    multiple_layers = True
    if isinstance(layerids,basestring):
        layerids = [layerids]
        multiple_layers = False
    #group layers against layer workspace
    layers = {}
    for layerid in layerids:
        layerid = layerid.strip()
        #check whether it is cached or not
        key = layerdefinitionkey(layerid)
        if uwsgi.cache_exists(key):
            try:
                definitiondata = uwsgi.cache_get(key)
                if definitiondata:
                    if layerid in results:
                        results[layerid].update(json.loads(definitiondata))
                    else:
                        results[layerid] = json.loads(definitiondata)
                    continue
            except:
                pass

        layer = layerid.split(":")

        if len(layer) == 1:
            #no workspace
            layer_ws = ""
            layer = layer[0]
        else:
            layer_ws = layer[0]
            layer = layer[1]

        if layer_ws not in layers:
            layers[layer_ws] = [layerid]
        else:
            layers[layer_ws].append(layerid)

    if layers:
        kmiserver = get_kmiserver(kmiserver)
        session_cookie = settings.get_session_cookie()

        url = None
        for layer_ws,layers in layers.iteritems():
            if layer_ws:
                url = "{}/{}/wfs?request=DescribeFeatureType&version=2.0.0&service=WFS&outputFormat=application%2Fjson&typeName=".format(kmiserver,layer_ws,",".join(layers))
            else:
                url = "{}/wfs?request=DescribeFeatureType&version=2.0.0&service=WFS&outputFormat=application%2Fjson&typeName=".format(kmiserver,",".join(layers))

            res = requests.get(
                url,
                verify=False,
                cookies=session_cookie
            )
            res.raise_for_status()
            layersdata = res.json()

            for layer in layersdata.get("featureTypes") or []:
                if layer_ws:
                    layerid = "{}:{}".format(layer_ws,layer["typeName"])
                else:
                    layerid = layer["typeName"]
                try:
                    index = layers.index(layerid)
                except:
                    index = -1
                if index >= 0:
                    #this layer's metadata is requested by the user
                    if layerid in results:
                        result = results[layerid]
                    else:
                        result = {"id":layerid}
                        results[layerid] = result

                    result["properties"] = layer["properties"]
                    result["geometry_property"] = None
                    result["geometry_properties"] = []
                    result["geometry_type"] = None
                    result["geometry_property_msg"] = None

                    del layers[index]

                    #find spatial columns
                    for prop in layer["properties"]:
                        if prop["type"].startswith("gml:"):
                            #spatial column
                            result["geometry_properties"].append(prop)


                    if len(result["geometry_properties"]) == 1:
                        result["geometry_property"] = result["geometry_properties"][0]
                        result["geometry_type"] = result["geometry_properties"][0]["localType"].lower()
                    elif len(result["geometry_properties"]) > 1:
                        #have more than one geometry property; try to find the right one
                        if layer_ws:
                            url = "{}/{}/ows?service=WFS&version=2.0.0&request=GetFeature&typeName={}&count=1&outputFormat=application%2Fjson".format(kmiserver,layer_ws,layerid)
                        else:
                            url = "{}/ows?service=WFS&version=2.0.0&request=GetFeature&typeName={}&count=1&outputFormat=application%2Fjson".format(kmiserver,layerid)

                        res = requests.get(
                            url,
                            verify=False,
                            cookies=session_cookie
                        )
                        res.raise_for_status()
                        featuresdata = res.json()
                        if len(featuresdata["features"]) > 0:
                            feat = featuresdata["features"][0]
                            for prop in result["geometry_properties"]:
                                if prop["name"] == feat["geometry_name"]:
                                    result["geometry_property"] = prop
                                    result["geometry_type"] = prop["localType"].lower()
                                    break

                        if not result["geometry_property"]:
                            result["geometry_property_msg"] = "Layer '{}' has more than one geometry columns, can't identity which column is used as the geometry column.".format(layerid)
                    else:
                        result["geometry_property_msg"] = "Layer '{}' is not a spatial layer".format(layerid)

                    if result["geometry_property"]:
                        #found the geometry property, remove it from properties
                        index = len(result["properties"]) - 1
                        while index >= 0:
                            if result["properties"][index] == result["geometry_property"]:
                                #this is the geometry property, remove it from properties
                                del result["properties"][index]
                                break
                            index -= 1



                    #cache it for 1 day
                    key = layerdefinitionkey(layerid)
                    try:
                        if uwsgi.cache_exists(key):
                            uwsgi.cache_update(key, json.dumps(result),24 * 3600)
                        else:
                            uwsgi.cache_set(key, json.dumps(result),24 * 3600)
                    except:
                        pass
                        
        if len(layers) == 1:
            if layer_ws:
                raise Exception("The layer({}:{}) Not Found".format(layer_ws,layers[0]))
            else:
                raise Exception("The layer({}) Not Found".format(layers[0]))
        elif len(layers) > 1:
            if layer_ws:
                raise Exception("The layers({}) Not Found".format(",".join(["{}:{}".format(layer_ws,l) for l in layers])))
            else:
                raise Exception("The layers({}) Not Found".format(",".join(layers)))

    if multiple_layers:
        return results
    else:
        return results[layerids[0]]
Example #33
 def test_ok_four_bytes(self):
     self.assertTrue(uwsgi.cache_update('key1', 'HELL', 0, 'items_1'))
Example #34
 def _cache_save_sids(self):
     # Save current sids list for current worker
     uwsgi.cache_update(self.cache_worker_key % self.worker_id,
                        pickle.dumps(self.sids), 0, self.cache)
Example #35
 def test_overlapping(self):
     self.assertTrue(uwsgi.cache_update('key1', 'HE', 0, 'items_2'))
     self.assertIsNone(uwsgi.cache_update('key1', 'HELL', 0, 'items_2'))
     self.assertTrue(uwsgi.cache_del('key1', 'items_2')) 
     self.assertTrue(uwsgi.cache_update('key1', 'HELL', 0, 'items_2'))
Example #36
def screenx_cache_set(k,v):
    if UWSGI_ENV:
        v = str(int(v))
        uwsgi.cache_update(k, v)
    else:
        SCREENX_CACHE[k] = v
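The matching getter is not shown; it would have to undo the str(int(v)) encoding. A sketch under the same UWSGI_ENV flag:

def screenx_cache_get(k, default=0):
    # hypothetical counterpart to screenx_cache_set() above
    if UWSGI_ENV:
        v = uwsgi.cache_get(k)
        return int(v) if v is not None else default
    return SCREENX_CACHE.get(k, default)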
Example #39
def get_layermetadata(layerids, kmiserver=settings.KMI_SERVER, results=None):
    # avoid a shared mutable default argument
    if results is None:
        results = {}
    multiple_layers = True
    if isinstance(layerids, basestring):
        layerids = [layerids]
        multiple_layers = False
    #group layers against layer workspace
    layers = {}
    for layerid in layerids:
        layerid = layerid.strip()
        #check whether it is cached or not
        key = layermetadatakey(layerid)
        if uwsgi.cache_exists(key):
            try:
                metadata = uwsgi.cache_get(key)
                if metadata:
                    if layerid in results:
                        results[layerid].update(json.loads(metadata))
                    else:
                        results[layerid] = json.loads(metadata)
                    #print("Retrieve the metadata from cache for layer ({})".format(layerid))
                    continue
            except:
                pass

        layer = layerid.split(":")

        if len(layer) == 1:
            #no workspace
            layer_ws = ""
            layer = layer[0]
        else:
            layer_ws = layer[0]
            layer = layer[1]

        if layer_ws not in layers:
            layers[layer_ws] = [layer]
        else:
            layers[layer_ws].append(layer)

    if layers:
        session_cookie = settings.get_session_cookie()
        kmiserver = get_kmiserver(kmiserver)
        #find the layer's metadata
        url = None
        for layer_ws, layers in layers.iteritems():
            if layer_ws:
                url = "{}/{}/wms?service=wms&version=1.1.1&request=GetCapabilities".format(
                    kmiserver, layer_ws)
            else:
                url = "{}/wms?service=wms&version=1.1.1&request=GetCapabilities".format(
                    kmiserver)

            res = requests.get(url, verify=False, cookies=session_cookie)
            res.raise_for_status()

            tree = ET.fromstring(res.content)

            capability = tree.find('Capability')
            if capability is None or not len(capability):
                raise Exception("getCapability failed")
            kmi_layers = capability.findall("Layer")
            while kmi_layers:
                kmi_layer = kmi_layers.pop()
                name = get_child_value(kmi_layer, "Name")

                if name:
                    try:
                        index = layers.index(name)
                    except:
                        index = -1
                    if index >= 0:
                        #this layer's metadata is requested by the user
                        if layer_ws:
                            layerid = "{}:{}".format(layer_ws, name)
                        else:
                            layerid = name

                        if layerid in results:
                            result = results[layerid]
                        else:
                            result = {"id": layerid}
                            results[layerid] = result

                        del layers[index]

                        result["title"] = get_child_value(kmi_layer, "Title")
                        result["abstract"] = get_child_value(
                            kmi_layer, "Abstract")
                        result["srs"] = get_child_value(kmi_layer, "SRS")
                        bbox = kmi_layer.find("LatLonBoundingBox")
                        if bbox is not None:
                            result["latlonBoundingBox"] = [
                                float(bbox.attrib["miny"]),
                                float(bbox.attrib["minx"]),
                                float(bbox.attrib["maxy"]),
                                float(bbox.attrib["maxx"])
                            ]
                        else:
                            result["latlonBoundingBox"] = None
                        for bbox in kmi_layer.findall("BoundingBox"):
                            result["latlonBoundingBox_{}".format(
                                bbox.attrib["SRS"].upper())] = [
                                    float(bbox.attrib["miny"]),
                                    float(bbox.attrib["minx"]),
                                    float(bbox.attrib["maxy"]),
                                    float(bbox.attrib["maxx"])
                                ]

                        #cache it for 6 hours
                        key = layermetadatakey(result["id"])
                        try:
                            if uwsgi.cache_exists(key):
                                uwsgi.cache_update(key, json.dumps(result),
                                                   6 * 3600)
                            else:
                                uwsgi.cache_set(key, json.dumps(result),
                                                6 * 3600)
                        except:
                            pass

                        #print("Retrieve the metadata from kmi for layer ({})".format(result["id"]))

                        if len(layers):
                            continue
                        else:
                            #already found metadata for all required layers
                            break
                sub_layers = kmi_layer.findall("Layer")
                if sub_layers:
                    kmi_layers += sub_layers

            if len(layers) == 1:
                if layer_ws:
                    raise Exception("The layer({}:{}) Not Found".format(
                        layer_ws, layers[0]))
                else:
                    raise Exception("The layer({}) Not Found".format(
                        layers[0]))
            elif len(layers) > 1:
                if layer_ws:
                    raise Exception("The layers({}) Not Found".format(",".join(
                        ["{}:{}".format(layer_ws, l) for l in layers])))
                else:
                    raise Exception("The layers({}) Not Found".format(
                        ",".join(layers)))

    if multiple_layers:
        return results
    else:
        return results[layerids[0]]
Example #40
 def set(self, key, value, ttl):
     cache_update(key, value, ttl, self.name)
Example #41
from FancyModel import FancyModel
import uwsgi
import json

if __name__ == '__main__':
    fnc = FancyModel()
    while True:
        uwsgi.mule_get_msg()
        req = uwsgi.queue_pull()
        if req is None:
            continue
        json_in = json.loads(req.decode("utf-8"))
        text = json_in["text"]
        # to store transliterations
        json_out = {"res": fnc.predict(text)}
        uwsgi.cache_update(json_in.get("id"),
                           json.dumps(json_out, ensure_ascii=False), 0,
                           "mcache")
Example #42
def _get_profile(app):
    #get app profile
    profile = None
    appPath = os.path.join(DIST_PATH,"{}.js".format(app))
    if not os.path.exists(appPath):
        appPath = os.path.join(DIST_PATH,"sss.js")

    key = "{}_profile".format(app)
    profileChanged = False
    
    if uwsgi.cache_exists(key):
        profile = uwsgi.cache_get(key)
    
    if profile:
        profile = json.loads(profile)
        if repr(os.path.getmtime(appPath)) != profile["mtime"] or os.path.getsize(appPath) != profile["size"]:
            profileChanged = True
            profile = None

    if not profile:
        file_data = None
        with open(appPath,"rb") as f:
            file_data = f.read()
        m = profile_re.search(file_data)
        profile = m.group("profile") if m else "{}"
        profile = {
            'mtime':repr(os.path.getmtime(appPath)),
            'size':os.path.getsize(appPath),
            'profile':demjson.decode(profile)
        }
        m = hashlib.md5()
        m.update(file_data)
        profile['profile']['build']['md5'] = base64.urlsafe_b64encode(m.digest()).rstrip("=")
        file_data = None
        if profileChanged:
            uwsgi.cache_update(key, json.dumps(profile))
        else:
            uwsgi.cache_set(key, json.dumps(profile))

    profile["profile"]["dependents"] = {}
    #get vendor md5
    vendorPath = os.path.join(DIST_PATH,"vendor.js")
    if not os.path.exists(vendorPath):
        raise Exception("Vendor library not found")
    key = "{}_profile".format("vendor")

    profileChanged = False
    vendorProfile = None
    if uwsgi.cache_exists(key):
        vendorProfile = uwsgi.cache_get(key)
    
    if vendorProfile:
        vendorProfile = json.loads(vendorProfile)
        if repr(os.path.getmtime(vendorPath)) != vendorProfile["mtime"] or os.path.getsize(vendorPath) != vendorProfile["size"]:
            profileChanged = True
            vendorProfile = None

    if not vendorProfile:
        m = hashlib.md5()
        with open(vendorPath,"rb") as f:
            m.update(f.read())
        vendorProfile = {
            'mtime':repr(os.path.getmtime(vendorPath)),
            'size':os.path.getsize(vendorPath),
            'vendorMD5':base64.urlsafe_b64encode(m.digest()).rstrip("=")
        }
        if profileChanged:
            uwsgi.cache_update(key, json.dumps(vendorProfile))
        else:
            uwsgi.cache_set(key, json.dumps(vendorProfile))

    profile["profile"]["dependents"]["vendorMD5"] = vendorProfile["vendorMD5"]

    #get env profile
    envPath = os.path.join(BASE_DIST_PATH,'release','static','js',"{}-{}.env.js".format(app,ENV_TYPE))
    if not os.path.exists(envPath):
        raise Exception("'{}-{}.env.js' is missing.".format(app,ENV_TYPE))
    else:
        key = "{}_{}_profile".format("env",ENV_TYPE)
        profileChanged = False

        envProfile = None
        if uwsgi.cache_exists(key):
            envProfile = uwsgi.cache_get(key)
    
        if envProfile:
            envProfile = json.loads(envProfile)
            if repr(os.path.getmtime(envPath)) != envProfile["mtime"] or os.path.getsize(envPath) != envProfile["size"]:
                profileChanged = True
                envProfile = None

        if not envProfile:
            m = hashlib.md5()
            with open(envPath,"rb") as f:
                m.update(f.read())
            envProfile = {
                'mtime':repr(os.path.getmtime(envPath)),
                'size':os.path.getsize(envPath),
                'envMD5':base64.urlsafe_b64encode(m.digest()).rstrip("=")
            }
            if profileChanged:
                uwsgi.cache_update(key, json.dumps(envProfile))
            else:
                uwsgi.cache_set(key, json.dumps(envProfile))

        profile["profile"]["dependents"]["envMD5"] = envProfile["envMD5"]
        profile["profile"]["envType"] = ENV_TYPE

    #get style profile
    stylePath = os.path.join(BASE_DIST_PATH,'release','static','css',"style.css")
    if not os.path.exists(stylePath):
        raise Exception("'style.css' is missing.")
    else:
        key = "style_profile"
        profileChanged = False

        styleProfile = None
        if uwsgi.cache_exists(key):
            styleProfile = uwsgi.cache_get(key)
    
        if styleProfile:
            styleProfile = json.loads(styleProfile)
            if repr(os.path.getmtime(stylePath)) != styleProfile["mtime"] or os.path.getsize(stylePath) != styleProfile["size"]:
                profileChanged = True
                styleProfile = None

        if not styleProfile:
            m = hashlib.md5()
            with open(stylePath,"rb") as f:
                m.update(f.read())
            styleProfile = {
                'mtime':repr(os.path.getmtime(stylePath)),
                'size':os.path.getsize(stylePath),
                'styleMD5':base64.urlsafe_b64encode(m.digest()).rstrip("=")
            }
            if profileChanged:
                uwsgi.cache_update(key, json.dumps(styleProfile))
            else:
                uwsgi.cache_set(key, json.dumps(styleProfile))

        profile["profile"]["dependents"]["styleMD5"] = styleProfile["styleMD5"]


    return profile["profile"]