Example 1
 def __init__(self, name):
     """Constructor"""
     super(Zone, self).__init__(name)
     self._details = config("zone", name)
     self.tiles = []
     self.collision_tiles = []
     self.active_sprites = []
     self.sprites = None
     self._raw_map = readMap(name).split("\n")  #one string per map row
     self._generateTileTable()
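
A minimal usage sketch (the zone name "cave" is a hypothetical config entry; Zone, config, and readMap come from the project itself):

zone = Zone("cave")        #loads config("zone", "cave") and the matching map file
print(len(zone._raw_map))  #number of rows read from the map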
Example 2
 def deduplicate(self, title):
     '''
     Prevents re-adding sling downloads that have already been queued
     @param title - name of the granule to check before downloading
     '''
     key = config()['dedup_redis_key']
     global POOL
     if POOL is None:
         POOL = ConnectionPool.from_url(REDIS_URL)
     r = StrictRedis(connection_pool=POOL)
     return r.sadd(key, title) == 0  #sadd returns 0 when the member already exists, i.e. a duplicate
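
A sketch of the expected semantics (the granule title is hypothetical; deduplicate is assumed to be a method on AbstractQuery, as in the module shown below): the first call records the title in the Redis set and returns False; any repeat returns True.

q = AbstractQuery()
q.deduplicate("S1A_GRANULE_0001")  #False: first sighting, title is recorded
q.deduplicate("S1A_GRANULE_0001")  #True: already in the Redis set, skip it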
Example 3
 def __init__(self, name):
     self.name = name
     self._details = config("spell", name)
     self.image = pygame.image.load(spellPath(name))
     self.image = self.image.convert()  #match the display's pixel format; per-pixel alpha is dropped
     self.rect = self.image.get_rect()
Example 4
 def __init__(self, name):
     self.name = name
     self._details = config("aether", name)
     self.image = pygame.image.load(aetherPath(name))
     self.image = self.image.convert_alpha()  #match the display format but keep per-pixel alpha for transparency
     self.rect = self.image.get_rect()
Example 5
    def parse_params(self, aoi, input_qtype, dns_list_str):
        '''
        Parses the query parameters from the aoi, determines the proper
        start/end times, and returns a dict of the parsed params
        (None if the query is expired or has no query fields)
        '''
        params = {}  #dict to be returned holding parsed query parameters
        cfg = config()  #load settings.json
        #if the end-of-query time has already passed, the query is expired
        if "temporal" in aoi.keys() and "endquery" in aoi["temporal"].keys():
            if determine_if_expired_query(aoi["temporal"]["endquery"]):
                return None
        if "endquery" in aoi.keys() and determine_if_expired_query(
                aoi["endquery"]):
            return None
        #map the values outside the endpoint specific settings
        #default to looking back window_size days from settings config file, unless window_size is in the aoi
        if "window_size" in aoi.keys():
            window = aoi["window_size"]
        else:
            window = cfg["window-size-days"]
        end_time = datetime.datetime.utcnow().strftime(
            "%Y-%m-%dT%H:%M:%S")  #default end time is now
        #default start time is now - window size
        start_time = (datetime.datetime.utcnow() - datetime.timedelta(
            days=float(window))).strftime("%Y-%m-%dT%H:%M:%S")
        #but if the endpoint aoi is not in redis, default to retrieving everything
        last_query_time = self.loadStamp(self.stampKeyname(aoi, input_qtype))
        if last_query_time is None:
            start_time = "1970-01-01T00:00:00"
        else:
            start_time = last_query_time
        #determine if there is an event time
        event_time = None
        #check aoi
        if "event" in aoi.keys() and "time" in aoi["event"].keys():
            event_time = parse_datetime(aoi["event"]["time"])
        #check aoi["metadata"]
        if "metadata" in aoi.keys() and "event" in aoi[
                "metadata"] and "time" in aoi["metadata"]["event"].keys():
            event_time = parse_datetime(aoi["metadata"]["event"]["time"])
        #now determine start/end times
        #see if starttime and endtime are part of the aoi metadata
        met_start, met_end = parse_start_end(aoi, event_time=event_time)
        if met_start is not None: start_time = met_start
        if met_end is not None: end_time = met_end
        #set priority, the most nested being the one passed
        priority = 0  #defaults to 0
        if "priority" in aoi.keys():
            priority = aoi["priority"]
        if "metadata" in aoi.keys() and "priority" in aoi["metadata"].keys():
            priority = aoi["metadata"]["priority"]
        #for each query within the aoi
        if not ("metadata" in aoi.keys()
                and "query" in aoi["metadata"].keys()):
            return None  #exit if no query fields exist
        for qtype in aoi["metadata"]["query"].keys():
            #determine endpoint, skip if it does not match the qquery input endpoint
            if qtype != input_qtype:
                continue
            #set the query to run
            query = aoi["metadata"]["query"][qtype]
            #determine priority
            if "priority" in query.keys():
                priority = query["priority"]
            #parse event time for each query, if given
            query_start, query_end = parse_start_end(query,
                                                     event_time=event_time)
            if query_start is not None:
                #make sure the query start time is within the start/end window (if specified in aoi metadata)
                if met_start is not None and convert_to_dt(
                        query_start) < convert_to_dt(met_start):
                    #the start time in the query is before the metadata window
                    start_time = met_start
                elif met_end is not None and convert_to_dt(
                        query_start) > convert_to_dt(met_end):
                    #start of the query window is after the given metadata window
                    return None
                else:
                    #start time is within the metadata start/end window (or the window doesn't exist)
                    start_time = query_start

            if query_end is not None:
                #make sure the end time is within the start/end metadata window
                if met_end is not None and convert_to_dt(
                        query_end) < convert_to_dt(met_end) and convert_to_dt(
                            start_time) < convert_to_dt(query_end):
                    #the end time in the query is inside the metadata window
                    end_time = query_end
                elif met_end is not None and convert_to_dt(
                        query_end) > convert_to_dt(met_end):
                    #the end time in the query is after the metadata window
                    end_time = met_end
                else:
                    end_time = query_end

            #determine products to query for within each endpoint
            if "products" not in aoi["metadata"]["query"][qtype]: continue
            products = aoi["metadata"]["query"][qtype]["products"]
            tags = []
            if "tag" in aoi["metadata"].keys():
                tags.append(aoi["metadata"]["tag"])
            if qtype in aoi["metadata"].keys(
            ) and "tag" in aoi["metadata"][qtype].keys():
                #append the endpoint-specific tag
                tags.append(aoi["metadata"][qtype]["tag"])

            # parse the comma-separated dns string into a list
            dns_list = [x.strip() for x in dns_list_str.split(',')]

            #fill parameters
            params["starttime"] = start_time
            params["endtime"] = end_time
            params["priority"] = priority
            params["products"] = products
            params["tag"] = tags
            params["dns_list"] = dns_list
            return params

        return None
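
For reference, a minimal sketch of an aoi document that reaches the params-filling branch (all values are hypothetical; only keys that parse_params actually reads are shown):

aoi = {
    "id": "AOI_test",
    "metadata": {
        "priority": 5,
        "tag": "ops",
        "query": {
            "scihub": {                #must match input_qtype
                "products": ["SLC"],
                "priority": 7,         #overrides the metadata-level priority
            },
        },
    },
}
params = handler.parse_params(aoi, "scihub", "dns1,dns2")  #handler: a query-handler instance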
Example 6
    def submit_sling_job(self,
                         aoi,
                         query_params,
                         qtype,
                         queue_grp,
                         title,
                         link,
                         rtag=None,
                         pds_queue=None):
        #Build and submit a sling job for the given granule
        yr, mo, dy = self.getDataDateFromTitle(title)  #date
        filename = title + "." + self.getFileType()

        if not pds_queue:
            # build payload items for job submission
            tags = query_params["tag"]
            md5 = hashlib.md5("{0}.{1}\n".format(
                title, self.getFileType()).encode("utf-8")).hexdigest()
            cfg = config()  # load settings.json
            repo_url = "%s/%s/%s/%s/%s/%s.%s" % (
                cfg["repository-base"], md5[0:8], md5[8:16], md5[16:24],
                md5[24:32], title, self.getFileType())
            location = {}
            location['type'] = 'polygon'
            location['aoi'] = aoi['id']
            location['coordinates'] = aoi['location']['coordinates']
            prod_met = {}
            prod_met['source'] = qtype
            prod_met['dataset_type'] = title[0:3]
            prod_met['spatial_extent'] = location
            prod_met['tag'] = tags
            queue = "factotum-job_worker-%s_throttled" % (
                qtype + str(queue_grp))  # job submission queue
            job_header = 'job-sling:'
            dedup_key = DEDUP_KEY
            params = [{
                "name": "download_url",
                "from": "value",
                "value": link,
            }, {
                "name": "repo_url",
                "from": "value",
                "value": repo_url,
            }, {
                "name": "prod_name",
                "from": "value",
                "value": title,
            }, {
                "name": "file_type",
                "from": "value",
                "value": self.getFileType(),
            }, {
                "name": "prod_date",
                "from": "value",
                "value": "{}".format("%s-%s-%s" % (yr, mo, dy)),
            }, {
                "name": "prod_met",
                "from": "value",
                "value": prod_met,
            }, {
                "name": "options",
                "from": "value",
                "value": "--force_extract"
            }]
        else:
            # queue = "opds-%s-job_worker-small" % (qtype)
            queue = pds_queue  # job submission queue, no queue group for autoscalers
            job_header = 'job-sling-extract-opds:'
            dedup_key = DEDUP_KEY_PDS
            params = [{
                "name": "download_url",
                "from": "value",
                "value": link,
            }, {
                "name": "prod_name",
                "from": "value",
                "value": "%s-pds" % title,
            }, {
                "name": "file",
                "from": "value",
                "value": filename,
            }, {
                "name": "prod_date",
                "from": "value",
                "value": "{}".format("%s-%s-%s" % (yr, mo, dy)),
            }]

        #set sling job spec release/branch
        if rtag is None:
            try:
                with open('_context.json') as json_data:
                    context = json.load(json_data)
                job_spec = job_header + context['job_specification'][
                    'job-version']
            except (IOError, ValueError, KeyError) as err:
                print('Failed to load the job version from _context.json: {}'.format(err))
                raise  #job_spec would otherwise be undefined below
        else:
            job_spec = job_header + rtag

        rtime = datetime.datetime.utcnow()
        job_name = "%s-%s-%s-%s-%s" % (job_spec, queue, title,
                                       rtime.strftime("%d_%b_%Y_%H:%M:%S"),
                                       aoi['id'])
        if job_name.startswith('job-'):
            job_name = job_name[len('job-'):]  #drop the leading 'job-' prefix
        priority = query_params["priority"]

        #Setup input arguments here
        rule = {
            "rule_name": job_spec,
            "queue": queue,
            "priority": priority,
            "kwargs": '{}'
        }

        #check for dedup, if clear, submit job
        if not self.deduplicate(filename, dedup_key):
            submit_mozart_job({},
                              rule,
                              hysdsio={
                                  "id": "internal-temporary-wiring",
                                  "params": params,
                                  "job-specification": job_spec
                              },
                              job_name=job_name)
        else:
            location = " to OpenDataset" if pds_queue else "to own bucket"
            reason = "in OpenDataset" if pds_queue else "in OpenDataset or own bucket"
            print(
                "Will not submit sling job {0} to {1}, already processed {2}".
                format(title, location, reason))
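
A hedged sketch of a call site (every argument value is hypothetical; the handler is assumed to implement getDataDateFromTitle and getFileType):

handler.submit_sling_job(
    aoi,                      #aoi dict with 'id' and 'location'
    query_params,             #dict returned by parse_params above
    "scihub",                 #qtype
    1,                        #queue_grp
    "S1A_IW_SLC__20160101",   #hypothetical granule title
    "https://example.com/granule",  #hypothetical download link
    rtag="release-20170101")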
Example 7
import argparse
import datetime
import hashlib
import importlib
import json
import os
import traceback
import re
import requests
import backoff

from redis import ConnectionPool, StrictRedis

from utilities import config, get_aois, get_redis_endpoint
from hysds_commons.job_utils import submit_mozart_job

from hysds.celery import app
REDIS_URL = get_redis_endpoint()
POOL = None
DEDUP_KEY = config()['dedup_redis_key']
DEDUP_KEY_PDS = config()['dedup_redis_key_pds']


class AbstractQuery(object):
    '''
    A class holding all generic query functions
    '''
    @classmethod
    def getQueryHandler(clazz, qtype):
        '''
        Get a handler for this particular query type by searching registered subclasses
        @param qtype - type of query
        '''
        try:
            mod = importlib.import_module(qtype)
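
The handler lookup assumes each endpoint ships as an importable module named after its qtype; a minimal sketch of such a module (the file name, class name, and return value are all assumptions):

#hypothetical module scihub.py, located by getQueryHandler("scihub")
class SciHubQuery(AbstractQuery):
    def getFileType(self):
        return "zip"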
Example 8
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("qtype", help="query endpoint, e.g. (asf|scihub|unavco)")
    parser.add_argument("--dns_list", help="dns list for qtype to use from .netrc, comma separated", required=True)
    parser.add_argument("--tag", help="PGE docker image tag (release, version, " +
                                      "or branch) to propagate",
                        default="master", required=False)
    parser.add_argument("--sling_tag", help="sling PGE docker image tag (release, version, " +
                                      "or branch) to propagate", required=False)
    args = parser.parse_args()

    query_endpoint = args.qtype
    dns_list = args.dns_list
    qquery_rtag = args.tag
    sling_rtag = qquery_rtag if args.sling_tag is None else args.sling_tag

    cfg = config()
    aoi = get_aois(cfg)  #retrieves a list of aois that match the grq values in settings.cfg
    aois = sorted(aoi,
                  key=lambda a: a.get("metadata", {}).get("priority", 0),
                  reverse=True)  #highest-priority aois first
    for region in aois:
        #for each aoi
        user_tags = region.get('metadata', {}).get('user_tags', [])
        if "inactive" in user_tags:
            #if the region is inactive, skip
            print("AOI {0} marked as inactive. Skipping".format(region["id"]))
            continue

        #skip regions that have no query map
        if "query" not in region.get("metadata", {}):
            continue

        #set query priority
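
A hypothetical invocation (the script name is an assumption; the endpoint and dns values are placeholders):

# python qquery.py scihub --dns_list scihub-dns1,scihub-dns2 --tag release-20170101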
Example 9
    def submit_sling_job(self,
                         aoi,
                         query_params,
                         qtype,
                         queue_grp,
                         title,
                         link,
                         rtag=None):
        #Build and submit a sling job for the given granule
        cfg = config()  #load settings.json
        priority = query_params["priority"]
        products = query_params["products"]
        tags = query_params["tag"]

        #build payload items for job submission
        yr, mo, dy = self.getDataDateFromTitle(title)  #date
        md5 = hashlib.md5("{0}.{1}\n".format(
            title, self.getFileType()).encode("utf-8")).hexdigest()
        repo_url = "%s/%s/%s/%s/%s/%s.%s" % (cfg["repository-base"], md5[0:8],
                                             md5[8:16], md5[16:24], md5[24:32],
                                             title, self.getFileType())
        location = {}
        location['type'] = 'polygon'
        location['aoi'] = aoi['id']
        location['coordinates'] = aoi['location']['coordinates']
        prod_met = {}
        prod_met['source'] = qtype
        prod_met['dataset_type'] = title[0:3]
        prod_met['spatial_extent'] = location
        prod_met['tag'] = tags

        #required params for job submission
        if hasattr(self, 'getOauthUrl'):
            #sling via oauth
            oauth_url = self.getOauthUrl()
            job_type = "job:spyddder-sling-oauth_%s" % qtype
            job_name = "spyddder-sling-oauth_%s-%s-%s.%s" % (
                qtype, aoi['id'], title, self.getFileType())
        else:
            #normal sling
            job_type = "job:spyddder-sling_%s" % qtype
            job_name = "spyddder-sling_%s-%s-%s.%s" % (qtype, aoi['id'], title,
                                                       self.getFileType())
            oauth_url = None
        queue = "factotum-job_worker-%s_throttled" % (qtype + str(queue_grp)
                                                      )  # job submission queue

        #set sling job spec release/branch
        if rtag is None:
            try:
                with open('_context.json') as json_data:
                    context = json.load(json_data)
                job_spec = 'job-sling:' + context['job_specification'][
                    'job-version']
            except (IOError, ValueError, KeyError) as err:
                print('Failed to load the job version from _context.json: {}'.format(err))
                raise  #job_spec would otherwise be undefined below
        else:
            job_spec = 'job-sling:' + rtag

        rtime = datetime.datetime.utcnow()
        job_name = "%s-%s-%s-%s-%s" % (job_spec, queue, title,
                                       rtime.strftime("%d_%b_%Y_%H:%M:%S"),
                                       aoi['id'])
        if job_name.startswith('job-'):
            job_name = job_name[len('job-'):]  #drop the leading 'job-' prefix

        #Setup input arguments here
        rule = {
            "rule_name": job_spec,
            "queue": queue,
            "priority": priority,
            "kwargs": '{}'
        }
        params = [{
            "name": "download_url",
            "from": "value",
            "value": link,
        }, {
            "name": "repo_url",
            "from": "value",
            "value": repo_url,
        }, {
            "name": "prod_name",
            "from": "value",
            "value": title,
        }, {
            "name": "file_type",
            "from": "value",
            "value": self.getFileType(),
        }, {
            "name": "prod_date",
            "from": "value",
            "value": "{}".format("%s-%s-%s" % (yr, mo, dy)),
        }, {
            "name": "prod_met",
            "from": "value",
            "value": prod_met,
        }, {
            "name": "options",
            "from": "value",
            "value": "--force_extract"
        }]
        #check for dedup, if clear, submit job
        if not self.deduplicate(title + "." + self.getFileType()):
            submit_mozart_job({},
                              rule,
                              hysdsio={
                                  "id": "internal-temporary-wiring",
                                  "params": params,
                                  "job-specification": job_spec
                              },
                              job_name=job_name)
        else:
            print(
                "Will not submit sling job for {0}, already processed".format(
                    title))