def fakeFilterVars(self):
    """Add a faked :filter form variable for each filtering prop.

    For every form field that names a (transitive) property of the current
    class and has a non-empty value, append an ``@filter`` entry so later
    processing treats the field as a filter.  String-valued fields are
    additionally tokenised: a single field whose value splits into several
    tokens is replaced by one MiniFieldStorage per token.
    """
    cls = self.db.classes[self.classname]
    for key in self.form.keys():
        # skip form variables that do not name a property of this class
        prop = cls.get_transitive_prop(key)
        if not prop:
            continue
        if isinstance(self.form[key], type([])):
            # search for at least one entry which is not empty
            for minifield in self.form[key]:
                if minifield.value:
                    break
            else:
                continue
        else:
            # single field: ignore it when the value is empty
            if not self.form[key].value:
                continue
        if isinstance(prop, hyperdb.String):
            v = self.form[key].value
            l = token.token_split(v)
            if len(l) > 1 or l[0] != v:
                self.form.value.remove(self.form[key])
                # replace the single value with the split list
                for v in l:
                    self.form.value.append(cgi.MiniFieldStorage(
                        key, v))
        # NOTE(review): the '@filter' entry is emitted once per matching
        # key, at loop level (not only for split String values) — this
        # placement is inferred from the flattened source; confirm.
        self.form.value.append(cgi.MiniFieldStorage('@filter', key))
def testMixedMultilink(self):
    """A comma-separated value and a repeated field for the same multilink
    ('nosy') should merge into one flat id list when parsed."""
    form = cgi.FieldStorage()
    form.list.append(cgi.MiniFieldStorage('nosy', '1,2'))
    form.list.append(cgi.MiniFieldStorage('nosy', '3'))
    cl = client.Client(self.instance, None, {'PATH_INFO':'/'}, form)
    cl.classname = 'issue'
    cl.nodeid = None
    cl.db = self.db
    cl.language = ('en',)
    # expect the two fields flattened into ['1', '2', '3'] with no errors
    self.assertEqual(cl.parsePropsFromForm(create=1),
        ({('issue', None): {'nosy': ['1','2', '3']}}, []))
def makeForm(args):
    """Build a cgi.FieldStorage from a mapping of field name -> value.

    Values may be plain strings (one field), lists of strings (one field
    per element), or FileUpload objects (field with content and filename).

    Improvements: the list branch used a list comprehension purely for its
    side effect (building and discarding a throwaway list); replaced with a
    plain loop.  ``type(v) is type([])`` replaced with the idiomatic
    ``isinstance(v, list)`` (also accepts list subclasses — backward
    compatible).
    """
    form = cgi.FieldStorage()
    for k, v in args.items():
        if isinstance(v, list):
            for x in v:
                form.list.append(cgi.MiniFieldStorage(k, x))
        elif isinstance(v, FileUpload):
            x = cgi.MiniFieldStorage(k, v.content)
            x.filename = v.filename
            form.list.append(x)
        else:
            form.list.append(cgi.MiniFieldStorage(k, v))
    return form
def test_getvalue(self):
    """getvalue() yields text for byte-keyed fields and honours defaults."""
    storage = FieldStorage()
    storage.list.append(cgi.MiniFieldStorage(b"key1", b"dingdong"))
    storage.list.append(cgi.MiniFieldStorage(b"key2", None))
    # byte value comes back as text
    assert utils.is_text(storage.getvalue("key1"))
    assert storage.getvalue("key1") == "dingdong"
    # explicit None value and missing key both yield None
    assert storage.getvalue("key2") is None
    assert storage.getvalue("key3") is None
    # byte default is decoded to text as well
    assert storage.getvalue("key3", b"x") == "x"
def read_urlencoded(self):
    # Parse an urlencoded body into self.list, grouping PHP-style
    # "group[key]" fields into a single MiniFieldStorage whose .value is a
    # dict mapping key -> decoded value.
    # NOTE(review): Python 2 only — dict.values() + list concatenation and
    # str.decode('utf8') on byte strings below do not work on Python 3.
    indexed = {}
    self.list = []
    for field, value in cgi.parse_qsl(self.fp.read(self.length),
                                      self.keep_blank_values,
                                      self.strict_parsing):
        if self.FIELD_AS_ARRAY.match(field):
            # "group[key]" — collect into the dict-valued group field
            field_group, field_key = self.FIELD_AS_ARRAY.match(field).groups()
            indexed.setdefault(field_group,
                               cgi.MiniFieldStorage(field_group, {}))
            indexed[field_group].value[field_key] = value.decode('utf8')
        else:
            self.list.append(cgi.MiniFieldStorage(field, value.decode('utf8')))
    # grouped fields first, then plain fields
    self.list = indexed.values() + self.list
    self.skip_lines()
def reflect(environ, start_response):
    # WSGI endpoint that fetches an external resource named by the 'uri'
    # form field.  (Python 2 code: urllib2 and "except ..., exc" syntax.)
    # NOTE(review): the function appears to continue past this excerpt —
    # 'filehandle'/'type' are bound here but used later.
    if environ.get('tiddlyweb.type', '') == 'multipart/form-data':
        form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ)
    else:
        # This hack to ensure that we have a uniform interface
        # On the cgi form field values whether we are multipart or
        # url encoded.
        form = cgi.FieldStorage()
        form.list = []
        for key, value in environ['tiddlyweb.query'].items():
            for single_value in value:
                form.list.append(cgi.MiniFieldStorage(key, single_value))
    # Ordering is important here. File will often appear true when
    # it is not.
    if 'uri' in form and form['uri'].value:
        try:
            uri = form.getfirst('uri')
            request = urllib2.Request(uri)
            if (request.get_type() != 'file'):
                filehandle = urllib2.urlopen(uri)
                type = filehandle.info()['content-type']
            else:
                # refuse file:// URIs (local file disclosure guard)
                raise ValueError('file: not allowed')
        except (ValueError, AttributeError, urllib2.URLError), exc:
            raise HTTP400('URI Input error: %s' % exc)
def add_qs(self, qs):
    """Add all non-existing parameters from the given query string.

    Fields are split on '&' and ';'.  Existing FieldStorage keys win, so
    POSTed variables override query-string variables.

    BUGFIX: the name/value split used ``split('=', 2)``, so a field whose
    value itself contains '=' (e.g. ``next=/a?b=c``) produced three parts
    and was silently dropped (or raised under strict parsing).  Using
    maxsplit=1 keeps everything after the first '=' as the value, matching
    the stdlib's urlencoded parsing.
    """
    r = {}
    for amp_chunk in qs.split('&'):
        # also honour ';' as a separator (historic query-string syntax)
        for name_value in amp_chunk.split(';'):
            nv = name_value.split('=', 1)
            if len(nv) != 2:
                if self.strict_parsing:
                    raise ValueError('bad query field: %r' % (name_value, ))
                continue
            name = urllib.unquote(nv[0].replace('+', ' '))
            value = urllib.unquote(nv[1].replace('+', ' '))
            if len(value) or self.keep_blank_values:
                if name in r:
                    r[name].append(value)
                else:
                    r[name] = [value]
    if self.list is None:
        # This makes sure self.keys() are available, even
        # when valid POST data wasn't encountered.
        self.list = []
    for key in r:
        if key not in self:
            # Only append values that aren't already in the FieldStorage;
            # this makes POSTed vars override vars on the query string.
            for value in r[key]:
                self.list.append(cgi.MiniFieldStorage(key, value))
def read_multi(self, environ, keep_blank_values, strict_parsing):
    """Internal: read a part that is itself multipart."""
    ib = self.innerboundary
    if not cgi.valid_boundary(ib):
        raise ValueError(
            'Invalid boundary in multipart form: %r' % (ib,))
    self.list = []
    if self.qs_on_post:
        # merge query-string parameters supplied alongside the POST body
        query = cgi.urllib.parse.parse_qsl(
            self.qs_on_post, self.keep_blank_values, self.strict_parsing,
            encoding=self.encoding, errors=self.errors)
        for key, value in query:
            self.list.append(cgi.MiniFieldStorage(key, value))
    klass = self.FieldStorageClass or self.__class__
    first_line = self.fp.readline() # bytes
    if not isinstance(first_line, bytes):
        raise ValueError("%s should return bytes, got %s" \
                         % (self.fp, type(first_line).__name__))
    self.bytes_read += len(first_line)
    # Ensure that we consume the file until we've hit our innerboundary
    while (first_line.strip() != (b"--" + self.innerboundary) and
            first_line):
        first_line = self.fp.readline()
        self.bytes_read += len(first_line)
    while True:
        # accumulate one part's header block (terminated by a blank line)
        parser = cgi.FeedParser()
        hdr_text = b""
        while True:
            data = self.fp.readline()
            hdr_text += data
            if not data.strip():
                break
        if not hdr_text:
            break
        # parser takes strings, not bytes
        self.bytes_read += len(hdr_text)
        parser.feed(hdr_text.decode(self.encoding, self.errors))
        headers = parser.close()
        # Some clients add Content-Length for part headers, ignore them
        if 'content-length' in headers:
            filename = None
            # NOTE(review): this inspects self.headers (the enclosing
            # entity's headers), not the part's own `headers`; the stdlib
            # version checks the part headers — confirm this is intended.
            if 'content-disposition' in self.headers:
                cdisp, pdict = parse_header(self.headers['content-disposition'])
                if 'filename' in pdict:
                    filename = pdict['filename']
            if filename is None:
                del headers['content-length']
        # recursively parse the part body with the remaining byte budget
        part = klass(self.fp, headers, ib, environ, keep_blank_values,
                     strict_parsing, self.limit-self.bytes_read,
                     self.encoding, self.errors)
        self.bytes_read += part.bytes_read
        self.list.append(part)
        if part.done or self.bytes_read >= self.length > 0:
            break
    self.skip_lines()
def handle(self):
    """Run the ID search first, then seed the form with the default
    query parameters for a regular search."""
    # Check for IDs first
    SearchIDAction.handle(self)
    # regular search, fill out query parameters
    defaults = (
        ('@columns', 'id,activity,title,creator,assignee,status,type'),  # columns_showall
        ('@sort', '-activity'),
        ('ignore', 'file:content'),
    )
    append_field = self.form.value.append
    for name, value in defaults:
        append_field(cgi.MiniFieldStorage(name, value))
def addMockField(self, key, value):
    """Inject a form field into this mock storage.

    Real cgi.FieldStorage instances are readonly, and thus do not
    support such a method. This is what allows the test developer
    to manufacture a Mock request object.

    >>> mockForm = MockCGIFieldStorage()
    >>> mockForm.addMockField("MyField","MyValue")
    >>> mockForm["MyField"].value
    'MyValue'
    """
    field = cgi.MiniFieldStorage(key, value)
    self[key] = field
def read_urlencoded(self):
    """Internal: read data in query string format."""
    qs = self.fp.read(self.length)
    if self.qs_on_post:
        qs += '&' + self.qs_on_post
    # stash the raw body for later inspection by other code (not shown here)
    self._raw_request = qs
    pairs = urlparse.parse_qsl(qs, self.keep_blank_values,
                               self.strict_parsing)
    self.list = [cgi.MiniFieldStorage(name, value) for name, value in pairs]
    self.skip_lines()
def _get_constraints(self, req):
    # Extract ticket-query constraints from the request arguments.
    # NOTE(review): Python 2 code — dict.has_key, list-returning map(),
    # del on the map() result.
    constraints = {}
    ticket_fields = [f['name'] for f in
                     TicketSystem(self.env).get_ticket_fields()]
    # A special hack for Safari/WebKit, which will not submit dynamically
    # created check-boxes with their real value, but with the default value
    # 'on'. See also htdocs/query.js#addFilter()
    checkboxes = [k for k in req.args.keys() if k.startswith('__')]
    if checkboxes:
        import cgi
        for checkbox in checkboxes:
            # "__field:value" carried in the argument name itself
            # NOTE(review): maxsplit=2 means a value containing ':' yields
            # three parts and this unpack raises ValueError — confirm
            # whether values can contain ':'.
            (real_k, real_v) = checkbox[2:].split(':', 2)
            req.args.list.append(cgi.MiniFieldStorage(real_k, real_v))
    # For clients without JavaScript, we remove constraints here if
    # requested
    remove_constraints = {}
    to_remove = [k[10:] for k in req.args.keys()
                 if k.startswith('rm_filter_')]
    if to_remove: # either empty or containing a single element
        match = re.match(r'(\w+?)_(\d+)$', to_remove[0])
        if match:
            # "field_N": remove the N-th value of that field
            remove_constraints[match.group(1)] = int(match.group(2))
        else:
            # bare field name: remove the whole constraint
            remove_constraints[to_remove[0]] = -1
    for field in [k for k in req.args.keys() if k in ticket_fields]:
        vals = req.args[field]
        if not isinstance(vals, (list, tuple)):
            vals = [vals]
        vals = map(lambda x: x.value, vals)
        if vals:
            mode = req.args.get(field + '_mode')
            if mode:
                # prefix each value with the match mode (e.g. '!', '~')
                vals = map(lambda x: mode + x, vals)
            if remove_constraints.has_key(field):
                idx = remove_constraints[field]
                if idx >= 0:
                    del vals[idx]
                    if not vals:
                        continue
                else:
                    continue
            constraints[field] = vals
    return constraints
def read_single(self):
    # Decode an AMF3-encoded request body into form fields, one
    # MiniFieldStorage per payload key.  (Python 2: dict.iteritems().)
    qs = self.fp.read(self.length)
    if qs.strip() == '':
        raise AMFDecodeException('empty AMF data on decode')
    ct = amfast.context.DecoderContext(qs, amf3=True,
                                       class_def_mapper=self.classmapper)
    data = amfast.decoder.decode(ct)
    # drop the decoder context reference once decoding is done
    ct = None
    self.list = [cgi.MiniFieldStorage(k, v)
                 for k, v in data.amf_payload.iteritems()]
    self.skip_lines()
def handle(self):
    # Verify an OpenID response and complete registration via the base
    # RegisterAction, injecting the claimed id and a generated password.
    # (Python 2 "raise Exc, arg" syntax below.)
    query = {}
    if 'openid.identity' not in self.form:
        raise ValueError, "OpenID fields missing"
    # re-authenticate fields
    for key in self.form:
        if key.startswith("openid"):
            value = self.form[key].value
            try:
                query[key].append(value)
            except KeyError:
                query[key] = [value]
    claimed = self.authenticate(query)
    # OpenID signature is still authentic, now pass it on to the base
    # register method; also fake password
    # Consume nonce first
    self.store_nonce(query)
    self.form.value.append(cgi.MiniFieldStorage('openids', claimed))
    pwd = password.generatePassword()
    self.form.value.append(cgi.MiniFieldStorage('password', pwd))
    self.form.value.append(cgi.MiniFieldStorage('@confirm@password', pwd))
    return RegisterAction.handle(self)
def atmos_mask_test():
    """Drive doWMS() with a canned atmospheric (hr24_prcp) figure request."""
    request_fields = {
        "INVOCATION" : "terminal",
        "SAVE_LOCAL": "1",
        "REQUEST" : "GetFullFigure",
        "BBOX" : "70,-50,180,-5",
        "WIDTH" : "640",
        "HEIGHT" : "300",
        "DAP_URL" : 'http://opendap.bom.gov.au:8080/thredds/dodsC/PASAP/atmos_latest.nc',
        "LAYER" : 'hr24_prcp',
        "STYLE" : 'contour'
        #"STYLE" : 'grid'
    }
    params = cgi.FieldStorage()
    for field_name, field_value in request_fields.items():
        params.list.append(cgi.MiniFieldStorage(field_name, field_value))
    doWMS(params)
def add_qs(self, qs):
    """Add all non-existing parameters from the given query string.

    Splits *qs* the same way the running Python's cgi module would:
    honouring ``max_num_fields`` when present, and using the single
    ``separator`` attribute on Python versions patched for CVE-2021-23336
    (no attribute means the older '&'/';' dual-separator algorithm).
    """
    values = defaultdict(list)
    # split the query string in the same way as the current Python does it
    try:
        max_num_fields = self.max_num_fields
    except AttributeError:
        max_num_fields = None
    try:
        separator = self.separator
    except AttributeError:
        # splitting algorithm before Python 3.6.13
        if max_num_fields is not None:
            num_fields = 1 + qs.count('&') + qs.count(';')
            if max_num_fields < num_fields:
                raise ValueError('Max number of fields exceeded')
        pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')]
    else:
        if not separator or not isinstance(separator, (str, bytes)):
            return  # invalid separator, do nothing in this case
        if max_num_fields is not None:
            num_fields = 1 + qs.count(separator)
            if max_num_fields < num_fields:
                raise ValueError('Max number of fields exceeded')
        # new splitting algorithm that only supports one separator
        pairs = qs.split(separator)
    for name_value in pairs:
        # maxsplit=1 so values containing '=' survive intact
        nv = name_value.split('=', 1)
        if len(nv) != 2:
            if self.strict_parsing:
                raise ValueError(f'bad query field: {name_value!r}')
            continue
        name = parse.unquote(nv[0].replace('+', ' '))
        value = parse.unquote(nv[1].replace('+', ' '))
        if len(value) or self.keep_blank_values:
            values[name].append(value)
    if self.list is None:
        # This makes sure self.keys() are available, even
        # when valid POST data wasn't encountered.
        self.list = []
    for key in values:
        if key not in self:
            # Only append values that aren't already the FieldStorage;
            # this makes POSTed vars override vars on the query string.
            for value in values[key]:
                self.list.append(cgi.MiniFieldStorage(key, value))
def ocean_mask_test():
    """Drive doWMS() with a canned ocean (SSTA) full-figure request."""
    request_fields = {
        "INVOCATION" : "terminal",
        "SAVE_LOCAL": "1",
        "REQUEST" : "GetFullFigure",
        "BBOX" : "00,-90,360,90",
        # "WIDTH" : "640",
        # "HEIGHT" : "300",
        # CHANGED
        "WIDTH" : "800",
        "HEIGHT" : "600",
        "DAP_URL" : 'http://opendap.bom.gov.au:8080/thredds/dodsC/PASAP/ocean_latest.nc',
        "LAYER" : 'SSTA',
        "STYLE" : 'contour'
        #"STYLE" : 'grid'
    }
    params = cgi.FieldStorage()
    for field_name, field_value in request_fields.items():
        params.list.append(cgi.MiniFieldStorage(field_name, field_value))
    doWMS(params)
def parse_qs(self):
    """ Explicitly parse the query string, even if it's a POST request """
    # Legacy Python 2 code throughout: string-module functions, backtick
    # repr, dict.has_key and "raise Exc, arg" syntax.
    self._method = string.upper(self._environ['REQUEST_METHOD'])
    if self._method == "GET" or self._method == "HEAD":
        ## print __file__, "bailing on GET or HEAD request"
        return  # bail because cgi.FieldStorage already did this
    self._qs = self._environ.get('QUERY_STRING', None)
    if not self._qs:
        ## print __file__, "bailing on no query_string"
        return  ## bail if no query string
    name_value_pairs = string.splitfields(self._qs, '&')
    dict = {}
    for name_value in name_value_pairs:
        # NOTE(review): no maxsplit here, so "a=b=c" yields three parts
        # and is dropped (or raises under strict parsing) — confirm
        # whether values can legitimately contain '='.
        nv = string.splitfields(name_value, '=')
        if len(nv) != 2:
            if self._strict_parsing:
                raise ValueError, "bad query field: %s" % `name_value`
            continue
        name = urllib.unquote(string.replace(nv[0], '+', ' '))
        value = urllib.unquote(string.replace(nv[1], '+', ' '))
        if len(value) or self._keep_blank_values:
            if dict.has_key(name):
                dict[name].append(value)
                ## print "appending"
            else:
                dict[name] = [value]
                ## print "no append"
    # Only append values that aren't already in the FieldStorage's keys;
    # This makes POSTed vars override vars on the query string
    if not self.list:
        # This makes sure self.keys() are available, even
        # when valid POST data wasn't encountered.
        self.list = []
    keys = self.keys()
    for key, values in dict.items():
        if key not in keys:
            for value in values:
                self.list.append(cgi.MiniFieldStorage(key, value))
def do_POST(self):
    """Parse a POST body (JSON or form/multipart) and dispatch it.

    Builds two dicts keyed by field name — ``form`` for plain values and
    ``files`` for uploads — each mapping to a *list* of field objects
    (HTTP allows a field to repeat), then hands both to self._dispatch().

    BUGFIX: a request without a Content-Type header made
    ``self.headers['content-type']`` return None and crash on
    ``.endswith``; use ``.get(...) or ''`` so such requests fall through
    to the regular form branch.
    """
    form = {}
    files = {}
    content_type = self.headers.get('content-type') or ''
    if content_type.endswith('/json'):
        # Support json posts as well, fake cgi fields
        import json
        content_len = int(self.headers.get('content-length', 0))
        content = json.loads(self.rfile.read(content_len))
        for k, v in content.items():
            form.setdefault(k, []).append(cgi.MiniFieldStorage(k, v))
    else:
        fields = cgi.FieldStorage(self.rfile, self.headers,
                                  environ={'REQUEST_METHOD': 'POST'})
        for f in fields.list:
            target = files if f.filename else form
            # Since HTTP allows for the same field to be present multiple
            # times, add as list
            target.setdefault(f.name, []).append(f)
    self._dispatch(form, files)
def handle(self):
    # Attempt OpenID login; fall back to the standard LoginAction when a
    # password is supplied or the identifier is a known local user name.
    # (Python 2 "raise Exc, arg" syntax below.)
    if 'openid_identifier' in self.form:
        username = self.form['openid_identifier'].value
        # copy into __login_name for call to base action
        self.form.value.append(
            cgi.MiniFieldStorage('__login_name', username))
    else:
        # Let base action raise the exception
        return LoginAction.handle(self)
    if '__login_password' in self.form and self.form[
            '__login_password'].value:
        # assume standard login if password provided
        return LoginAction.handle(self)
    try:
        self.db.user.lookup(username)
    except KeyError:
        # not a user name - it must be an openid
        pass
    else:
        return LoginAction.handle(self)
    # Login an OpenID
    type, claimed = openid2rp.normalize_uri(username)
    if type == 'xri':
        raise ValueError, "XRIs are not supported"
    discovered = openid2rp.discover(claimed)
    if not discovered:
        raise ValueError, "OpenID provider discovery failed"
    self.store_discovered(claimed, *discovered)
    stypes, url, op_local = discovered
    session = self.get_session(url, stypes)
    # build the provider redirect carrying our return address
    realm = self.base + "?@action=openid_return"
    return_to = realm + "&__came_from=%s" % urllib.quote(self.client.path)
    url = openid2rp.request_authentication(
        stypes, url, session.assoc_handle, return_to,
        realm=realm, claimed=claimed, op_local=op_local)
    raise Redirect, url
def parse_get_qs(qs, fs, keep_blank_values=0, strict_parsing=0): r = {} for name_value in qs.split('&'): nv = name_value.split('=', 2) if len(nv) != 2: if strict_parsing: raise ValueError, "bad query field: %r" % (name_value,) continue name = urllib.unquote(nv[0].replace('+', ' ')) value = urllib.unquote(nv[1].replace('+', ' ')) if len(value) or keep_blank_values: if r.has_key(name): r[name].append(value) else: r[name] = [value] # Only append values that aren't already in the FieldStorage's keys; # This makes POSTed vars override vars on the query string for key, values in r.items(): if not fs.has_key(key): for value in values: fs.list.append(cgimodule.MiniFieldStorage(key, value)) return fs
def testSubmitDatasetHandlerEmptyDirectorySubmission(self): outputStr = StringIO.StringIO() # reset the Dataset Directory to point to an empty directory formdata = TestConfig.formdata.copy() formdata['datDir'] = cgi.MiniFieldStorage('datDir', TestConfig.DatasetsEmptyDirPath) # Invoke dataset submission program, passing faked form submission parameters SubmitDatasetConfirmationHandler.processDatasetSubmissionForm(formdata, outputStr) # Check that the dataset created for unzipped data can be dereferenced in the databank self.session.doHTTP_GET(resource="/" + TestConfig.SiloName +"/datasets/"+TestConfig.DatasetId+"-packed", expect_status=200, expect_reason="OK", accept_type="application/json") # Invoke dataset submission program yet again. # This time, bypassing the dataset creation but continuing submittion of data to the already exiting dataset SubmitDatasetConfirmationHandler.processDatasetSubmissionForm(formdata, outputStr) # Check that the dataset created for unzipped data can be dereferenced in the databank self.session.doHTTP_GET(resource="/" + TestConfig.SiloName +"/datasets/"+TestConfig.DatasetId+"-packed", expect_status=200, expect_reason="OK", accept_type="application/json") SubmitDatasetUtils.deleteDataset(self.session, TestConfig.DatasetId+"-packed") return
def read_urlencoded(self):
    """Internal: read data in query string format."""
    body = self.fp.read(self.length)
    # keep the raw bytes accessible as a file-like object
    self.file = io.BytesIO(body)
    qs = String(body, self.encoding, self.errors)
    if self.qs_on_post:
        qs += '&' + self.qs_on_post
    try:
        if is_py2:
            # Python 2 parse_qsl has no encoding/max_num_fields kwargs
            query = parse.parse_qsl(
                qs,
                self.keep_blank_values,
                self.strict_parsing,
            )
        else:
            query = parse.parse_qsl(qs,
                                    self.keep_blank_values,
                                    self.strict_parsing,
                                    encoding=self.encoding,
                                    errors=self.errors,
                                    max_num_fields=self.max_num_fields)
    except ValueError:
        # if the right headers were sent then this should error
        if self.is_urlencoded() or self.is_multipart():
            raise
        # NOTE(review): when the error is swallowed here, self.list is
        # never assigned in this method — presumably initialised
        # elsewhere; confirm callers tolerate that.
    else:
        self.list = [
            cgi.MiniFieldStorage(key, value) for key, value in query
        ]
    self.skip_lines()
def set_if_missing(fs, name, value):
    """Append *name*=*value* to FieldStorage *fs* unless *value* is falsy
    or the field already exists (existing fields win).

    Modernisation: ``fs.has_key(name)`` is Python 2 only; the ``in``
    operator is equivalent (cgi.FieldStorage defines __contains__ on both
    2.7 and 3) and keeps this helper portable.
    """
    if value and name not in fs:
        fs.list.append(cgi.MiniFieldStorage(name, value))
def setvalue(self, key, val):
    """Append a new (key, val) form field to this storage."""
    field = cgi.MiniFieldStorage(key, val)
    self.list.append(field)
def __setitem__(self, name, value):
    """Replace any existing field *name* with a single new value."""
    already_present = name in self
    if already_present:
        del self[name]
    replacement = cgi.MiniFieldStorage(name, value)
    self.list.append(replacement)
def main():
    """CGI entry point: configure the gateway, collect filter arguments,
    and emit the requested JSON data."""
    gateway.data_path = config.get('PATHS', 'data_directory')
    gateway.egs_data_file = config.get('PATHS', 'egs_data_file')
    gateway.egs_literature_data_file = config.get('PATHS', 'egs_literature_data_file')
    gateway.benefit_categories_ordered_list = config.get(
        'ORDERED_LISTS', 'benefit_categories')
    gateway.ecosystem_ordered_list = config.get('ORDERED_LISTS', 'ecosystems')
    gateway.contribution_pathway_ordered_list = config.get(
        'ORDERED_LISTS', 'contribution_pathways')
    arguments = cgi.FieldStorage()
    ''' set values for testing on the command line. when run as a web
    service, the variable will be set and this block skipped. On the
    command line this is used to pass in arguments for testing. values are
    set in the egs_www.config file '''
    if not 'GATEWAY_INTERFACE' in os.environ:
        for name, value in {
            "benefit_category": config.has_option('TESTING', 'benefit_category') and config.get('TESTING', 'benefit_category') or None,
            "ecosystem_type": config.has_option('TESTING', 'ecosystem_type') and config.get('TESTING', 'ecosystem_type') or None,
            "contribution_pathway": config.has_option('TESTING', 'contribution_pathway') and config.get('TESTING', 'contribution_pathway') or None,
            "benefit_type": config.has_option('TESTING', 'benefit_type') and config.get('TESTING', 'benefit_type') or None,
            "data_layer": config.has_option('TESTING', 'data_layer') and config.get('TESTING', 'data_layer') or None,
        }.items():
            # only inject options actually present in the config
            if (value):
                arguments.list.append(cgi.MiniFieldStorage(name, value))
    ''' return JSON data based on the highest level filter. Note: the
    method names reflect the name of the children available based on the
    users arguments. For example, if 'ecosystem_type' is in the arguments,
    then 'benefit_category' is also available, and there is enough
    information to figure out which 'contribution_pathway' data to
    return '''
    data = OrderedDict()
    if 'data_layer' in arguments:
        data = gateway.data_layer_details(
            arguments.getvalue('data_layer'),
            arguments.getvalue('benefit_type'),
            arguments.getvalue('contribution_pathway'),
            arguments.getvalue('ecosystem_type'),
            arguments.getvalue('benefit_category'))
    elif 'benefit_type' in arguments:
        data = gateway.data_layer(arguments.getvalue('benefit_type'),
                                  arguments.getvalue('contribution_pathway'),
                                  arguments.getvalue('ecosystem_type'),
                                  arguments.getvalue('benefit_category'))
    elif 'contribution_pathway' in arguments:
        data = gateway.benefit_type(arguments.getvalue('contribution_pathway'),
                                    arguments.getvalue('ecosystem_type'),
                                    arguments.getvalue('benefit_category'))
    elif 'ecosystem_type' in arguments:
        data = gateway.contribution_pathway(
            arguments.getvalue('ecosystem_type'),
            arguments.getvalue('benefit_category'))
    elif 'benefit_category' in arguments:
        data = gateway.ecosystem_type(arguments.getvalue('benefit_category'))
    else:
        data = gateway.benefit_categories()
    # command line: pretty-print; web service: emit CGI headers first
    if not 'GATEWAY_INTERFACE' in os.environ:
        print(json.dumps(data, indent=4, default=lambda x: None))
    else:
        print('Content-Type: application/json')
        print('')
        print(json.dumps(data, default=lambda x: None))
def setDatasetsBaseDir(base):
    """Initialise the module-level test configuration (globals) relative to
    the given datasets base directory."""
    global DatasetsBaseDir
    DatasetsBaseDir = base
    #global HostName = "zoo-admiral-behav.zoo.ox.ac.uk"
    #global HostName = "zoo-admiral-silk.zoo.ox.ac.uk"
    #global HostName = "zoo-admiral-devel.zoo.ox.ac.uk"
    #global HostName = "zoo-admiral-ibrg.zoo.ox.ac.uk"
    #global hostname = "zakynthos.zoo.ox.ac.uk"
    global HostName, SiloName, Username, Password, FileName
    global FilePath, FileMimeType, ZipMimeType
    global DirName, DirPath
    global DatasetsEmptyDirName, DatasetsEmptyDirPath
    global UpdatedTitle, UpdatedDescription, TestPat
    #HostName = "localhost"
    HostName = "zoo-admiral-ibrg.zoo.ox.ac.uk"
    SiloName = "admiral"
    # credentials redacted in this copy
    Username = "******"
    Password = "******"
    FileName = "file1.txt"
    FilePath = DatasetsBaseDir + os.path.sep + FileName
    FileMimeType = "text/plain"
    ZipMimeType = "application/zip"
    DirName = "DatasetsTopDir"
    DirPath = DatasetsBaseDir + os.path.sep + DirName
    DatasetsEmptyDirName = "DatasetsEmptySubDir"
    DatasetsEmptyDirPath = DatasetsBaseDir + os.path.sep + DirName + os.path.sep + DatasetsEmptyDirName
    UpdatedTitle = "Updated Title"
    UpdatedDescription = "Updated Description"
    # matches names that do not end in ".zip"
    TestPat = re.compile("^.*$(?<!\.zip)")
    global ManifestName, ManifestFilePath
    ManifestName = "manifest.rdf"
    ManifestFilePath = DatasetsBaseDir + os.path.sep + DirName + os.path.sep + ManifestName
    global formdata, updatedformdata
    # faked CGI form submissions used by the submission-handler tests
    formdata = \
        { 'datDir'      : cgi.MiniFieldStorage('datDir'      , DirPath)
        , 'datId'       : cgi.MiniFieldStorage('datId'       , "SubmissionToolTest")
        , 'title'       : cgi.MiniFieldStorage('title'       , "Submission tool test title")
        , 'description' : cgi.MiniFieldStorage('description' , "Submission tool test description")
        , 'user'        : cgi.MiniFieldStorage('user'        , Username)
        , 'pass'        : cgi.MiniFieldStorage('pass'        , Password)
        , 'endpointhost': cgi.MiniFieldStorage('endpointhost', HostName)
        , 'basepath'    : cgi.MiniFieldStorage('basepath'    , "/"+SiloName+"/")
        , 'submit'      : cgi.MiniFieldStorage('submit'      , "Submit")
        , 'directory'   : cgi.MiniFieldStorage('directory'   , DirPath)
        }
    updatedformdata = \
        { 'datDir'      : cgi.MiniFieldStorage('datDir'      , DirPath)
        , 'datId'       : cgi.MiniFieldStorage('datId'       , "SubmissionToolTest")
        , 'title'       : cgi.MiniFieldStorage('title'       , "Submission tool updated test title")
        , 'description' : cgi.MiniFieldStorage('description' , "Submission tool updated test description")
        , 'user'        : cgi.MiniFieldStorage('user'        , Username)
        , 'pass'        : cgi.MiniFieldStorage('pass'        , Password)
        , 'endpointhost': cgi.MiniFieldStorage('endpointhost', HostName)
        , 'basepath'    : cgi.MiniFieldStorage('basepath'    , "/"+SiloName+"/")
        , 'submit'      : cgi.MiniFieldStorage('submit'      , "Submit")
        }
    global DatasetId, DatasetDir, Title, Description, User, ElementValueList, ElementValueUpdatedList
    DatasetId = SubmitDatasetUtils.getFormParam('datId', formdata)
    DatasetDir = SubmitDatasetUtils.getFormParam('datDir', formdata)
    Title = SubmitDatasetUtils.getFormParam('title', formdata)
    Description = SubmitDatasetUtils.getFormParam('description', formdata)
    User = SubmitDatasetUtils.getFormParam('user', formdata)
    ElementValueList = [User, DatasetId, Title, Description]
    ElementValueUpdatedList = [User, DatasetId, UpdatedTitle, UpdatedDescription]
    global dcterms, oxds
    dcterms = URIRef("http://purl.org/dc/terms/")
    oxds = URIRef("http://vocab.ox.ac.uk/dataset/schema#")
    global NamespaceDictionary
    NamespaceDictionary = {
        "dcterms" : dcterms ,
        "oxds"    : oxds
    }
    global ElementCreatorUri,ElementIdentifierUri,ElementTitleUri,ElementDescriptionUri,ElementUriList
    ElementCreatorUri = URIRef(dcterms + "creator")
    ElementIdentifierUri = URIRef(dcterms + "identifier")
    ElementTitleUri = URIRef(dcterms + "title")
    ElementDescriptionUri = URIRef(dcterms + "description")
    ElementUriList = [ElementCreatorUri, ElementIdentifierUri, ElementTitleUri, ElementDescriptionUri]
    return
def __setitem__(self, name, value):
    """Set field *name* to a single *value*, replacing any existing field.

    Modernisation: ``self.has_key(name)`` is Python 2 only; the ``in``
    operator is equivalent (dispatches to this class's own membership
    support) and works on both 2.7 and 3, matching the sibling
    ``__setitem__`` implementation elsewhere in this code base.
    """
    if name in self:
        del self[name]
    self.list.append(cgi.MiniFieldStorage(name, value))
def main():
    """CGI entry point: resolve a HUC code argument to the appropriate
    watershed-level data and emit it as JSON."""
    gateway.data_path = os.path.join(root_path, 'data')
    gateway.huc_file = os.path.join(gateway.data_path, 'huc_hydrologic_unit_codes.csv')
    gateway.navigation_file = os.path.join(gateway.data_path, 'navigator_huc12.p')
    arguments = cgi.FieldStorage()
    # this is used for testing on the command line
    if not 'GATEWAY_INTERFACE' in os.environ:
        for name, value in {
            "attribute" : "ELEVMEAN",
            "navigation_direction": "downstream",
            "code" : "130100020705",
        }.items():
            arguments.list.append(cgi.MiniFieldStorage(name, value))
    data = OrderedDict()
    if 'code' in arguments:
        ''' 'code' can be anything from a Region (2-digits) to a
        Subwatershed (12-digit) If it is lower than a subwatershed it
        returns data for the next level. If it is a subwatershed, it
        returns summary data for upstream (default) or downstream
        navigation The terms are defined in
        https://nhd.usgs.gov/wbd_facts.html Watershed Definitions
        Name Level Digit Number of HUCs
        Region 1 2 21
        Subregion 2 4 222
        Basin 3 6 352
        Subbasin 4 8 2,149
        Watershed 5 10 22,000
        Subwatershed 6 12 160,000 '''
        huc_code = arguments.getvalue('code')
        # dispatch on the HUC code length (2..12 digits, see table above)
        if len(huc_code) == 2:
            data = gateway.subregion(huc_code)
        elif len(huc_code) == 4:
            data = gateway.basin(huc_code)
        elif len(huc_code) == 6:
            data = gateway.subbasin(huc_code)
        elif len(huc_code) == 8:
            data = gateway.subwatershed(huc_code)
        elif len(huc_code) == 12:
            # attributes are not calculated for 'downstream' navigation
            if 'navigation_direction' in arguments and arguments.getvalue('navigation_direction').upper() == 'Downstream'.upper():
                data = gateway.navigate(huc_code, 'downstream')
            elif 'attribute' in arguments:
                attribute = arguments.getvalue('attribute')
                data = gateway.navigate(huc_code, 'upstream')
                if attribute != 'NONE':
                    data['attribute_results'] = gateway.get_attribute_value(attribute, data['huc12']['value'], data['us_huc12_ids']['value'])
            else:
                data = gateway.navigate(huc_code, 'upstream')
        else:
            # odd-length codes (10-digit watersheds etc.) not handled yet
            data = {'not': 'yet done'}
    # elif arguments.has_key('upstream'):
    #     huc_code = arguments.getvalue('upstream')
    #     data = gateway.navigate(huc_code, 'upstream')
    else:
        data = gateway.region()
    print('Content-Type: application/json')
    print('')
    print(json.dumps(data, default=lambda x: None))