def parseDMSStringSingle(str):
    '''Parse a single coordinate given either in DMS notation (digits plus
    an N/S/E/W cardinal letter) or in plain decimal degrees.

    It simply returns the value as a float and doesn't maintain any
    knowledge as to whether it is a latitude or a longitude.

    Note: the parameter is named ``str`` (shadowing the builtin); the name
    is kept unchanged for backward compatibility with keyword callers.

    Raises ValueError if the text cannot be parsed as a coordinate.
    '''
    text = str.strip().upper()
    try:
        if re.search(r"[NSEW]", text) is None:
            # No cardinal letter present: plain decimal degrees.
            coord = float(text)
        else:
            # We should have a DMS coordinate.
            if re.search(r'[NSEW]\s*\d+', text) is None:
                # The cardinal direction occurs after the digits,
                # e.g. "45 30 00 N".
                m = re.findall(r'(.+)\s*([NSEW])', text)
                if len(m) != 1 or len(m[0]) != 2:
                    raise ValueError('Invalid DMS Coordinate')
                coord = LatLon.parseDMS(m[0][0], m[0][1])
            else:
                # The cardinal direction occurs at the beginning of the
                # digits, e.g. "N 45 30 00".
                m = re.findall(r'([NSEW])\s*(.+)', text)
                if len(m) != 1 or len(m[0]) != 2:
                    raise ValueError('Invalid DMS Coordinate')
                coord = LatLon.parseDMS(m[0][1], m[0][0])
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; callers still see a plain ValueError.
        raise ValueError('Invalid Coordinates')
    return coord
def sort(self):
    """Repopulate and sort the cadastral-community (KG) list model.

    When both the model (``self.modell_kg``) and the selected municipality
    (``self.Gemeinde``) are present, the model is cleared, one non-editable
    row is appended per entry of ``self.gemeindeliste`` for the (stripped)
    municipality name, and the model is sorted on column 0.  Otherwise the
    method does nothing.
    """
    # ``is not None`` instead of the original ``not ... is None`` anti-idiom.
    if self.modell_kg is not None and self.Gemeinde is not None:
        self.modell_kg.clear()
        for item in self.gemeindeliste[self.Gemeinde.strip()]:
            eins = QtGui.QStandardItem(item)
            eins.setEditable(False)  # list entries are read-only
            self.modell_kg.appendRow(eins)
        self.modell_kg.sort(0)
def dinforead(prefix):
    """Read mesh info and run parameters from ``<prefix>_dinfo.dat``.

    Expected file layout (whitespace-separated):
      line 1: ``nr nh nphi``  -- mesh sizes
      line 2: ``a``           -- Kerr spin parameter
      line 3: ``t``           -- time stamp (optional; defaults to 0.)

    Returns the tuple ``(nr, nh, nphi, a, t)``.
    """
    dfile = prefix + '_dinfo.dat'
    # ``with`` guarantees the file is closed even when parsing raises
    # (the original left the handle open on any int()/double() failure).
    with open(dfile, 'r') as fd:
        s = fd.readline().split()
        nr = int(s[0])
        nh = int(s[1])
        nphi = int(s[2])
        s = fd.readline().split()
        a = double(s[0])
        s = fd.readline().split()
        if s:
            t = double(s[0])
        else:
            # Older dumps have no time line.
            t = 0.
    print("Kerr a = " + str(a))
    return nr, nh, nphi, a, t
def get_metadata(ids, output_file=None):
    """
    Retrieves metadata for a folder of folders, where each subfolder is
    named for a HathiTrust ID.  This structure is the default structure
    extracted from a Data API request (:method htrc.volumes.get_volumes:).

    ids         -- iterable of HathiTrust volume IDs, possibly still in
                   the filesystem-safe encoding ('+' for ':', '=' for '/').
    output_file -- optional path; when truthy, the collected metadata is
                   additionally dumped there as JSON.

    Returns a dict mapping cleaned-up IDs to their metadata records.
    """
    # Data cleanup: undo the filesystem-safe encoding.  ``htid`` avoids
    # shadowing the builtin ``id`` as the original did.
    ids = [htid.strip().replace('+', ':').replace('=', '/') for htid in ids]

    metadata = dict()
    # The bulk endpoint is queried in batches of 50 IDs.
    for segment in split_items(ids, 50):
        items = safe_bulk_metadata(segment)
        metadata.update(items)

    if output_file:
        with open(output_file, 'w') as outfile:
            json.dump(metadata, outfile)

    return metadata
# NOTE(review): parcel ("Grundstueck") search for a QGIS plugin.  Reads the
# parcel-number text field, builds a PostGIS subset filter per number plus
# the KG (cadastral community) number, loads the 'gst' layer and selects /
# zooms to the hits.  The source below was recovered with its original
# newlines lost AND contains credential-redaction corruption
# ("' user='******'gst')") that destroyed part of the ogr.Open call, so the
# code is kept byte-identical rather than rewritten.
# NOTE(review): the subset string is built by string concatenation from user
# input (self.txtGstnr) -- presumably safe only because QGIS quotes it, but
# this looks injection-prone; verify upstream sanitization.
def gstsuche(self): # Der Username der verwendet werden soll if len(auth_user_global) > 0: # Ist belegt auth_user = auth_user_global[0] else: auth_user = None #Textfeldinhalt zurücksetzen self.gefunden.setText("") self.gefunden.repaint() schema = 'vorarlberg' ################################################ # Geometriespalte bestimmen -- geht nur mit OGR uri = QgsDataSourceUri() uri.setConnection(self.db.hostName(),str(self.db.port()),self.db.databaseName(),'','') # Kein Kennwort nötig, Single Sign On try: if auth_user == None: outputdb = ogr.Open('pg: host =' + self.db.hostName() + ' dbname =' + self.db.databaseName() + ' schemas=' + schema + ' port=' + str(self.db.port())) else: outputdb = ogr.Open('pg: host =' + self.db.hostName() + ' dbname =' + self.db.databaseName() + ' schemas=' + schema + ' port=' + str(self.db.port()) + ' user='******'gst').GetGeometryColumn() except: geom_column = 'the_geom' ################################################## uri.setDataSource(schema, 'gst', geom_column) if not auth_user == None: uri.setUsername(auth_user) gst_lyr = QgsVectorLayer(uri.uri(), "gst","postgres") #------------------------------------------------------ # Subset Suche: Gibts so ein Grundstück überhaupt? # erst wenn ja, dann wird geladen #------------------------------------------------------ fid = [] # Eingabefeld auslesen und gleich splitten gstliste = str.split(self.txtGstnr.text(),",") abfr_str = '' nummer = '' for gst in gstliste: if abfr_str == '': abfr_str = abfr_str + 'gnr = \'' + str.strip(gst) + '\' ' nummer = nummer + gst + " " else: abfr_str = abfr_str + 'or gnr = \'' + str.strip(gst) + '\' ' nummer = nummer + gst + " " gst_lyr.setSubsetString('(' + abfr_str +') and kg = (\'' + self.kgnummer + '\')') gst_lyr.selectAll() fid = gst_lyr.selectedFeatureIds() #------------------------------------------------------ # Ende Subset Suche #------------------------------------------------------ #Wurde was gefunden? 
ja/nein if gst_lyr.selectedFeatureCount() >= 1: #Eins gefunden, Textfeld und Zoompunkt festlegen self.gefunden.setText(("Grundstück ") + nummer + " in KG " + self.Kgemeinde + " gefunden") self.zoompunkt = gst_lyr.boundingBoxOfSelected() # Erstmal die Gemeinde laden self.ladeGemeinde() # Ein Problem haben wir, da die FIDs der Layer nicht übereinstimmenm, # da diese aus einem VIEW stammen und im Modul Projektimport zugewiesen werden # um zu selektieren den geladenen Layer suchen #for lyr_tmp in self.iface.legendInterface().layers(): for lyr_tmp_d in QgsProject.instance().mapLayers(): # vergisst und auch bei einem refresh nicht richtig macht.... #if lyr_tmp.name() == ("Grundstücke-") + self.Gemeinde + ' (a)': lyr_tmp = QgsProject.instance().mapLayers()[lyr_tmp_d] if lyr_tmp.name() == ("Grundstücke-") + self.Gemeinde + ' (a)': #lyr_tmp = QgsProject.instance().mapLayers()[lyr_tmp_d] #if lyr_tmp.name() == ("Grundstücke-") + 'Vorarlberg (a)': # und nochmal die Subset auswahl durchführen # FIDS abfragen, Subset zurücksetzen und FIDS selektieren if not fid is None: lyr_tmp.setSubsetString('(' + abfr_str +') and kg = (\'' + self.kgnummer + '\')') lyr_tmp.selectAll() fid = lyr_tmp.selectedFeatureIds() lyr_tmp.setSubsetString('') lyr_tmp.selectByIds(fid) # und selektieren else: #nichts gefunden: Textfeld und Zoompunkt zurücksetzen self.gefunden.setText(("Grundstück ") + self.txtGstnr.text() + " in KG " + self.Kgemeinde + " nicht gefunden") self.zoompunkt = None
def process_incoming(incoming, id_string):
    """Validate one incoming SMS tuple and queue it as an XForm submission.

    ``incoming`` is a sequence of at least ``(identity, text[, id_string])``.
    On any validation failure an error dict is appended to the enclosing
    ``responses`` accumulator and the function returns early.  On success
    the XML submission (as a file-like object), its medias, the parsed
    JSON groups and the formatted notes are appended to the enclosing
    ``xforms`` / ``medias`` / ``json_submissions`` / ``xforms_notes``
    accumulators.  ``username`` is read from the enclosing scope.
    """
    # assign variables
    if len(incoming) >= 2:
        identity = incoming[0].strip().lower()
        text = incoming[1].strip().lower()
        # if the tuple contains an id_string, use it, otherwise default
        if id_string is None and len(incoming) >= 3:
            id_string = incoming[2]
    else:
        responses.append({'code': SMS_API_ERROR,
                          'text': _(u"Missing 'identity' "
                                    u"or 'text' field.")})
        return

    if not len(identity.strip()) or not len(text.strip()):
        responses.append({'code': SMS_API_ERROR,
                          'text': _(u"'identity' and 'text' fields can "
                                    u"not be empty.")})
        return

    # if no id_string has been supplied
    # we expect the SMS to be prefixed with the form's sms_id_string
    if id_string is None:
        keyword, text = [s.strip() for s in text.split(None, 1)]
        xform = XForm.objects.get(user__username=username,
                                  sms_id_string=keyword)
    else:
        xform = XForm.objects.get(user__username=username,
                                  id_string=id_string)

    if not xform.allows_sms:
        responses.append({'code': SMS_SUBMISSION_REFUSED,
                          'text': _(u"The form '%(id_string)s' does not "
                                    u"accept SMS submissions.")
                          % {'id_string': xform.id_string}})
        return

    # parse text into a dict object of groups with values
    json_submission, medias_submission, notes = parse_sms_text(
        xform, identity, text)

    # retrieve sms_response if exist in the form.
    json_survey = json.loads(xform.json)
    if json_survey.get('sms_response'):
        resp_str.update({'success': json_survey.get('sms_response')})

    # check that the form contains at least one filled group
    meta_groups = sum([1 for k in list(json_submission)
                       if k.startswith('meta')])
    if len(list(json_submission)) <= meta_groups:
        responses.append({'code': SMS_PARSING_ERROR,
                          'text': _(u"There must be at least one group of "
                                    u"questions filled.")})
        return

    # check that required fields have been filled
    required_fields = [f.get('name')
                       for g in json_survey.get('children', {})
                       for f in g.get('children', {})
                       if f.get('bind', {}).get('required', 'no') == 'yes']
    submitted_fields = {}
    for group in json_submission.values():
        submitted_fields.update(group)

    for field in required_fields:
        if not submitted_fields.get(field):
            responses.append({'code': SMS_SUBMISSION_REFUSED,
                              'text': _(u"Required field `%(field)s` is "
                                        u"missing.") % {'field': field}})
            return

    # convert dict object into an XForm string
    xml_submission = dict2xform(jsform=json_submission,
                                form_id=xform.id_string)

    # compute notes
    data = {}
    for g in json_submission.values():
        data.update(g)
    for idx, note in enumerate(notes):
        try:
            notes[idx] = note.replace('${', '{').format(**data)
        except Exception as e:
            # BUG FIX: the original logged with ``text(e)``, but ``text``
            # is the SMS body (a str), so the handler itself raised a
            # TypeError.  ``str(e)`` is what was intended.
            logging.exception(_(u'Updating note threw exception: %s'
                                % str(e)))

    # process_incoming expects submission to be a file-like object
    xforms.append(BytesIO(xml_submission.encode('utf-8')))
    medias.append(medias_submission)
    json_submissions.append(json_submission)
    xforms_notes.append(notes)
# NOTE(review): builds the density-weighted velocity-correlation matrix over
# dump frames n1..n2 and writes the theta-averaged components to
# 'merge_corv.dat'.  Here ``re`` is NOT the regex module but a project
# reader module (it has .rg, .rd, .nx, .gdet, ... attributes) -- confusing
# shadowing; verify before touching imports.  The original newlines were
# lost in extraction, so the statement-dense lines below are kept
# byte-identical; the ordering of the accumulation (first frame initializes
# drh/drr/..., later frames use the +=) is load-bearing.
def corvee(n1,n2): re.rg("gdump") nx=re.nx ; ny=re.ny ; nz=re.nz gdet=re.gdet ; gcov=re.gcov ; _dx2=re._dx2 ; _dx3=re._dx3 ; drdx=re.drdx r=re.r ; h=re.h ; phi=re.ph # importing coordinate mesh # velocities: uufile='merge_uu.dat' udfile='merge_ud.dat' fuu=open(uufile, 'r') fud=open(udfile, 'r') # s=str.split(str.strip(fuu.readline())) # mean velocity field uumean=zeros([4,nx,ny,nz], dtype=double) udmean=zeros([4,nx,ny,nz], dtype=double) for kx in arange(nx): for ky in arange(ny): s=str.split(str.strip(fuu.readline())) uumean[0,kx,ky,:]=double(s[0]) uumean[1,kx,ky,:]=double(s[1]) uumean[2,kx,ky,:]=double(s[2]) uumean[3,kx,ky,:]=double(s[3]) s=str.split(str.strip(fud.readline())) udmean[0,kx,ky,:]=double(s[0]) udmean[1,kx,ky,:]=double(s[1]) udmean[2,kx,ky,:]=double(s[2]) udmean[3,kx,ky,:]=double(s[3]) fuu.close() fud.close() # tetrad components: t0=tetrad_t(uumean, udmean) tr=tetrad_r(uumean, udmean) th=tetrad_h(uumean, udmean) tp=tetrad_p(uumean, udmean) # print shape(tr) nframes=n2-n1+1 n=n1+arange(nframes) for k in n: fname=re.dumpname(k) re.rd(fname) uu=re.uu ; ud=re.ud ; rho=re.rho if(k==n1): rhomean=rho # velocity components: uu0=uu[0]*drdx[0,0]-uumean[0] ; ud0=old_div(ud[0],drdx[0,0])-udmean[0] uur=uu[1]*drdx[1,1]-uumean[1] ; udr=old_div(ud[1],drdx[1,1])-udmean[1] uuh=uu[2]*drdx[2,2]-uumean[2] ; udh=old_div(ud[2],drdx[2,2])-udmean[2] uup=uu[3]*drdx[3,3]-uumean[3] ; udp=old_div(ud[3],drdx[3,3])-udmean[3] tuur=(uu0*tr[0]+uur*tr[1]+uuh*tr[2]+uup*tr[3]) # co-moving velocity components tuuh=(uu0*th[0]+uur*th[1]+uuh*th[2]+uup*th[3]) tuup=(uu0*tp[0]+uur*tp[1]+uuh*tp[2]+uup*tp[3]) drh=rho*tuur*tuuh ; drp=rho*tuur*tuup ; dhp=rho*tuuh*tuup drr=rho*tuur*tuur ; dpp=rho*tuup*tuup ; dhh=rho*tuuh*tuuh # print(shape(drh)) # print(shape(rho)) # print(shape(tuur)) else: rhomean+=rho # velocity components: uu0=uu[0]-uumean[0] ; ud0=ud[0]-udmean[0] uur=uu[1]-uumean[1] ; udr=ud[1]-udmean[1] uuh=uu[2]-uumean[2] ; udh=ud[2]-udmean[2] uup=uu[3]-uumean[3] ; udp=ud[3]-udmean[3] 
tuur=(uu0*tr[0]+uur*tr[1]+uuh*tr[2]+uup*tr[3]) tuuh=(uu0*th[0]+uur*th[1]+uuh*th[2]+uup*th[3]) tuup=(uu0*tp[0]+uur*tp[1]+uuh*tp[2]+uup*tp[3]) drh+=rho*tuur*tuuh ; drp+=rho*tuur*tuup ; dhp+=rho*tuuh*tuup drr+=rho*tuur*tuur ; dpp+=rho*tuup*tuup ; dhh+=rho*tuuh*tuuh drh/=rhomean ; drp/=rhomean ; dhp/=rhomean drr/=rhomean ; dpp/=rhomean ; dhh/=rhomean drh=drh.mean(axis=2) ; drp=drp.mean(axis=2) ; dhp=dhp.mean(axis=2) drr=drr.mean(axis=2) ; dpp=dpp.mean(axis=2) ; dhh=dhh.mean(axis=2) fout=open('merge_corv.dat', 'w') for kx in arange(nx): for ky in arange(ny): # RR HH PP RH HP RP fout.write(str(drr[kx,ky])+' '+str(dhh[kx,ky])+' '+str(dpp[kx,ky])+' '+str(drh[kx,ky])+' '+str(dhp[kx,ky])+' '+str(drp[kx,ky])+'\n') fout.close()
# NOTE(review): loads a layer either from a PostGIS database (preferred) or,
# on any failure, from a shapefile on the filesystem; optionally applies an
# attribute subset.  The source below was recovered with its newlines lost
# AND is corrupted by a credential redaction ("' user='******'the_geom'")
# that swallowed part of the ogr.Open call, so it is kept byte-identical.
# NOTE(review): ``schema`` is referenced but never assigned in this function
# (it is a local in gstsuche) -- presumably the redaction also ate its
# assignment; confirm against the original repository before fixing.
def direk_laden(PGdb, lyr_name, shapename, pfad, iface, subset = None): # Der Username der verwendet werden soll if len(auth_user_global) > 0: # Ist belegt auth_user = auth_user_global[0] else: auth_user = None iface.layerTreeView().setCurrentLayer(None) # Damit von ganz aussen in der LEgende angefangen wird! try: db = PGdb shapename_ohne_suffix = shapename.replace('.shp','') shapename_ohne_suffix = str(str.strip(str.lower(shapename_ohne_suffix))) if db != None: try: # Geodatenbank ################################################ # Geometriespalte bestimmen -- geht nur mit OGR try: if auth_user == None: outputdb = ogr.Open('pg: host =' + db.hostName() + ' dbname =' + db.databaseName() + ' schemas=' + schema + ' port=' + str(db.port())) else: outputdb = ogr.Open('pg: host =' + db.hostName() + ' dbname =' + db.databaseName() + ' schemas=' + schema + ' port=' + str(db.port()) + ' user='******'the_geom' ################################################ #das Laden der Daten uri = QgsDataSourceUri() uri.setConnection(db.hostName(),str(db.port()),db.databaseName(),'','') if not auth_user == None: uri.setUsername(auth_user) uri.setDataSource('vorarlberg', shapename_ohne_suffix, geom_column) erg_lyr = QgsVectorLayer(uri.uri(), lyr_name,"postgres") # prüfen ob erfolgreich geladen if not erg_lyr.isValid(): # nicht erfolgreich QtWidgets.QMessageBox.about(None, "Fehler", "Layer " + shapename_ohne_suffix + " in der Datenbank nicht gefunden - es wird aufs Filesystem umgeschaltet") erg_lyr = QgsVectorLayer(pfad + '/' + shapename, lyr_name,"ogr") except Exception: # noch schlechter QtWidgets.QMessageBox.about(None, "Fehler", "Layer " + shapename_ohne_suffix + " in der Datenbank nicht gefunden - es wird aufs Filesystem umgeschaltet") erg_lyr = QgsVectorLayer(pfad + '/' + shapename, lyr_name,"ogr") elif db == None: erg_lyr = QgsVectorLayer(pfad + '/' + shapename, lyr_name,"ogr") # Hier die attributive Auswahl if subset != None: erg_lyr.setSubsetString(subset) # prüfen ob was 
sinnvolles geladen werden konnte if erg_lyr.isValid(): return erg_lyr else: QtWidgets.QMessageBox.about(None, "Fehler", "Layer " + shapename + " konnte nicht geladen werden") return None except Exception as b: return None
# NOTE(review): tokenizer generator.  For ngrams > 1 it first yields the
# unigram stream, then joined n-grams for each order up to ``ngrams``; for
# the base case it walks ``self.regex`` matches and pipes each token through
# the optional strip_chars / strip / stem / lemmatize / lower transforms
# before the three nonword filters.  The transform ORDER and the three-way
# nonword check are load-bearing (the FIXMEs below acknowledge this), and
# the docstring carries doctests that were split across mangled lines, so
# the code is kept byte-identical rather than restyled.
def __iter__(self, ngrams=None): r"""Generate a sequence of words or tokens, using a re.match iteratively through the str TODO: - need two different self.lower and lemmatize transforms, 1 before and 1 after nonword detection - each of 3 nonword filters on a separate line, setting w=None when nonword "hits" - refactor `nonwords` arg/attr to `ignore_stopwords` to be more explicit >>> doc = ("John D. Rock\n\nObjective: \n\tSeeking a position as Software --Architect-- / " + ... "_Project Lead_ that can utilize my expertise and") >>> doc += " experiences in business application development and proven records in delivering 90's software. " >>> doc += "\n\nSummary: \n\tSoftware Architect" >>> doc += (" who has gone through several full product-delivery life cycles from requirements " + ... "gathering to deployment / production, and") >>> doc += (" skilled in all areas of software development from client-side JavaScript to " + ... "database modeling. With strong experiences in:") >>> doc += " \n\tRequirements gathering and analysis." The python splitter will produce 2 tokens that are only punctuation ("/") >>> len([s for s in doc.split() if s]) 72 The built-in nonword REGEX ignores all-punctuation words, so there are 2 less here: >>> len(list(Tokenizer(doc, strip=False, nonwords=False))) 70 In addition, punctuation at the end of tokens is stripped so "D. Rock" doesn't tokenize to "D." but rather "D" >>> run_together_tokens = ''.join(list(Tokenizer(doc, strip=False, nonwords=False))) >>> '/' in run_together_tokens or ':' in ''.join(run_together_tokens) False But you can turn off stripping when instantiating the object. >>> all(t in Tokenizer(doc, strip=False, nonwords=True) for t in ... 
('D', '_Project', 'Lead_', "90's", "product-delivery")) True """ ngrams = ngrams or self.ngrams # FIXME: Improve memory efficiency by making this ngram tokenizer an actual generator if ngrams > 1: original_tokens = list(self.__iter__(ngrams=1)) for tok in original_tokens: yield tok for i in range(2, ngrams + 1): for tok in list_ngrams(original_tokens, n=i, join=' '): yield tok else: for w in self.regex.finditer(self.doc): if w: w = w.group() w = w if not self.strip_chars else str.strip(w, self.strip_chars) w = w if not self.strip else self.strip(w) w = w if not self.stem else self.stem(w) w = w if not self.lemmatize else self.lemmatize(w) w = w if not self.lower else self.lower(w) # FIXME: nonword check before and after preprossing? (lower, lemmatize, strip, stem) # 1. check if the default nonwords REGEX filter is requested, if so, use it. # 2. check if a customized nonwords REGES filter is provided, if so, use it. # 3. make sure the word isn't in the provided (or empty) set of nonwords if w and (not self.nonwords or not re.match(r'^' + RE_NONWORD + '$', w)) and ( not self.nonwords_regex or not self.nonwords_regex.match(w)) and ( w not in self.nonwords_set): yield w
def velread(prefix='merge_', nope=False, ifaphi=True):
    """Read merged (time-averaged) simulation output files ``<prefix>_*.dat``.

    prefix -- file-name prefix of the merged data set.
    nope   -- when True, no pressure-/magnetic-pressure-averaged velocity
              files are read; the density-averaged velocities are reused.
    ifaphi -- when True, read the vector potential A_phi; otherwise the
              returned ``aphi`` slot just aliases ``rho``.

    Returns the tuple
    ``(r2, h2, rho, p, mp, u0, ur, uh, up, pu0, pur, puh, pup,
       mpu0, mpur, mpuh, mpup, aphi)``.
    """
    def _read_column(fname):
        # Read the first whitespace-separated token of each line until the
        # first blank line (exactly what the original per-file loops did),
        # returning a float array.  ``with`` closes the file on error --
        # the original opened six files and never closed them on failure.
        vals = []
        with open(fname, 'r') as f:
            s = f.readline().split()
            while s:
                vals.append(s[0])
                s = f.readline().split()
        return asarray(vals, dtype=double)

    # mesh data:
    r = _read_column(prefix + '_r.dat')
    h = _read_column(prefix + '_h.dat')
    h2, r2 = meshgrid(h, r)
    nr, nh = shape(h2)
    # scalar fields, stored flat and reshaped to (nr, nh):
    rho = reshape(_read_column(prefix + '_rho.dat'), [nr, nh])  # density
    p = reshape(_read_column(prefix + '_p.dat'), [nr, nh])      # pressure
    mp = reshape(_read_column(prefix + '_mp.dat'), [nr, nh])    # magnetic pressure
    # velocities:
    uufile = prefix + '_uu.dat'
    udfile = prefix + '_ud.dat'
    puufile = prefix + '_puu.dat'
    pudfile = prefix + '_pud.dat'
    mpuufile = prefix + '_mpuu.dat'
    mpudfile = prefix + '_mpud.dat'
    uu0, uur, uuh, uup = rk.uread(uufile, [nr, nh])  # density-averaged velocity
    ud0, udr, udh, udp = rk.uread(udfile, [nr, nh])  # density-averaged velocity
    # geometric mean of the contra-/covariant components, signed by the
    # contravariant one:
    u0 = sqrt(fabs(uu0 * ud0))
    ur = sqrt(fabs(uur * udr)) * sign(uur)
    uh = sqrt(fabs(uuh * udh)) * sign(uuh)
    up = sqrt(fabs(uup * udp)) * sign(uup)
    if (nope):
        # No pressure-averaged data available: reuse the density-averaged fields.
        pu0, pur, puh, pup = u0, ur, uh, up
        mpu0, mpur, mpuh, mpup = u0, ur, uh, up
    else:
        puu0, puur, puuh, puup = rk.uread(puufile, [nr, nh])    # pressure-averaged velocity
        pud0, pudr, pudh, pudp = rk.uread(pudfile, [nr, nh])    # pressure-averaged velocity
        mpuu0, mpuur, mpuuh, mpuup = rk.uread(mpuufile, [nr, nh])  # pressure-averaged velocity
        mpud0, mpudr, mpudh, mpudp = rk.uread(mpudfile, [nr, nh])  # pressure-averaged velocity
        pu0 = sqrt(fabs(puu0 * pud0))
        pur = sqrt(fabs(puur * pudr)) * sign(puur)
        puh = sqrt(fabs(puuh * pudh)) * sign(puuh)
        pup = sqrt(fabs(puup * pudp)) * sign(puup)
        mpu0 = sqrt(fabs(mpuu0 * mpud0))
        mpur = sqrt(fabs(mpuur * mpudr)) * sign(mpuur)
        mpuh = sqrt(fabs(mpuuh * mpudh)) * sign(mpuuh)
        mpup = sqrt(fabs(mpuup * mpudp)) * sign(mpuup)
    if (ifaphi):
        # vector potential A_phi:
        aphi = reshape(_read_column(prefix + '_aphi.dat'), [nr, nh])
    else:
        aphi = rho
    return r2, h2, rho, p, mp, u0, ur, uh, up, pu0, pur, puh, pup, mpu0, mpur, mpuh, mpup, aphi
def parseDMSString(str, order=0):
    '''Parses a pair of coordinates that are in the order of
    "latitude, longitude".  The string can be in DMS or decimal degree
    notation.  If order is 0 then decimal coordinates are assumed to be in
    Lat Lon order, otherwise they are in Lon Lat order.  For DMS
    coordinates the order does not matter.

    Note: the first parameter is named ``str`` (shadowing the builtin);
    the name is kept for backward compatibility with keyword callers.

    Returns (lat, lon); raises ValueError on unparsable input.
    '''
    text = str.strip().upper()  # Make it all upper case
    try:
        if re.search(r"[NSEW]", text) is None:
            # There were no annotated dms coordinates so assume decimal degrees.
            # Remove any characters that are not digits, sign or decimal point.
            text = re.sub(r"[^\d.+-]+", " ", text).strip()
            coords = re.split(r'\s+', text, 1)
            if len(coords) != 2:
                raise ValueError('Invalid Coordinates')
            if order == 0:
                lat = float(coords[0])
                lon = float(coords[1])
            else:
                lon = float(coords[0])
                lat = float(coords[1])
        else:
            # We should have a DMS coordinate.
            if re.search(r'[NSEW]\s*\d+.+[NSEW]\s*\d+', text) is None:
                # We assume that the cardinal directions occur after the digits.
                m = re.findall(r'(.+)\s*([NS])[\s,;:]*(.+)\s*([EW])', text)
                if len(m) != 1 or len(m[0]) != 4:
                    # Either invalid or the coordinates are ordered lon lat.
                    m = re.findall(r'(.+)\s*([EW])[\s,;:]*(.+)\s*([NS])', text)
                    if len(m) != 1 or len(m[0]) != 4:
                        # Now we know it is invalid.
                        raise ValueError('Invalid DMS Coordinate')
                    # The coordinates were in lon, lat order.
                    lon = LatLon.parseDMS(m[0][0], m[0][1])
                    lat = LatLon.parseDMS(m[0][2], m[0][3])
                else:
                    # The coordinates are in lat, lon order.
                    lat = LatLon.parseDMS(m[0][0], m[0][1])
                    lon = LatLon.parseDMS(m[0][2], m[0][3])
            else:
                # The cardinal directions occur at the beginning of the digits.
                m = re.findall(r'([NS])\s*(\d+.*?)[\s,;:]*([EW])(.+)', text)
                if len(m) != 1 or len(m[0]) != 4:
                    # Either invalid or the coordinates are ordered lon lat.
                    m = re.findall(r'([EW])\s*(\d+.*?)[\s,;:]*([NS])(.+)', text)
                    if len(m) != 1 or len(m[0]) != 4:
                        # Now we know it is invalid.
                        raise ValueError('Invalid DMS Coordinate')
                    # The coordinates were in lon, lat order.
                    lon = LatLon.parseDMS(m[0][1], m[0][0])
                    lat = LatLon.parseDMS(m[0][3], m[0][2])
                else:
                    # The coordinates are in lat, lon order.
                    lat = LatLon.parseDMS(m[0][1], m[0][0])
                    lon = LatLon.parseDMS(m[0][3], m[0][2])
    except Exception:
        # Narrowed from a bare ``except:``; callers still see ValueError.
        raise ValueError('Invalid Coordinates')
    return lat, lon
# NOTE(review): reads one ASCII dump (mesh, density, pressures, velocity,
# origin coordinates, magnetic field/A_phi) and writes a series of
# diagnostic contour/stream plots (<prefix>_rho/_ori/_beta/_ur/_o/_stream
# .eps/.png).  Relies on star imports (numpy, matplotlib.pyplot, griddata,
# BoundaryNorm) and on local helpers dinforead/bhole/arms.  The original
# newlines were lost in extraction; the plotting sequence and figure state
# (clf/figure/close per panel) is order-sensitive, so the lines below are
# kept byte-identical rather than restyled.
def ascframe(prefix='dumps/dump000', xmax=40.): rfile = prefix + '_r.dat' hfile = prefix + '_h.dat' rhofile = prefix + '_rho.dat' pfile = prefix + '_p.dat' pmfile = prefix + '_pm.dat' uufile = prefix + '_uu.dat' udfile = prefix + '_ud.dat' bfile = prefix + '_b.dat' orifile = prefix + '_ori.dat' nr, nh, nphi, a, t = dinforead(prefix) # radial mesh: fr = open(rfile, 'r') s = str.split(str.strip(fr.readline())) r = [] while (s): r.append(s[0]) s = str.split(str.strip(fr.readline())) fr.close() r = asarray(r, dtype=double) nr = size(r) # polar angle mesh: fh = open(hfile, 'r') s = str.split(str.strip(fh.readline())) th = [] while (s): th.append(s[0]) s = str.split(str.strip(fh.readline())) fh.close() th = asarray(th, dtype=double) nh = size(th) # 2d-grid (order??) h2, r2 = meshgrid(th, r) print(shape(r2)) print(nr, nh) # density: frho = open(rhofile, 'r') s = str.split(str.strip(frho.readline())) rho = [] while (s): rho.append(s[0]) s = str.split(str.strip(frho.readline())) frho.close() rho = asarray(rho, dtype=double) rho = reshape(rho, [nr, nh]) # pressure: fp = open(pfile, 'r') fpm = open(pmfile, 'r') s = str.split(str.strip(fp.readline())) sm = str.split(str.strip(fpm.readline())) p = [] pm = [] while (s): p.append(s[0]) pm.append(sm[0]) s = str.split(str.strip(fp.readline())) sm = str.split(str.strip(fpm.readline())) fp.close() fpm.close() p = asarray(p, dtype=double) pm = asarray(pm, dtype=double) p = reshape(p, [nr, nh]) pm = reshape(pm, [nr, nh]) # velocity field: fuu = open(uufile, 'r') s = str.split(str.strip(fuu.readline())) ur = [] uh = [] omega = [] while (s): ur.append(s[1]) uh.append(s[2]) omega.append(old_div(double(s[3]), double(s[0]))) s = str.split(str.strip(fuu.readline())) fuu.close() ur = asarray(ur, dtype=double) uh = asarray(uh, dtype=double) ur = reshape(ur, [nr, nh]) uh = reshape(uh, [nr, nh]) omega = asarray(omega, dtype=double) omega = reshape(omega, [nr, nh]) # origin variables: fori = open(orifile, 'r') s = 
str.split(str.strip(fori.readline())) orr = [] orth = [] orphi = [] while (s): orr.append(s[0]) orth.append(s[1]) orphi.append(s[2]) s = str.split(str.strip(fori.readline())) fori.close() orr = reshape(asarray(orr, dtype=double), [nr, nh]) orth = reshape(asarray(orth, dtype=double), [nr, nh]) orphi = reshape(asarray(orphi, dtype=double), [nr, nh]) # magnetic field (the last component is A_\phi) fb = open(bfile, 'r') s = str.split(str.strip(fb.readline())) aphi = [] while (s): aphi.append(s[3]) s = str.split(str.strip(fb.readline())) fb.close() aphi = reshape(asarray(aphi, dtype=double), [nr, nh]) print("size(aphi) = " + str(shape(aphi))) rhor = 1. + sqrt(1. - a**2) cmap = plt.get_cmap('jet') ono = 30 lmin = -5. lmax = 1. lrholevs = (lmax - lmin) * arange(ono) / double(ono) + lmin norm = BoundaryNorm(lrholevs, ncolors=cmap.N, clip=True) x = r2 * sin(h2) y = r2 * cos(h2) clf() fig = figure() contourf(x, y, log10(rho + 1e-3), levels=lrholevs, norm=norm, cmap=cmap) contour(x, y, aphi, colors='k') xlim(0., xmax) ylim(-xmax / 4., xmax / 2.) bhole(rhor) # need to put time in dinfo! title('t=' + str(t) + ' (' + prefix + ')') savefig(prefix + '_rho.eps') savefig(prefix + '_rho.png') close() nxx = 10 rlevs = xmax * np.arange(nxx) / np.double(nxx) thlevs = np.pi * np.arange(nxx) / np.double(nxx) clf() contourf(x, y, orr, cmap=cmap, levels=rlevs) colorbar() contour(x, y, r2, colors='w', levels=rlevs) contour(x, y, h2, colors='w', levels=thlevs) contour(x, y, orr, colors='k', levels=rlevs) contour(x, y, orth, colors='k', levels=thlevs) plt.xlim(0., xmax) plt.ylim(-xmax / 4., xmax / 2.) plt.savefig(prefix + "_ori.png") close() lmin = -2. lmax = 5. 
lbetalevs = (lmax - lmin) * arange(ono) / double(ono) + lmin norm = BoundaryNorm(lbetalevs, ncolors=cmap.N, clip=True) clf() fig = figure() contourf(r2 * sin(h2), r2 * cos(h2), log10(p / pm), levels=lbetalevs, norm=norm, cmap=cmap) colorbar() contour(r2 * sin(h2), r2 * cos(h2), aphi, colors='k') xlim(0., xmax) ylim(-xmax / 2., xmax / 2.) bhole(rhor) # need to put time in dinfo! title('t=' + str(t) + ' (' + prefix + ')') savefig(prefix + '_beta.eps') savefig(prefix + '_beta.png') close() vlevs = (arange(ono) / double(ono) * 2. - 1.) * 0.1 wv = where((r2 < 10.) & (r2 > 5.) & (fabs(cos(h2)) < 0.25)) # vlevs[0]=ur[wv].min()*1.5 # vlevs[ono-1]=ur[wv].max()*1.5 norm = BoundaryNorm(vlevs, ncolors=cmap.N, clip=True) clf() fig = figure() contourf(r2 * sin(h2), r2 * cos(h2), ur, levels=vlevs, norm=norm) colorbar() contour(r2 * sin(h2), r2 * cos(h2), ur, levels=[0.], color='w', linestyles='dotted') contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3), levels=lrholevs, colors='w') xlim(0., xmax) ylim(-xmax / 2., xmax / 2.) bhole(rhor) title('t=' + str(t)) savefig(prefix + '_ur.eps') savefig(prefix + '_ur.png') close() clf() fig = figure() contourf(r2 * sin(h2), r2 * cos(h2), omega, levels=vlevs, norm=norm) colorbar() contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3), levels=lrholevs, colors='w') xlim(0., xmax) ylim(-xmax / 2., xmax / 2.) bhole(rhor) title('t=' + str(t)) savefig(prefix + '_o.eps') savefig(prefix + '_o.png') close() nx = 20 ny = 20 xmin = 0. ymin = -xmax / 4. ymax = xmax / 2. 
xflow = (xmax - xmin) * (arange(nx) + 0.5) / double(nx) + xmin yflow = (ymax - ymin) * (arange(ny) + 0.5) / double(ny) + ymin x2, y2 = meshgrid(xflow, yflow) xgrid = (r2 * sin(h2)).flatten() ygrid = (r2 * cos(h2)).flatten() vxflow = (ur * sin(h2) + uh * cos(h2)).flatten() vyflow = (-uh * sin(h2) + ur * cos(h2)).flatten() vx = griddata(list(zip(xgrid, ygrid)), vxflow, (x2, y2), method='nearest') vy = griddata(list(zip(xgrid, ygrid)), vyflow, (x2, y2), method='nearest') vmin = 1e-8 # sqrt((vx**2+vy**2)).min()*9. vmax = 1.0 # sqrt((vx**2+vy**2)).max()*1.1 vlevs = log10( (old_div(vmax, vmin))**(old_div(arange(20), double(19))) * vmin) vlevs[0] = -30. norm = BoundaryNorm(vlevs, ncolors=cmap.N, clip=True) clf() fig = figure() contourf(r2 * sin(h2), r2 * cos(h2), log10(sqrt(ur**2 + uh**2)), levels=vlevs, norm=norm) colorbar() title('t=' + str(t)) contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3), levels=lrholevs, colors='w') streamplot(xflow, yflow, vx, vy, color='k') xlim(xmin, xmax) ylim(ymin, ymax) xlabel(r'$\varpi$') ylabel(r'$z$') bhole(rhor) plot([arms(a), arms(a)], [-1., 1.], color='w') #, linestyle='dotted') fig.set_size_inches(10, 6) savefig(prefix + '_stream.eps') savefig(prefix + '_stream.png') close()
# NOTE(review): reads the merged mesh/pressure and the matter (tudma) and
# electromagnetic (tudem) stress tensors and plots the effective alpha
# viscosity components to <dire>/alphas.eps.
# NOTE(review): the theta-mesh file path is HARDCODED to
# '/home/pasha/harm/harmpi/' + dire, while the radial mesh uses the
# relative dire + '/merge_r.dat' -- almost certainly a developer-machine
# leftover that breaks on any other host; confirm and make it
# dire + '/merge_h.dat' before reuse.  Newlines were lost in extraction,
# so the code is kept byte-identical rather than rewritten.
def tedplotter(dire): nr, nh, nphi, a, t = dinforead(dire + '/merge') rfile = dire + '/merge_r.dat' fr = open(rfile, 'r') s = str.split(str.strip(fr.readline())) r = [] while (s): r.append(s[0]) s = str.split(str.strip(fr.readline())) fr.close() r = asarray(r, dtype=double) # nr=size(r) # polar angle mesh: hfile = '/home/pasha/harm/harmpi/' + dire + '/merge_h.dat' fh = open(hfile, 'r') s = str.split(str.strip(fh.readline())) th = [] while (s): th.append(s[0]) s = str.split(str.strip(fh.readline())) fh.close() th = asarray(th, dtype=double) # nh=size(th) # 2d-grid (order??) h2, r2 = meshgrid(th, r) print(shape(r2)) print(nr, nh) # pressure: pfile = dire + '/merge_p.dat' fp = open(pfile, 'r') s = str.split(str.strip(fp.readline())) p = [] while (s): p.append(s[0]) s = str.split(str.strip(fp.readline())) fp.close() p = asarray(p, dtype=double) p = reshape(p, [nr, nh]) # TudMA, TudEM trr = [] thh = [] tpp = [] trp = [] thp = [] tmafile = dire + '/merge_tudma.dat' ftma = open(tmafile, 'r') s = str.split(str.strip(ftma.readline())) rho = [] while (s): trr.append(s[5]) thh.append(s[10]) tpp.append(s[15]) trp.append(s[7]) thp.append(s[11]) s = str.split(str.strip(ftma.readline())) ftma.close() trr = reshape(asarray(trr, dtype=double), [nr, nh]) thh = reshape(asarray(thh, dtype=double), [nr, nh]) tpp = reshape(asarray(tpp, dtype=double), [nr, nh]) trp = reshape(asarray(trp, dtype=double), [nr, nh]) thp = reshape(asarray(thp, dtype=double), [nr, nh]) emtrr = [] emthh = [] emtpp = [] emtrp = [] emthp = [] temfile = dire + '/merge_tudem.dat' ftem = open(temfile, 'r') s = str.split(str.strip(ftem.readline())) rho = [] while (s): emtrr.append(s[5]) emthh.append(s[10]) emtpp.append(s[15]) emtrp.append(s[7]) emthp.append(s[11]) s = str.split(str.strip(ftem.readline())) ftem.close() emtrr = reshape(asarray(emtrr, dtype=double), [nr, nh]) emthh = reshape(asarray(emthh, dtype=double), [nr, nh]) emtpp = reshape(asarray(emtpp, dtype=double), [nr, nh]) emtrp = reshape(asarray(emtrp, 
dtype=double), [nr, nh]) emthp = reshape(asarray(emthp, dtype=double), [nr, nh]) alevs1 = 1e-3 * 0.5 alevs2 = 1.0 * 0.5 na = 30 alevs = (old_div(alevs2, alevs1))**(old_div(arange(na), double(na - 1))) * alevs1 alevs = around(alevs, 3) alevs[0] = 0. alevs = unique(alevs) cmap = plt.get_cmap('jet') cmap.set_bad('white', 1.) norm = BoundaryNorm(alevs, ncolors=cmap.N, clip=False) rmax = 15. rhor = 1. + (1. - a**2)**0.5 clf() fig = figure() subplot(121) contourf(r2 * sin(h2), r2 * cos(h2), fabs(old_div((trp + emtrp), p)), levels=alevs, norm=norm, cmap=cmap) colorbar() contour(r2 * sin(h2), r2 * cos(h2), (old_div((trp + emtrp), p)), colors='w', levels=[0.]) contour(r2 * sin(h2), r2 * cos(h2), p, colors='w', linestyles='dotted') xlim(0., rmax) ylim(old_div(-rmax, 2.), old_div(rmax, 2.)) xlabel(r'$\varpi$') ylabel(r'$z$') bhole(rhor) title(r'$\alpha_{r\varphi}$') subplot(122) contourf(r2 * sin(h2), r2 * cos(h2), fabs(old_div((thp + emthp), p)), levels=alevs, norm=norm, cmap=cmap) colorbar() contour(r2 * sin(h2), r2 * cos(h2), (old_div((thp + emthp), p)), colors='w', levels=[0.]) contour(r2 * sin(h2), r2 * cos(h2), p, colors='w', linestyles='dotted') xlim(0., rmax) ylim(old_div(-rmax, 2.), old_div(rmax, 2.)) xlabel(r'$\varpi$') ylabel(r'$z$') bhole(rhor) title(r'$\alpha_{z\varphi}$') fig.set_size_inches(15, 5) fig.tight_layout(pad=0., h_pad=-2.) savefig(dire + '/alphas.eps') close()
def mplotter(dire='.', nope=False):
    """Produce a battery of diagnostic figures for the merged snapshot in
    *dire* and save them as .eps/.png files: density (rho), plasma beta,
    angular frequency (omega), stream lines, near-equatorial stream bands,
    vertical velocity slices (vverts), latitudinal velocity (uh), and —
    only when the internally disabled ``dmatrix`` branches are enabled —
    velocity-correlation-matrix plots (vturb, dmatrix*).

    NOTE(review): parameter ``nope`` is never used in this function.
    """
    # Hard-coded switch for the velocity-correlation-matrix diagnostics.
    dmatrix = False
    nr, nh, nphi, a, t = dinforead(dire + '/merge')
    # velread returns the 2-D mesh plus density-, pressure- and
    # magnetically-weighted velocity fields and the flux function aphi.
    r2, h2, rho, p, pm, u0, ur, uh, up, pu0, pur, puh, pup, mpu0, mpur, mpuh, mpup, aphi = velread(
        dire + '/merge')
    # # velocity correlation matrix:
    if (dmatrix):
        dfile = dire + '/merge_corv.dat'
        fd = open(dfile, 'r')
        # s=str.split(str.strip(fd.readline()))
        # 3x3 symmetric correlation matrix per (r, theta) cell; the file
        # stores the 6 independent components per row.
        dxy = zeros([3, 3, nr, nh], dtype=double)
        # vtrace1=zeros([nr,nh], dtype=double)
        for kx in arange(nr):
            for ky in arange(nh):
                s = str.split(str.strip(fd.readline()))
                dxy[0, 0, kx, ky] = double(s[0])
                dxy[1, 1, kx, ky] = double(s[1])
                dxy[2, 2, kx, ky] = double(s[2])
                dxy[0, 1, kx, ky] = double(s[3])
                dxy[1, 0, kx, ky] = double(s[3])
                dxy[1, 2, kx, ky] = double(s[4])
                dxy[2, 1, kx, ky] = double(s[4])
                dxy[0, 2, kx, ky] = double(s[5])
                dxy[2, 0, kx, ky] = double(s[5])
        # NOTE(review): `vtrace` is used below but its assignment is
        # commented out — this branch would raise NameError if enabled.
        # vtrace = trace(dxy, axis1=0, axis2=1)
        # print "vtrace = "+str(vtrace1.min())+" to "+str(vtrace1.max())
        # print "vtrace = "+str(vtrace1.min())+" to "+str(vtrace1.max())
        # vertical slice:
        rrangemin = 10.
        rrangemax = 12.
        # boolean mask of the radial shell, as 0./1. doubles
        rrange = double((r2 > rrangemin) * (r2 < rrangemax))
        # averaging over radial velocity
        vtracemean = old_div((vtrace * rho * rrange).mean(axis=0),
                             (rho * rrange).mean(axis=0))
        urmean = old_div((ur * rho * rrange).mean(axis=0),
                         (rho * rrange).mean(axis=0))
        upmean = old_div((up * rho * rrange).mean(axis=0),
                         (rho * rrange).mean(axis=0))
        uhmean = old_div((uh * rho * rrange).mean(axis=0),
                         (rho * rrange).mean(axis=0))
        th = unique(h2)
        fig = figure()
        clf()
        plot(cos(th), sqrt(vtracemean), label='velocity RMS', color='b')
        plot(cos(th), urmean, label='radial velocity', color='r')
        plot(cos(th), -urmean, color='r', linestyle='dotted')
        plot(cos(th), upmean, label='rotation velocity', color='k')
        # plot(cos(th), th*0.+rrangemin/(rrangemin**1.5+a), color='k', linestyle='dotted')
        # plot(cos(th), th*0.+rrangemax/(rrangemax**1.5+a), color='k', linestyle='dotted')
        plot(cos(th), uhmean, label='latitudinal velocity', color='g')
        plot(cos(th), -uhmean, color='g', linestyle='dotted')
        yscale('log')
        legend(loc='best')
        xlabel(r'$\cos\theta$')
        ylabel('$v/c$')
        fig.set_size_inches(12, 6)
        savefig(dire + '/velcompare.eps')
        close()
    ono = 20  # number of angular frequency levels
    rmin = old_div(h.Risco(a), 2.)
    rhor = 1. + (1. - a**2)**0.5  # event-horizon radius for spin a
    rmax = 20.
    # log-spaced radii -> Keplerian-like angular frequency levels
    rlevs = (rmax / rmin * 1.5)**(old_div(arange(ono), double(ono))) * rmin
    olevs = old_div(1., (rlevs**1.5 + a))
    olevs = olevs[::-1]
    olevs[ono - 1] = olevs.max() * 10.
    cmap = plt.get_cmap('jet')
    cmap.set_bad('white', 1.)
    # grr=1./(1.-2./r+a**2/r**2)
    norm = BoundaryNorm(olevs, ncolors=cmap.N, clip=True)
    # density plot:
    clf()
    # NOTE(review): `nlevels` is not a standard matplotlib contourf kwarg —
    # verify it is accepted by the matplotlib version in use.
    contourf(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3), cmap=cmap,
             nlevels=30)
    contour(r2 * sin(h2), r2 * cos(h2), aphi, colors='k')
    xlim(0., rmax)
    ylim(old_div(-rmax, 2.), old_div(rmax, 2.))
    xlabel(r'$\varpi$')
    ylabel(r'$z$')
    bhole(rhor)
    savefig(dire + '/rho.eps')
    savefig(dire + '/rho.png')
    # beta magnetization plot:
    beta1 = 0.1
    beta2 = 1000.
    nbeta = 30
    betalevs = log10(
        (old_div(beta2, beta1))**(old_div(arange(nbeta), double(nbeta - 1))) * beta1)
    clf()
    contourf(r2 * sin(h2), r2 * cos(h2), log10(old_div(p, pm)),
             levels=betalevs)
    colorbar()
    # white line marks beta = 1 (equipartition)
    contour(r2 * sin(h2), r2 * cos(h2), log10(old_div(p, pm)),
            levels=[0.], colors='w', linewidths=2.)
    xlim(0., rmax)
    ylim(old_div(-rmax, 2.), old_div(rmax, 2.))
    xlabel(r'$\varpi$')
    ylabel(r'$z$')
    bhole(rhor)
    savefig(dire + '/beta.eps')
    savefig(dire + '/beta.png')
    # radial velocity
    clf()
    fig = figure()
    contourf(r2 * sin(h2), r2 * cos(h2), up, levels=olevs, norm=norm)
    colorbar()
    # overlay the Keplerian-like reference frequency in black
    contour(r2 * sin(h2), r2 * cos(h2),
            old_div(1., ((r2 * sin(h2))**1.5 + a)),
            colors='k', levels=olevs, linewidths=1)
    contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3), colors='w')
    xlim(0., 20.)
    ylim(-10., 10.)
    xlabel(r'$\varpi$')
    ylabel(r'$z$')
    bhole(rhor)
    fig.set_size_inches(8, 8)
    savefig(dire + '/omega.eps')
    savefig(dire + '/omega.png')
    vlevs = (arange(ono) / double(ono) * 2. - 1.) * 0.01
    vlevs[0] = ur.min() * 1.1
    vlevs[ono - 1] = ur.max() * 1.1
    norm = BoundaryNorm(vlevs, ncolors=cmap.N, clip=True)
    # disk/wind separation at |cos(theta)| = hdisk
    hdisk = 0.25
    wdisk = double(fabs(cos(h2)) < hdisk)
    wwind = double(fabs(cos(h2)) > hdisk)
    urmean = old_div(((rho * ur) * wdisk).mean(axis=1),
                     (rho * wdisk).mean(axis=1))
    urmeanp = old_div(((p * pur) * wdisk).mean(axis=1),
                      (p * wdisk).mean(axis=1))
    # flow lines:
    nx = 20
    ny = 21
    xmin = 0.
    xmax = 30.
    ymin = -10.
    ymax = 10.
    cs = 1.  # p/rho/(4./3.)
    # regular cartesian grid (cell centres) for the stream plots
    xflow = (xmax - xmin) * (arange(nx) + 0.5) / double(nx) + xmin
    yflow = (ymax - ymin) * (arange(ny) + 0.5) / double(ny) + ymin
    x2, y2 = meshgrid(xflow, yflow)
    # vxfun=interp2d(r2*sin(h2), r2*cos(h2), ur*sin(h2)+uh*cos(h2),kind='linear')
    # vyfun=interp2d(r2*sin(h2), r2*cos(h2), -uh*sin(h2)+ur*cos(h2),kind='linear')
    # vx=vxfun(xflow, yflow) ; vy=vyfun(xflow, yflow)
    # project (ur, uh) to cartesian components and resample onto the grid
    xgrid = (r2 * sin(h2)).flatten()
    ygrid = (r2 * cos(h2)).flatten()
    vxflow = (ur / cs * sin(h2) + uh / cs * cos(h2)).flatten()
    vyflow = (-uh / cs * sin(h2) + ur / cs * cos(h2)).flatten()
    pvxflow = (pur / cs * sin(h2) + puh / cs * cos(h2)).flatten()
    pvyflow = (-puh / cs * sin(h2) + pur / cs * cos(h2)).flatten()
    # vxflow=xgrid ; vyflow=ygrid
    vx = griddata(list(zip(xgrid, ygrid)), vxflow, (x2, y2), method='nearest')
    vy = griddata(list(zip(xgrid, ygrid)), vyflow, (x2, y2), method='nearest')
    pvx = griddata(list(zip(xgrid, ygrid)), pvxflow, (x2, y2),
                   method='nearest')
    pvy = griddata(list(zip(xgrid, ygrid)), pvyflow, (x2, y2),
                   method='nearest')
    vmin = 1e-8  # sqrt((vx**2+vy**2)).min()*9.
    vmax = 0.1  # sqrt((vx**2+vy**2)).max()*1.1
    vlevs = log10(
        (old_div(vmax, vmin))**(old_div(arange(20), double(19))) * vmin)
    vlevs[0] = -30.
    norm = BoundaryNorm(vlevs, ncolors=cmap.N, clip=True)
    # vmax=0.01
    # vmin=-0.01
    # vlevs=(vmax-vmin)*(arange(20)/double(19))+vmin
    # norm = BoundaryNorm(vlevs, ncolors=cmap.N, clip=True)
    clf()
    fig = figure()
    contourf(r2 * sin(h2), r2 * cos(h2),
             log10(old_div(sqrt(ur**2 + uh**2), cs)),
             levels=vlevs, norm=norm)
    # contourf(xflow, yflow, sqrt(vx**2+vy**2),levels=vlevs,norm=norm)
    colorbar()
    # pressure-weighted stream lines in black, density-weighted in white
    streamplot(xflow, yflow, pvx, pvy, color='k')
    streamplot(xflow, yflow, vx, vy, color='w')
    xlim(xmin, xmax)
    ylim(ymin, ymax)
    xlabel(r'$\varpi$')
    ylabel(r'$z$')
    bhole(rhor)
    fig.set_size_inches(15, 8)
    savefig(dire + '/stream.eps')
    savefig(dire + '/stream.png')
    close()
    # near eqplane:
    # NOTE(review): `xscale` here shadows the pyplot xscale() function,
    # which the disabled dmatrix branch below still tries to call.
    xscale = 10.
    nx = 7
    ny = 5
    xflow = xscale * (arange(nx)) / double(nx - 1)
    yflow = xscale * hdisk * ((arange(ny)) / double(ny - 1) * 2. - 1.)
    x2, y2 = meshgrid(xflow, yflow)
    vx = griddata(list(zip(xgrid, ygrid)), vxflow, (x2, y2), method='nearest')
    vy = griddata(list(zip(xgrid, ygrid)), vyflow, (x2, y2), method='nearest')
    pvx = griddata(list(zip(xgrid, ygrid)), pvxflow, (x2, y2),
                   method='nearest')
    pvy = griddata(list(zip(xgrid, ygrid)), pvyflow, (x2, y2),
                   method='nearest')
    vratmin = 0.6  # 0.2
    vratmax = 1.1  # 1.
    nv = 10
    vratlevs = (arange(nv + 1)) / double(nv) * (vratmax - vratmin) + vratmin
    # vratlevs[9]=1.3
    clf()
    fig = figure()
    # (sqrt(pur**2+puh**2))/(sqrt(ur**2+uh**2))
    # ratio of pressure-weighted to density-weighted radial velocity
    contourf(r2 * sin(h2), r2 * cos(h2), old_div(pur, ur),
             levels=vratlevs, cmap='jet')
    colorbar()
    contour(r2 * sin(h2), r2 * cos(h2), old_div(pur, ur),
            levels=[1.], colors='w')
    plot([0., xscale], [0., 0.], color='k', linestyle='dotted')
    # streamplot(xflow, yflow, pvx, pvy,color='k')
    streamplot(xflow, yflow, vx, vy, color='k')
    xlim(0.5, xscale)
    ylim(-xscale * hdisk, xscale * hdisk)
    xlabel(r'$\varpi$')
    ylabel(r'$z$')
    bhole(rhor)
    fig.set_size_inches(5 * 2 + 1, 5 * 2 * hdisk + 1.5)
    fig.tight_layout(pad=0.5)
    savefig(dire + '/streamband.eps')
    savefig(dire + '/streamband.png')
    close()
    vratmin = 0.5  # 0.2
    vratmax = 2.5  # 1.
    nv = 10
    vratlevs = (arange(nv + 1)) / double(nv) * (vratmax - vratmin) + vratmin
    clf()
    fig = figure()
    # (sqrt(pur**2+puh**2))/(sqrt(ur**2+uh**2))
    # same band plot, but magnetically-weighted over pressure-weighted
    contourf(r2 * sin(h2), r2 * cos(h2), old_div(mpur, pur),
             levels=vratlevs, cmap='jet')
    colorbar()
    contour(r2 * sin(h2), r2 * cos(h2), old_div(mpur, pur),
            levels=[1.], colors='w')
    plot([0., xscale], [0., 0.], color='k', linestyle='dotted')
    # streamplot(xflow, yflow, pvx, pvy,color='k')
    streamplot(xflow, yflow, vx, vy, color='k')
    xlim(0.5, xscale)
    ylim(-xscale * hdisk, xscale * hdisk)
    xlabel(r'$\varpi$')
    ylabel(r'$z$')
    bhole(rhor)
    fig.set_size_inches(5 * 2 + 1, 5 * 2 * hdisk + 1.5)
    fig.tight_layout(pad=0.5)
    savefig(dire + '/streamband_mag.eps')
    savefig(dire + '/streamband_mag.png')
    close()
    # vertical slice:
    rrange = double((r2 > 5.)
                    * (r2 < 10.))
    urhmean = old_div((ur * rho * rrange).mean(axis=0),
                      (rho * rrange).mean(axis=0))
    uhhmean = old_div((uh * rho * rrange).mean(axis=0),
                      (rho * rrange).mean(axis=0))
    urhmeanp = old_div((pur * p * rrange).mean(axis=0),
                       (p * rrange).mean(axis=0))
    uhhmeanp = old_div((puh * p * rrange).mean(axis=0),
                       (p * rrange).mean(axis=0))
    # print shape(urhmean)
    # print shape(h)
    th = unique(h2)
    clf()
    fig = figure()
    subplot(211)
    plot(cos(th), urhmean, color='k')
    plot(cos(th), urhmeanp, color='r')
    # plot(cos(th), uhhmean, color='k', linestyle='dotted')
    # plot(cos(th), uhhmeanp, color='r', linestyle='dotted')
    xlabel(r'$\cos\theta$')
    ylabel(r'$u^r$')
    ylim(-0.035, 0.005)
    xlim(-hdisk, hdisk)
    subplot(212)
    plot(cos(th), old_div(urhmeanp, urhmean), color='k')
    xlabel(r'$\cos\theta$')
    ylabel(r'$\langle u^r\rangle_p / \langle u^r\rangle_\rho$')
    xlim(-hdisk, hdisk)
    ylim(0., 1.)
    fig.set_size_inches(8, 6)
    fig.tight_layout(pad=1.0, h_pad=0.5, w_pad=0.5)
    savefig(dire + '/vverts.eps')
    savefig(dire + '/vverts.png')
    close()
    clf()
    contourf(r2 * sin(h2), r2 * cos(h2), uh, levels=vlevs, norm=norm)
    colorbar()
    # contour(r2*sin(h2), r2*cos(h2), 1./((r2*sin(h2))**1.5+a), colors='k',levels=olevs,linewidths=1)
    contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3), colors='w')
    xlim(0., 20.)
    ylim(-10., 10.)
    xlabel(r'$\varpi$')
    ylabel(r'$z$')
    bhole(rhor)
    savefig(dire + '/uh.eps')
    # turbulent velocity parameters:
    vmin = 1e-8
    vmax = 10.
    ono = 100
    vlevs = (old_div(vmax, vmin))**(old_div(arange(ono), double(ono - 1))) * vmin
    norm = BoundaryNorm(vlevs, ncolors=cmap.N, clip=True)
    # NOTE(review): `dmatrix & False` is always False — deliberately
    # disabled branch; it also relies on the commented-out `vtrace`.
    if (dmatrix & False):
        clf()
        fig = figure()
        # subplot(331)
        contourf(r2 * sin(h2), r2 * cos(h2), vtrace, levels=vlevs, norm=norm)
        colorbar()
        # contour(r2*sin(h2), r2*cos(h2), 1./((r2*sin(h2))**1.5+a), colors='k',levels=olevs,linewidths=1)
        contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3),
                colors='w', linestyles='dotted')
        # contour(r2*sin(h2), r2*cos(h2), cos(h2), colors='y',linestyles='dashed',levels=[-hdisk, hdisk])
        xlim(0., 20.)
        ylim(-10., 10.)
        xlabel(r'$\varpi$')
        ylabel(r'$z$')
        bhole(rhor)
        savefig(dire + '/vturb.eps')
        close()
        vlevs = (2. * (old_div(arange(ono), double(ono - 1))) - 1.) * 0.5
        vlevs[0] = -1.
        vlevs[ono - 1] = 1.
        norm = BoundaryNorm(vlevs, ncolors=cmap.N, clip=True)
        # 3x3 mosaic of normalised correlation-matrix components
        clf()
        fig = figure()
        subplot(331)
        contourf(r2 * sin(h2), r2 * cos(h2), old_div(dxy[0, 0], vtrace),
                 levels=vlevs, norm=norm)
        contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3),
                colors='w', linestyles='dotted')
        title(r'$\Delta_{rr}$')
        xlim(0., 20.)
        ylim(-10., 10.)
        xlabel(r'$\varpi$')
        ylabel(r'$z$')
        bhole(rhor)
        subplot(332)
        contourf(r2 * sin(h2), r2 * cos(h2), old_div(dxy[0, 1], vtrace),
                 levels=vlevs, norm=norm)
        contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3),
                colors='w', linestyles='dotted')
        title(r'$\Delta_{r\theta}$')
        xlim(0., 20.)
        ylim(-10., 10.)
        xlabel(r'$\varpi$')
        ylabel(r'$z$')
        bhole(rhor)
        subplot(333)
        contourf(r2 * sin(h2), r2 * cos(h2), old_div(dxy[0, 2], vtrace),
                 levels=vlevs, norm=norm)
        contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3),
                colors='w', linestyles='dotted')
        title(r'$\Delta_{r\varphi}$')
        xlim(0., 20.)
        ylim(-10., 10.)
        xlabel(r'$\varpi$')
        ylabel(r'$z$')
        bhole(rhor)
        subplot(334)
        contourf(r2 * sin(h2), r2 * cos(h2), old_div(dxy[1, 0], vtrace),
                 levels=vlevs, norm=norm)
        contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3),
                colors='w', linestyles='dotted')
        title(r'$\Delta_{\theta r}$')
        xlim(0., 20.)
        ylim(-10., 10.)
        xlabel(r'$\varpi$')
        ylabel(r'$z$')
        bhole(rhor)
        subplot(335)
        contourf(r2 * sin(h2), r2 * cos(h2), old_div(dxy[1, 1], vtrace),
                 levels=vlevs, norm=norm)
        contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3),
                colors='w', linestyles='dotted')
        title(r'$\Delta_{\theta\theta}$')
        xlim(0., 20.)
        ylim(-10., 10.)
        xlabel(r'$\varpi$')
        ylabel(r'$z$')
        bhole(rhor)
        subplot(336)
        contourf(r2 * sin(h2), r2 * cos(h2), old_div(dxy[1, 2], vtrace),
                 levels=vlevs, norm=norm)
        contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3),
                colors='w', linestyles='dotted')
        title(r'$\Delta_{\theta\varphi}$')
        xlim(0., 20.)
        ylim(-10., 10.)
        xlabel(r'$\varpi$')
        ylabel(r'$z$')
        bhole(rhor)
        subplot(337)
        contourf(r2 * sin(h2), r2 * cos(h2), old_div(dxy[2, 0], vtrace),
                 levels=vlevs, norm=norm)
        contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3),
                colors='w', linestyles='dotted')
        title(r'$\Delta_{\varphi r}$')
        xlim(0., 20.)
        ylim(-10., 10.)
        xlabel(r'$\varpi$')
        ylabel(r'$z$')
        bhole(rhor)
        subplot(338)
        contourf(r2 * sin(h2), r2 * cos(h2), old_div(dxy[2, 1], vtrace),
                 levels=vlevs, norm=norm)
        contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3),
                colors='w', linestyles='dotted')
        title(r'$\Delta_{\varphi\theta}$')
        xlim(0., 20.)
        ylim(-10., 10.)
        xlabel(r'$\varpi$')
        ylabel(r'$z$')
        bhole(rhor)
        subplot(339)
        contourf(r2 * sin(h2), r2 * cos(h2), old_div(dxy[2, 2], vtrace),
                 levels=vlevs, norm=norm)
        contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3),
                colors='w', linestyles='dotted')
        title(r'$\Delta_{\varphi\varphi}$')
        xlim(0., 20.)
        ylim(-10., 10.)
        xlabel(r'$\varpi$')
        ylabel(r'$z$')
        bhole(rhor)
        fig.set_size_inches(12, 12)
        fig.tight_layout(pad=1.0, h_pad=0.5, w_pad=0.5)
        savefig(dire + '/dmatrix.eps')
        close()
    # NOTE(review): second permanently disabled branch; it references `r`,
    # which is never assigned in this function, and calls xscale('log')
    # after `xscale` was rebound to a float above — both would fail if
    # this branch were ever enabled.
    if (dmatrix & False):
        # the tetrad has reasonable physical sense only if u^h << u^r,phi
        drrdisk = old_div((dxy[0, 0] * wdisk * rho).mean(axis=1),
                          (wdisk * rho).mean(axis=1))
        dhhdisk = old_div((dxy[1, 1] * wdisk * rho).mean(axis=1),
                          (wdisk * rho).mean(axis=1))
        dppdisk = old_div((dxy[2, 2] * wdisk * rho).mean(axis=1),
                          (wdisk * rho).mean(axis=1))
        drpdisk = old_div((dxy[0, 2] * wdisk * rho).mean(axis=1),
                          (wdisk * rho).mean(axis=1))
        drhdisk = old_div((dxy[0, 1] * wdisk * rho).mean(axis=1),
                          (wdisk * rho).mean(axis=1))
        dhpdisk = old_div((dxy[1, 2] * wdisk * rho).mean(axis=1),
                          (wdisk * rho).mean(axis=1))
        dhpdiskplus = old_div((dxy[1, 2] * wdisk * cos(h2) * rho).mean(axis=1),
                              (wdisk * rho).mean(axis=1))
        # dhpdiskplus=(dxy[1,2]*wdisk*cos(h2)*rho).mean(axis=1)/(wdisk*rho).mean(axis=1)
        drhdiskplus = old_div((dxy[1, 0] * wdisk * cos(h2) * rho).mean(axis=1),
                              (wdisk * rho).mean(axis=1))
        # drhdiskplus=(dxy[1,0]*wdisk*cos(h2)*rho).mean(axis=1)/(wdisk*rho).mean(axis=1)
        dtot = drrdisk + dhhdisk + dppdisk
        clf()
        plot(r, old_div(drrdisk, dtot), color='k')
        plot(r, old_div(dhhdisk, dtot), color='g')
        plot(r, old_div(dppdisk, dtot), color='r')
        plot(r, old_div(drpdisk, dtot), color='r', linestyle='dotted')
        plot(r, old_div(drhdisk, dtot), color='g', linestyle='dotted')
        plot(r, old_div(dhpdisk, dtot), color='orange', linestyle='dotted')
        plot(r, old_div(dhpdiskplus, dtot), color='orange',
             linestyle='dashed')
        plot(r, old_div(drhdiskplus, dtot), color='g', linestyle='dashed')
        # vertical marker at the ISCO radius
        plot(r * 0. + h.Risco(a), arange(nr) / double(nr - 1) * 2.
             - 1., color='k', linestyle='dotted')
        xlabel(r'$r$')
        ylabel(r'$\Delta_{ik} / \Delta_{\rm tot}$')
        xscale('log')
        xlim(1, 20)
        # ylim(-1e-2,1e-2)
        savefig(dire + '/dmatrix_rslice.eps')
        drrvert = old_div((dxy[0, 0] * rrange * rho).mean(axis=0),
                          (rrange * rho).mean(axis=0))
        dhhvert = old_div((dxy[1, 1] * rrange * rho).mean(axis=0),
                          (rrange * rho).mean(axis=0))
        dppvert = old_div((dxy[2, 2] * rrange * rho).mean(axis=0),
                          (rrange * rho).mean(axis=0))
        drpvert = old_div((dxy[0, 2] * rrange * rho).mean(axis=0),
                          (rrange * rho).mean(axis=0))
        drhvert = old_div((dxy[0, 1] * rrange * rho).mean(axis=0),
                          (rrange * rho).mean(axis=0))
        dhpvert = old_div((dxy[1, 2] * rrange * rho).mean(axis=0),
                          (rrange * rho).mean(axis=0))
        dvertot = drrvert + dhhvert + dppvert
        clf()
        plot(cos(th), old_div(drrvert, dvertot), color='k')
        plot(cos(th), old_div(dhhvert, dvertot), color='g')
        plot(cos(th), old_div(dppvert, dvertot), color='r')
        plot(cos(th), old_div(drpvert, dvertot), color='r',
             linestyle='dotted')
        plot(cos(th), old_div(drhvert, dvertot), color='k',
             linestyle='dotted')
        plot(cos(th), old_div(dhpvert, dvertot), color='orange',
             linestyle='dotted')
        xlabel(r'$\cos \theta$')
        ylabel(r'$\Delta_{ik} / \Delta_{\rm tot}$')
        savefig(dire + '/dmatrix_thslice.eps')
    close('all')
def process_incoming(incoming, id_string):
    """Validate one incoming SMS tuple and turn it into an XForm submission.

    *incoming* is ``(identity, text)`` or ``(identity, text, id_string)``.
    On validation failure an error dict is appended to the enclosing
    ``responses`` list and the function returns early; on success the built
    XML submission is appended to the enclosing ``xforms`` / ``medias`` /
    ``json_submissions`` / ``xforms_notes`` accumulators.

    NOTE: this is a closure — ``responses``, ``username``, ``resp_str`` and
    the accumulator lists come from the enclosing scope.
    """
    # assign variables
    if len(incoming) >= 2:
        identity = incoming[0].strip().lower()
        text = incoming[1].strip().lower()
        # if the tuple contains an id_string, use it, otherwise default
        if id_string is None and len(incoming) >= 3:
            id_string = incoming[2]
    else:
        responses.append({
            'code': SMS_API_ERROR,
            'text': _(u"Missing 'identity' "
                      u"or 'text' field.")
        })
        return

    if not identity or not text:
        responses.append({
            'code': SMS_API_ERROR,
            'text': _(u"'identity' and 'text' fields can "
                      u"not be empty.")
        })
        return

    # if no id_string has been supplied
    # we expect the SMS to be prefixed with the form's sms_id_string
    if id_string is None:
        keyword, text = [s.strip() for s in text.split(None, 1)]
        xform = XForm.objects.get(user__username=username,
                                  sms_id_string=keyword)
    else:
        xform = XForm.objects.get(user__username=username,
                                  id_string=id_string)

    if not xform.allows_sms:
        responses.append({
            'code': SMS_SUBMISSION_REFUSED,
            'text': _(u"The form '%(id_string)s' does not "
                      u"accept SMS submissions.") % {
                'id_string': xform.id_string
            }
        })
        return

    # parse text into a dict object of groups with values
    json_submission, medias_submission, notes = parse_sms_text(
        xform, identity, text)

    # retrieve sms_response if exist in the form.
    json_survey = json.loads(xform.json)
    if json_survey.get('sms_response'):
        resp_str.update({'success': json_survey.get('sms_response')})

    # check that the form contains at least one filled non-meta group
    meta_groups = sum(1 for k in json_submission if k.startswith('meta'))
    if len(json_submission) <= meta_groups:
        responses.append({
            'code': SMS_PARSING_ERROR,
            'text': _(u"There must be at least one group of "
                      u"questions filled.")
        })
        return

    # check that required fields have been filled
    required_fields = [
        f.get('name') for g in json_survey.get('children', {})
        for f in g.get('children', {})
        if f.get('bind', {}).get('required', 'no') == 'yes'
    ]
    submitted_fields = {}
    for group in json_submission.values():
        submitted_fields.update(group)

    for field in required_fields:
        if not submitted_fields.get(field):
            responses.append({
                'code': SMS_SUBMISSION_REFUSED,
                'text': _(u"Required field `%(field)s` is "
                          u"missing.") % {
                    'field': field
                }
            })
            return

    # convert dict object into an XForm string
    xml_submission = dict2xform(jsform=json_submission,
                                form_id=xform.id_string)

    # compute notes
    data = {}
    for g in json_submission.values():
        data.update(g)
    for idx, note in enumerate(notes):
        try:
            notes[idx] = note.replace('${', '{').format(**data)
        except Exception as e:
            # FIX: this used `text(e)`, but `text` is shadowed by the local
            # SMS-text string above, so calling it raised TypeError inside
            # the handler; use str(e) instead.
            logging.exception(
                _(u'Updating note threw exception: %s' % str(e)))

    # process_incoming expects submission to be a file-like object
    xforms.append(BytesIO(xml_submission.encode('utf-8')))
    medias.append(medias_submission)
    json_submissions.append(json_submission)
    xforms_notes.append(notes)
def importieren(self, pfad = None, liste = None, ergaenzungsname = None, anzeigename_ergaenzen = False, nach_unten = False, force_gruppenname = None, force_scale = None, DBschema_erweitern = True): # Der Username der verwendet werden soll if len(auth_user_global) > 0: # Ist belegt auth_user = auth_user_global[0] else: auth_user = None self.iface.layerTreeView().setCurrentLayer(None) # None entspricht einem Null Pointer -> Auswahl wird entfernt -> nicht ausgewählt # Wird in der Regel verwendet wenn # Gemeindespezifische Daten geladen werden # zwecks Übersichtlichkeit self.anzeigename_aendern = anzeigename_ergaenzen self.gruppen_erg_name = ergaenzungsname # oberste Gruppe/Layer wird mit diesem Namen ergänzt! if pfad == "": return # Das Qgis Projektfile ist ein XML und wird # hier eingelesen try: #pfad = 'd:/delme.qgs' #xml = file(pfad).read() #QtWidgets.QMessageBox.about(None, "Fehler", str(locale.getpreferredencoding())) project_file = open(pfad,'r',-1,'UTF8') xml = project_file.read() d = QtXml.QDomDocument() d.setContent(xml) except IOError: QtWidgets.QMessageBox.about(None, "Fehler", "QGIS Projektdatei " + pfad + " nicht gefunden!") return # Die gewünschten Tagelemente aus dem XML herauslesen self.maps = d.elementsByTagName("maplayer") self.legends = d.elementsByTagName("legendlayer") self.gruppen = d.elementsByTagName("legendgroup") self.lyr = None self.joinlayerid = None #Zuerst den aktuellen Pfad auf dem #Qgis steht auslesen (kann z.B. ein lokaler Pfad sein #von dem ein Projekt geladen wird CurrentPath = QgsProject.instance().fileName() #Dann auf den jeweiligen Pfad setzen, von dem geladen wird. 
Sonst kann kein Projekt #mit absoluten Pfaden abgespeichert werden (für Layer die mit dem #VogisMenü geladen werden) QgsProject.instance().setFileName(pfad) #falls es länger dauert, ein kurzes Infofenster #für den Anwender progressi = QtWidgets.QProgressDialog('Lade Daten','Abbrechen',0,self.maps.length()) progressi.setFixedSize(350,90) btnCancel = QtWidgets.QPushButton() btnCancel.setText('Abbrechen') btnCancel.setFixedSize(70,30) progressi.setCancelButton(btnCancel) progressi.setWindowModality(1) #Schleife geht alle Layer die in der Legende aufscheinen durch. Hier #ist nämlich die reihenfolge festgelegt, wie sie in Qgis dargestellt werden #Diese Schleife brauch ich nur für die richtige Reihenfolge #der importierten Layer in Qgis zaehler = 0 # der Zähler für die Anzahl der geladenen Layer j = 0 #for j in range(self.legends.length(),-1,-1): for j in range(self.legends.length()): # Schleife geht alle Layer die in der maplayer tags aufscheinen durch # dort ist nämlich die wirkliche Information für die Darstellung im # Qgis. Also wird zuerst der Layer per ID in der Obigen # Schleife ausgewählt und dann in dieser Schleife im maplayertag # identifiziert # self.lyr=None for i in range(self.maps.length()): # prüfen ob der jeweilige layer nicht schon geladen ist. um das zu tun # müssen wir im vogis projektimport die identifikation über # die layerid tag machen. berücksichtigt werden muß auch # ob die layerid durch den ergaenzungsnamen erweitert wurde!! 
quelli = self.maps.item(i).namedItem("id").firstChild().toText().data() laden = True lyr_tmp = None for lyr_tmp in QgsProject.instance().mapLayers(): #alle bereits geladenen Layer durchgehen -> Dictionary #QtWidgets.QMessageBox.about(None, "Fehler", str(lyr_tmp)) if (ergaenzungsname == None) and (lyr_tmp == quelli): #Treffer: der Layer ist schon geladen laden = False if (ergaenzungsname != None) and (lyr_tmp == quelli + ergaenzungsname): #Treffer: der Layer ist schon geladen laden = False #Die Layerid ist in den legend tags und maplayer tags gleich #so kann ein layer genau identifiziert werden. ist laden zudem True #gehts also weiter if (self.maps.item(i).namedItem("id").firstChild().toText().data() == self.legends.item(j).namedItem("filegroup").namedItem("legendlayerfile").attributes().namedItem("layerid").nodeValue()) and laden: #ACHTUNG: Wieder aktivieren!!!!!!!!!! # wenn nur ein Teil der Layer eines Projekts geladen werden sollen. Die Liste enthält die # Namen dieser Layer if liste != None: brake_val = True for nd in range(len(liste)): if liste[nd] == self.legends.item(j).attributes().namedItem("name").nodeValue(): brake_val = False break if brake_val: continue # Nächster Layer, ist nicht auf der Liste # prüfen, ob der jeweilige Layer eine oder mehrere Jointabelle(n) verwendet self.joinlayerid = '' for sj in range(self.maps.item(i).namedItem("vectorjoins").childNodes().length()): # leider muss ich dann nochmals alles durchgehen.... 
for lj in range(self.maps.length()): if (self.maps.item(lj).namedItem("id").firstChild().toText().data() == self.maps.item(i).namedItem("vectorjoins").childNodes().item(sj).attributes().namedItem('joinLayerId').nodeValue()): self.joinlayerid = self.maps.item(i).namedItem("vectorjoins").childNodes().item(sj).attributes().namedItem('joinLayerId').nodeValue() #ACHTUNG: unbedingt den nodeValue der ID ändern wenn Gemeindeweise #geladen wird (DKM) Da in den Qgis Projekten der Gemeinden die jeweilig ID des Layers #der Einfachheit halber ident ist, würde so qgis den Layer nicht importieren!!! #So wie der Layername in der Darstellung geändert wird wird auch die ID des Nodes VOR #dem Laden geändert, damit Qgis das dann so übernimmt!! noddi = self.maps.item(i).namedItem("id") if ergaenzungsname != None: noddi.firstChild().setNodeValue(noddi.firstChild().nodeValue() + ergaenzungsname) #Abhängig von der vogisini wird das Encoding #aus der Projektdatei genommen oder CPG datei oder #wird auf System gesetzt #ist self.vogisEncoding == project dann werden die Einstellungen des Projekt verwendet base_name = os.path.dirname(pfad) + '/' + os.path.basename(self.maps.item(i).namedItem("datasource").firstChild().nodeValue()) # Achtung, zwischen absolutem und relativem Pfad unterscheiden if len(os.path.dirname(self.maps.item(i).namedItem("datasource").firstChild().nodeValue())) < 2: # relativer Pfad im QGIS Projekt! base_name = os.path.dirname(pfad) + '/' + os.path.basename(self.maps.item(i).namedItem("datasource").firstChild().nodeValue()) else: # absoluter Pfad im QGIS Projekt! 
base_name = self.maps.item(i).namedItem("datasource").firstChild().nodeValue() if vogisEncoding_global[0] == 'menue': # entweder CPG datei oder System setzen try: # gibts ein cpg datei datei = open(os.path.splitext(base_name)[0] + '.cpg','r') codierung_string = datei.read() datei.close() self.maps.item(i).namedItem("provider").attributes().namedItem('encoding').setNodeValue(codierung_string) except IOError: # Es wird der Wert System zugewiesen self.maps.item(i).namedItem("provider").attributes().namedItem('encoding').setNodeValue('System') # unbedingt ALLES DESELEKTIEREN, sonst Probleme mit der Reihenfolge self.iface.layerTreeView().setCurrentLayer(None) # None entspricht einem Null Pointer -> Auswahl wird entfernt -> nicht ausgewählt nv_ds = '' nv_provider = '' nv_encoding = '' ############################################################################# # Das Umschalten der Vektordaten auf die Geodatenbank - unter Bedingungen # es darf kein Layer aus einer Geodatenbank hier verwurschtelt werden ############################################################################# if self.maps.item(i).attributes().namedItem('type').nodeValue() == 'vector' and vogisDb_global[0] != 'filesystem geodaten' and self.maps.item(i).namedItem("datasource").firstChild().nodeValue().find('host') < 0: tablename = self.maps.item(i).namedItem("datasource").firstChild().nodeValue() sql = '' rc=[] db_ogr = '' # prüfen ob der layer eine shape datenquelle ist # und ob ein subset definiert ist if tablename.find('.shp') > 0 and (tablename.lower().find('subset') > 0 or tablename.lower().find('SUBSET') > 0 or tablename.lower().find('Subset') > 0): rc = textfilter_subset(self.maps.item(i).namedItem("datasource").firstChild().nodeValue()) tablename = rc[0] sql = rc[1] db_ogr = rc[0] else: tablename = os.path.basename(self.maps.item(i).namedItem("datasource").firstChild().nodeValue()).split('.shp')[0] db_ogr = tablename if ergaenzungsname != None and DBschema_erweitern: tablename = str.lower('\"' + 
ergaenzungsname + '\".\"' + tablename + '\"') else: tablename = str.lower('\"vorarlberg".\"' + tablename + '\"') # Sonderzeichen berücksichtigen! tablename = tablename.replace(('ä'),'ae') tablename = tablename.replace(('Ä'),'Ae') tablename = tablename.replace(('ö'),'oe') tablename = tablename.replace(('Ö'),'Oe') tablename = tablename.replace(('ü'),'ue') tablename = tablename.replace(('Ü'),'Ue') tablename = tablename.replace(('ß'),'ss') tablename = tablename.replace('. ','_') ################################################ # Geometriespalte bestimmen -- geht nur mit OGR param_list = str.split(vogisDb_global[0]) host = '' dbname='' port='' for param in param_list: if str.find(param,'dbname') >= 0: dbname = str.replace(param,'dbname=','') elif str.find(param,'host=') >= 0: host = str.replace(param,'host=','') elif str.find(param,'port=') >= 0: port = str.replace(param,'port=','') try: if auth_user == None: outputdb = ogr.Open('pg: host=' + host + ' dbname=' + dbname + ' schemas=vorarlberg' + ' port=' + port) else: outputdb = ogr.Open('pg: host=' + host + ' dbname=' + dbname + ' schemas=vorarlberg' + ' port=' + port + ' user='******'the_geom' ################################################## # Geometriespalte Ende if self.maps.item(i).namedItem("datasource").firstChild().nodeValue().find('ogc_fid') > 0: # Achtung, das Attribut user darf nicht zwingend immer nur klein sein -> Siehe Usermapping in der Doku if auth_user == None: dbpath = str.lower(vogisDb_global[0] + ' sslmode=disable table=' + tablename + ' (' + geom_column + ') sql') + sql else: dbpath = str.lower(vogisDb_global[0]) + ' user='******' sslmode=disable table=' + tablename + ' (' + geom_column + ') sql') + sql else: # Achtung, das Attribut user darf nicht zwingend immer nur klein sein -> Siehe Usermapping in der Doku if auth_user == None: dbpath = str.lower(vogisDb_global[0] + ' sslmode=disable key=ogc_fid table=' + tablename + ' (' + geom_column + ') sql') + sql else: dbpath = 
str.lower(vogisDb_global[0]) + ' user='******' sslmode=disable key=ogc_fid table=' + tablename + ' (' + geom_column + ') sql') + sql nv_ds = self.maps.item(i).namedItem("datasource").firstChild().nodeValue() nv_provider = self.maps.item(i).namedItem("provider").firstChild().nodeValue() nv_encoding = self.maps.item(i).namedItem("provider").attributes().namedItem('encoding').nodeValue() self.maps.item(i).namedItem("datasource").firstChild().setNodeValue(dbpath) self.maps.item(i).namedItem("provider").firstChild().setNodeValue('postgres') self.maps.item(i).namedItem("provider").attributes().namedItem('encoding').setNodeValue('UTF-8') if os.path.abspath(os.path.dirname(__file__)) != path_global[0]: return # Layer einlesen! proj_read = QgsProject.instance().readLayer(self.maps.item(i)) # Der Fortschrittsbalken progressi.setValue(j) progressi.forceShow() if progressi.wasCanceled(): break #QtGui.QMessageBox.about(None, "Achtung", str(proj_read)) if not proj_read and vogisDb_global[0] == 'filesystem geodaten': # hier wird der Layer geladen und gemäß den Eintragungen # der DomNode auch gerendert und dargestellt QtWidgets.QMessageBox.about(None, "Achtung", "Layer " + self.legends.item(j).attributes().namedItem("name").nodeValue() + " nicht gefunden!") continue elif not proj_read and vogisDb_global[0] != 'filesystem geodaten': # Probieren auf Filesystem umzuschalten QtWidgets.QMessageBox.about(None, "Achtung", "Layer - " + self.legends.item(j).attributes().namedItem("name").nodeValue() + " - in der Datenbank nicht gefunden - es wird aufs Filesystem umgeschaltet") self.maps.item(i).namedItem("datasource").firstChild().setNodeValue(nv_ds) self.maps.item(i).namedItem("provider").firstChild().setNodeValue(nv_provider) self.maps.item(i).namedItem("provider").attributes().namedItem(nv_encoding) if not QgsProject.instance().readLayer(self.maps.item(i)): #Trotzdem nicht gefunden, wir geben auf QtWidgets.QMessageBox.about(None, "Achtung", "Layer " + 
self.legends.item(j).attributes().namedItem("name").nodeValue() + " nicht gefunden!") continue # den Anzeigenamen im Qgis ebenfalls ändern # dazu zuerst den richtigen Layer anhand der Layerid auswählen # leginterface = self.iface.legendInterface() #for lyr_tmp in leginterface.layers(): for lyr_tmp in QgsProject.instance().mapLayers(): #alle bereits geladenen Layer durchgehen -> Dictionary if lyr_tmp == noddi.firstChild().nodeValue(): self.lyr = QgsProject.instance().mapLayers()[lyr_tmp] if force_scale != None: self.lyr.setMaximumScale(25000) self.lyr.setScaleBasedVisibility(True) #Abhängig von der vogisini wird das KBS #aus der Projektdatei genommen oder aus dem *.prj File if vogisKBS_global[0] == 'menue': #Koordinatenbezugssystem aus dem prj file holen, wenn vorhanden, #und von dort zuweisen (die Projekteinstellung überschreiben) try: datei = open(os.path.splitext(self.lyr.source())[0] + '.prj','r') bezugssystem_string = datei.read() #falls kein sauberer EPSG String, machen wir eine Zuweisung für unser 31254 if (re.search('MGI\D+Austria\D+GK\D+West',bezugssystem_string, re.I)) != None: #Arcgis macht keinen sauberen EPSG String bezugssystem_crs = QgsCoordinateReferenceSystem() bezugssystem_crs.createFromSrid(31254) else: bezugssystem_crs = QgsCoordinateReferenceSystem(bezugssystem_string) datei.close() self.lyr.setCrs(bezugssystem_crs) except IOError: pass #dann in der Applikation registrieren #QgsMapLayerRegistry.instance().addMapLayer(self.lyr) # gejointe Tabellen brauchen eine Spezialbehandlung: Joininfo wird # ausgelesen, dann der join gelöscht und erst wenn alles geladen wurde # wieder neu erstellt. Sonst kann es Probleme geben! 
unterstütz # werden beleibig viele layer mit beliebig vielen joins # es handelt sich um einen layer mir midestens einem eingetragenen join single_lyr_join = lyr_join() # eigenes struktur objekt instanzieren if not self.joinlayerid == '': # checken ob für den layer mindestens ein join eingetragen ist single_lyr_join.joinlayer = self.lyr single_lyr_join.joininfo = self.lyr.vectorJoins() self.joinliste.append(single_lyr_join) # eine liste mit joinlayern und deren joininfo führen for rem_join in self.lyr.vectorJoins(): # für den joinlayer die joins entfernen - es können merhere sein kasperle = rem_join.joinLayerId self.lyr.removeJoin(str(rem_join.joinLayerId)) #Und nun noch den Layernamen für die Darstellung #im Qgis ergänzen. Siehe oben, bei gemeindeweisem Laden if (ergaenzungsname != None) and (self.lyr != None) and self.anzeigename_aendern: # noch ein boolean wegen der wasserwirtschaft!! if not (self.lyr.name().find(ergaenzungsname) > -1): # ACHTUNG: Sonst wird bei wiederholtem klicken der Name nochmal rangehängt if self.lyr.name().find("(a)") > -1: aktname = str.strip((self.lyr.name().rstrip("(a)"))) + "-" + ergaenzungsname + " (a)" self.lyr.setName(aktname) else: aktname = str.strip(self.lyr.name())+ "-" + ergaenzungsname self.lyr.setName(aktname) # abschließend schauen ob der aktiviert ist if (self.legends.item(j).attributes().namedItem("checked").nodeValue() == "Qt::Unchecked") and not (self.lyr is None): #leginterface.setLayerVisible(self.lyr,False) lyr_tree = QgsProject.instance().layerTreeRoot().findLayer(self.lyr) lyr_tree.setItemVisibilityChecked(False) index = QgsProject.instance().layerTreeRoot() zwetsch =QgsProject.instance().layerTreeRoot().findLayer(self.lyr.id()) dummy = zwetsch.clone() # Die Layer die später geladen werden müssen # auch weiter unte in der Legende sein Reihenfolge) # das wird mit der Variable zaehler gesteuert # QGIS höher 2.6 index_ins = 
index_zuweisen(self.legends.item(j).attributes().namedItem("name").nodeValue(),self.legends.item(j).parentNode()) index.insertChildNode(-1,dummy) zaehler = zaehler + 1 zwetsch.parent().removeChildNode(zwetsch) # sonst gibts probleme in der Reihenfolge # wenn gruppen und layer im top level vermischt if not (self.legends.item(j).parentNode().nodeName() == "legendgroup") and (force_gruppenname is None): zwetsch =QgsProject.instance().layerTreeRoot().findLayer(self.lyr.id()) dummy = zwetsch.clone() index.insertChildNode(index_ins,dummy) zwetsch.parent().removeChildNode(zwetsch) #abschließend schauen ob der Layer aufgeklappt ist #und das flag setzen if (self.legends.item(j).attributes().namedItem("open").nodeValue() == "false") and not (self.lyr is None): dummy.setExpanded(False) elif (self.legends.item(j).attributes().namedItem("open").nodeValue() == "true") and not (self.lyr is None): dummy.setExpanded(True) # hier könnte abgebrochen werden, wenn die layer einfach # nur reingeladen werden OHNE in Gruppenlyer abgelegt zu werden # continue ####################################################### # hier beginnt der Programmteil der die Gruppenlayer # behandelt - entweder wenn im Projektfile definiert # oder einfach wenn es im Menü # erwünscht wird ####################################################### if (self.legends.item(j).parentNode().nodeName() == "legendgroup") or not (force_gruppenname is None): self.gruppe_vorhanden = False #ACHTUNG: Layername und direkt übergeordneter Gruppenname #müssen sich unterscheiden, sonst kommts zu einem Fehler. 
Sollts #dennoch mal vorkommen, wird es hier abgefangen if self.legends.item(j).parentNode().attributes().namedItem("name").nodeValue() == self.legends.item(j).attributes().namedItem("name").nodeValue(): aktname = self.lyr.name() self.lyr.setName(aktname+"_") #prüfen ob die Gruppe schon angelegt ist grp_name = self.legends.item(j).parentNode().attributes().namedItem("name").nodeValue() #Name der Gruppe aus dem QGS Projektfile grp_obj = QgsProject.instance().layerTreeRoot().findGroup(grp_name) if (isinstance(grp_obj,QgsLayerTreeGroup)) and (not (grp_obj is None)): self.gruppe_vorhanden = True grp_name = force_gruppenname #Name ist übergeben worden grp_obj = QgsProject.instance().layerTreeRoot().findGroup(grp_name) if (isinstance(grp_obj,QgsLayerTreeGroup)) and (not (grp_obj is None)): self.gruppe_vorhanden = True ######################################################### # Gruppenlayer aus Projektdatei ######################################################### if self.legends.item(j).parentNode().attributes().namedItem("name").nodeValue() != "" and self.legends.item(j).parentNode().nodeName() == "legendgroup": QgsLayerTreeRegistryBridge(QgsProject.instance().layerTreeRoot(),QgsProject.instance()) kind = self.legends.item(j).parentNode() gruppen_hierarchie = pos_gruppe() gruppen_liste = [] while (kind.nodeName() == "legendgroup"): gruppen_hierarchie.name = kind.attributes().namedItem("name").nodeValue() # der name der dem layer unmittelbar übergeordnete Gruppe: Ebene gruppen_hierarchie.index = index_zuweisen(kind.attributes().namedItem("name").nodeValue(),kind.parentNode()) # Index der Darstellungsreihenfolge der Gruppe in ihrer Hierarchie gruppen_hierarchie.ex = kind.attributes().namedItem("open").nodeValue() gruppen_hierarchie.ch = kind.attributes().namedItem("checked").nodeValue() gruppen_liste.append(copy.deepcopy(gruppen_hierarchie)) # ACHTUNG: Referenz!! 
kind = kind.parentNode() # grp enthält das qtreewidgetitem Objekt der Gruppe!, in die der geladene # Layer verschoben werden soll! grp = sublayer(QgsProject.instance().layerTreeRoot(),gruppen_liste, self.gruppen_erg_name, nach_unten, anzeigename_ergaenzen)[0] #sollten es mehrere sein, immer nur die erste nehmen - siehe Erklärung beim Sub selbst zwtsch = QgsProject.instance().layerTreeRoot().findLayer(self.lyr.id()) dummy = zwtsch.clone() if not (isinstance(grp,QgsLayerTreeGroup)) or grp is None: QtWidgets.QMessageBox.about(None, "ACHTUNG","Anlegen der Gruppe gescheitert") break index_layer = index_zuweisen(self.legends.item(j).attributes().namedItem("name").nodeValue(),self.legends.item(j).parentNode()) # QtGui.QMessageBox.about(None, "LayeriD", str(dummy.layerId())) grp.insertChildNode(index_layer,dummy) zwtsch.parent().removeChildNode(zwtsch) # zwilling entfernen! ########################################################## # hier Endet der Teil der Gruppenlayer aus Projektdatei!! ######################################################### letzterplatz = False #Flagvariable ermittelt ob die Gruppe ganz nach unten gehört #die gruppe in die der layer eingebettet ist kommt nicht aus #einem projekt, sondern wird erzwungen. hier gibts allerdings #nur eine ebene (was das ganze einfacher macht) if (not force_gruppenname is None): # gruppe anlegen gruppen_hierarchie = pos_gruppe() gruppen_hierarchie.name = force_gruppenname # grp = sublayer(QgsProject.instance().layerTreeRoot(),leginterface,[gruppen_hierarchie])[0] grp = sublayer(QgsProject.instance().layerTreeRoot(),[gruppen_hierarchie])[0] zwtsch = QgsProject.instance().layerTreeRoot().findLayer(self.lyr.id()) #der geladene layer dummy = zwtsch.clone() # wiviele layer sind in der gruppe bereits vorhanden? 
# baum = QgsLayerTreeModel(grp) # anzahl_top_level_eintraege = baum.rowCount() baum = grp.findLayers() anzahl_top_level_eintraege = len(baum) baum = None # Sonst Absturz bei grp.parent().removeChildNode(grp) da baum auf ein Nichts refenrenziert! # den neuen ganz hinten einsetzen grp.insertChildNode(anzahl_top_level_eintraege,dummy) zwtsch.parent().removeChildNode(zwtsch) grp.setExpanded(False) if nach_unten: # ganz nach unten mit der gefüllten Gruppe, wenn das Flag gesetzt ist if not self.gruppe_vorhanden: dummy = grp.clone() QgsProject.instance().layerTreeRoot().insertChildNode(-1,dummy) grp.parent().removeChildNode(grp) else: # die Layer werden NICHT in einen self.gruppenlayer geladen # sollen aber nach unten verschoben werden if nach_unten: # wiviele layer sind in der gruppe bereits vorhanden? baum = QgsLayerTreeModel(QgsProject.instance().layerTreeRoot()) anzahl_top_level_eintraege = baum.rowCount() baum = None # Sonst Absturz bei grp.parent().removeChildNode(grp) da baum auf ein Nichts refenrenziert! zwtsch = QgsProject.instance().layerTreeRoot().findLayer(self.lyr.id()) #der geladene layer dummy = zwtsch.clone() # den neuen ganz hinten einsetzen QgsProject.instance().layerTreeRoot().insertChildNode(anzahl_top_level_eintraege,dummy) zwtsch.parent().removeChildNode(zwtsch) # abschließend schauen ob der Layer aufgeklappt ist # und das flag setzen - beim Verschieben in die Gruppenlayer # verändert sich das nämlich manchmal... if (self.legends.item(j).attributes().namedItem("open").nodeValue() == "false") and not (self.lyr is None): dummy.setExpanded(False) elif (self.legends.item(j).attributes().namedItem("open").nodeValue() == "true") and not (self.lyr is None): dummy.setExpanded(True) # der nachfolgende Code erzwingt eine Aktualisierung # der Legende und des MapWindow # Ansonsten kanns im Mapwindow Darstellungsprobleme geben! Wieso?? 
if not self.lyr is None: anzeigename = self.lyr.name() self.lyr.setName(anzeigename+" ") self.lyr.setName(anzeigename) else: QtWidgets.QMessageBox.about(None, "Achtung", "Layer " + self.legends.item(j).attributes().namedItem("name").nodeValue() + " nicht gefunden!") # unbedingt ALLES DEselektieren, sonst Probleme mit Reihenfolge self.iface.layerTreeView().setCurrentLayer(None) # None entspricht einem Null Pointer -> Auswahl wird entfernt -> nicht ausgewählt #Unbedingt zurücksetzen sonst kanns beim wiederholten #laden des gleichen Projektfiles einen Fehler geben: #wenn nämlich die Schleife erneut beginnt, nicht lädt und self.lyr #beim vorherigen laden steht! self.lyr = None # und weiter in der Schleife! # UNBEDINGT am Schluss QGis wieder auf den usprünglichen # Pfad zurücksetzen QgsProject.instance().setFileName(CurrentPath) #ACHTUNG: Aus irgendeinem Grund gibts Probleme mit den Gruppenlayer: Wenn innerhalb der so angelegten Gruppen # ein Layer ausgewählt wird, gibts beim Laden danach einen Fehler. Es MUSS deshalb der oberste Eintrag # der Legende vor allem Laden als Aktueller Layer gesetzt werden!!! #Objekte besser löschen self.legends = None self.legendTree = None self.maps = None self.legends = None self.gruppen = None ###################################################################### # Abschlussprüfung: sind alle da #prüfen ob alle Layer der Liste geladen wurden #das ist notwendig, da ja beim Projektladen alles passen kann aber #ein Layer nicht vorhanden ist ###################################################################### fehler = 0 layerzaehler = 0 # Weg mit dem Fortschrittsbalken # self.info.close() if liste != None: #wenn nur ein Teil der Layer eines Projekts geladen wurde. 
Die Liste enthält die #Namen dieser Layer for nd in range(len(liste)): for lyr_tmp_id in QgsProject.instance().mapLayers(): #alle bereits geladenen Layer durchgehen -> Dictionary lyr_tmp = QgsProject.instance().mapLayer(lyr_tmp_id) # Unbedingt die optionale Änderung des # Anzeigenamens (z.B. DKM) mitberücksichtigen!) if (ergaenzungsname != None) and self.anzeigename_aendern: if liste[nd] + "-" + ergaenzungsname == lyr_tmp.name(): layerzaehler = layerzaehler +1 elif liste[nd].rstrip(" (a)") + "-" + ergaenzungsname + ' (a)' == lyr_tmp.name(): layerzaehler = layerzaehler +1 else: if liste[nd] == lyr_tmp.name(): layerzaehler = layerzaehler +1 # ACHTUNG: Wurden nicht alle in der Liste (fürs importieren übergebne Layerliste mit Layernamen) angeführten Layer # anhand des Layernamensim Projekt gefunden gibts # hier noch eine Fehlermeldung if not liste is None: if len(liste) > layerzaehler: #Ints! Dann wurde was nicht geladen QtWidgets.QMessageBox.about(None, "Achtung", "Nicht alle Layer aus " + pfad + " konnte(n) geladen werden!!") # gejointe Relationen wiederherstellen # aber erst ganz am Schluss!! for singlejoin in self.joinliste: for singlejoininfo in singlejoin.joininfo: singlejoin.joinlayer.addJoin(singlejoininfo)
def eqframe(prefix, xmax=40.):
    """Render equatorial-plane diagnostic plots for one simulation snapshot.

    Reads the ASCII dumps ``<prefix>_eq_{r,phi,rho,p,pm,uu}.dat`` (radial
    mesh, azimuthal mesh, density, gas pressure, magnetic pressure, velocity
    components) and writes three figures, each as EPS and PNG:

    * ``<prefix>_eq_rho``  -- relative deviation of density from its
      azimuthal mean,
    * ``<prefix>_eq_ur``   -- radial mass flux ``rho * u^r``,
    * ``<prefix>_eq_beta`` -- log10 of plasma beta (``p / pm``) with density
      contours overlaid.

    Parameters
    ----------
    prefix : str
        Common path prefix of the ``_eq_*.dat`` data files.
    xmax : float, optional
        Half-width of the plotted square window around the origin
        (default 40; units are whatever the dump files use --
        presumably gravitational radii, TODO confirm).

    Notes
    -----
    Mesh sizes and the Kerr spin ``a`` / time ``t`` come from
    :func:`dinforead`; ``nr`` is then re-derived from the radial file itself.
    ``bhole`` (defined elsewhere in this module) draws the black-hole disk.
    """
    def _read_tokens(path):
        # Yield the whitespace-split tokens of each line, stopping at the
        # first blank line or EOF -- mirrors the original readline loops.
        with open(path, 'r') as f:
            tokens = f.readline().split()
            while tokens:
                yield tokens
                tokens = f.readline().split()

    rfile = prefix + '_eq_r.dat'
    phifile = prefix + '_eq_phi.dat'
    rhofile = prefix + '_eq_rho.dat'
    pfile = prefix + '_eq_p.dat'
    pmfile = prefix + '_eq_pm.dat'
    uufile = prefix + '_eq_uu.dat'

    nr, nh, nphi, a, t = dinforead(prefix)

    # Radial mesh (first column only); trust the file's length over dinfo's nr.
    r = asarray([row[0] for row in _read_tokens(rfile)], dtype=double)
    nr = size(r)

    # Azimuthal angle mesh (first column only).
    phi = asarray([row[0] for row in _read_tokens(phifile)], dtype=double)

    # 2-D grid: meshgrid(phi, r) -> shape (nr, nphi); the phi grid itself is
    # unused because sin(phi)/cos(phi) broadcast against r2 below.
    r2 = meshgrid(phi, r)[1]
    print(shape(r2))
    print(nr, nphi)

    # Density, gas pressure and magnetic pressure, each reshaped to the grid.
    rho = reshape(asarray([row[0] for row in _read_tokens(rhofile)],
                          dtype=double), [nr, nphi])
    p = reshape(asarray([row[0] for row in _read_tokens(pfile)],
                        dtype=double), [nr, nphi])
    pm = reshape(asarray([row[0] for row in _read_tokens(pmfile)],
                         dtype=double), [nr, nphi])

    # Velocity field: column 1 is u^r; Omega = u^phi / u^t = col3 / col0.
    ur = []
    omega = []
    for row in _read_tokens(uufile):
        ur.append(double(row[1]))
        omega.append(double(row[3]) / double(row[0]))
    ur = reshape(asarray(ur, dtype=double), [nr, nphi])
    omega = reshape(asarray(omega, dtype=double), [nr, nphi])

    # Event-horizon radius of a Kerr hole with spin parameter a.
    rhor = 1. + sqrt(1. - a**2)

    # Relative density deviation from the azimuthal mean, per radius
    # (vectorized replacement of the original per-row loop).
    rhomean = rho.mean(axis=1)
    drho = rho / rhomean[:, None] - 1.
    drholevs = (arange(40) / 20. - 0.5) * 5.

    # --- plot 1: density deviations -------------------------------------
    clf()
    figure()
    contourf(r2 * sin(phi), r2 * cos(phi), drho, levels=drholevs)
    xlim(-xmax, xmax)
    ylim(-xmax, xmax)
    bhole(rhor)
    title('deviations from mean density profile, t=' + str(t))
    savefig(prefix + '_eq_rho.eps')
    savefig(prefix + '_eq_rho.png')
    close()

    # --- plot 2: radial mass flux ---------------------------------------
    clf()
    # BUG FIX: contourf has no 'nlevels' keyword; an int 'levels' requests
    # that many automatically chosen levels.
    contourf(r2 * sin(phi), r2 * cos(phi), ur * rho, levels=30)
    colorbar()
    xlim(-xmax, xmax)
    ylim(-xmax, xmax)
    bhole(rhor)
    title(r'$\rho u^r$, t=' + str(t))
    savefig(prefix + '_eq_ur.eps')
    savefig(prefix + '_eq_ur.png')
    close()

    # --- plot 3: plasma beta (magnetization) ----------------------------
    beta1 = 0.1
    beta2 = 1000.
    nbeta = 20
    # Logarithmically spaced contour levels between beta1 and beta2.
    betalevs = log10((beta2 / beta1) ** (arange(nbeta) / double(nbeta - 1))
                     * beta1)
    clf()
    contourf(r2 * sin(phi), r2 * cos(phi), log10(p / pm), levels=betalevs)
    colorbar()
    # BUG FIX: keyword is 'linewidths', not 'lineswidth'.
    contour(r2 * sin(phi), r2 * cos(phi), drho, colors='k',
            levels=drholevs, linewidths=1)
    xlim(-xmax, xmax)
    ylim(-xmax, xmax)
    bhole(rhor)
    title(r'$\lg\beta$, t=' + str(t))
    savefig(prefix + '_eq_beta.eps')
    savefig(prefix + '_eq_beta.png')