def execute(self, grades, moduleDict, solutionDict):
    """Grade one search test case against the stored solution file.

    Compares the student's solution path with the stored solution (either
    traversal direction is accepted) and checks the number of expanded
    nodes against the gold count, allowing a leeway factor.

    :param grades: grading object; receives PASS/FAIL messages.
    :param moduleDict: maps module names ('search', 'searchAgents') to
        the student's loaded modules.
    :param solutionDict: parsed solution file with keys 'solution',
        'rev_solution', 'expanded_nodes', 'rev_expanded_nodes'.
    :returns: True on pass, False on fail.
    """
    search = moduleDict['search']
    searchAgents = moduleDict['searchAgents']
    # Accept the solution found by iterating successors in either order.
    gold_solution = [
        str.split(solutionDict['solution']),
        str.split(solutionDict['rev_solution'])
    ]
    # The more permissive of the two expansion counts is the baseline.
    gold_expanded = max(int(solutionDict['expanded_nodes']),
                        int(solutionDict['rev_expanded_nodes']))

    solution, expanded, error = self.getSolInfo(search, searchAgents)
    if error != None:
        grades.addMessage('FAIL: %s' % self.path)
        grades.addMessage('%s' % error)
        return False

    # FIXME: do we want to standardize test output format?
    if solution not in gold_solution:
        grades.addMessage('FAIL: %s' % self.path)
        grades.addMessage('Solution not correct.')
        grades.addMessage('\tstudent solution length: %s' % len(solution))
        grades.addMessage('\tstudent solution:\n%s' % wrap_solution(solution))
        grades.addMessage('')
        grades.addMessage('\tcorrect solution length: %s' % len(gold_solution[0]))
        grades.addMessage('\tcorrect (reversed) solution length: %s' % len(gold_solution[1]))
        grades.addMessage('\tcorrect solution:\n%s' % wrap_solution(gold_solution[0]))
        grades.addMessage('\tcorrect (reversed) solution:\n%s' % wrap_solution(gold_solution[1]))
        return False

    # Fail only when the student exceeds BOTH the multiplicative leeway
    # and the absolute +1 slack (protects tiny gold counts).
    if expanded > self.leewayFactor * gold_expanded and expanded > gold_expanded + 1:
        grades.addMessage('FAIL: %s' % self.path)
        grades.addMessage(
            'Too many node expanded; are you expanding nodes twice?')
        grades.addMessage('\tstudent nodes expanded: %s' % expanded)
        grades.addMessage('')
        grades.addMessage(
            '\tcorrect nodes expanded: %s (leewayFactor %s)' %
            (gold_expanded, self.leewayFactor))
        return False

    grades.addMessage('PASS: %s' % self.path)
    grades.addMessage('\tpacman layout:\t\t%s' % self.layoutName)
    grades.addMessage('\tsolution length: %s' % len(solution))
    grades.addMessage('\tnodes expanded:\t\t%s' % expanded)
    return True
def get_tbl_data(filename, comment='|'):
    """
    Reads whitespace-delimited numeric data from a text table into a
    numpy array. Comment lines (starting with *comment*) and blank
    lines are skipped.

    :param filename: The name of the table file to read.
    :type filename: str
    :param comment: The symbol that represents a comment.
    :type comment: str
    :returns: numpy.ndarray -- The table data (dtype float64).
    """
    tbl = []
    # 'with' guarantees the handle is closed; the original leaked it.
    with open(filename) as f:
        for line in f:
            # startswith() is safe on an empty final line, unlike line[0].
            if not line.startswith(comment):
                strarr = line.split()
                if strarr:
                    tbl.append(strarr)
    return np.array(tbl, dtype='float64')
def _str2deg(self, str):
    """Convert a two-field sexagesimal string to decimal degrees.

    The first whitespace-separated field is whole degrees; the second
    packs arc-minutes in characters 0-1 and arc-seconds in characters
    3-5. A trailing 's' or 'w' (case-insensitive) hemisphere letter
    negates the result.
    """
    fields = str.split()
    minsec = fields[1]
    degrees = float(fields[0]) \
        + float(minsec[0:2]) / 60. \
        + float(minsec[3:5]) / 3600.
    southern_or_western = minsec[-1].lower() in ['s', 'w']
    return -degrees if southern_or_western else degrees
def dinforead(prefix):
    """Read mesh dimensions, spin and time from <prefix>_dinfo.dat.

    Line 1 holds the grid sizes nr nh nphi, line 2 the Kerr spin
    parameter, line 3 (optional) the time; t defaults to 0. when the
    third line is empty or absent.

    Returns the tuple (nr, nh, nphi, a, t).
    """
    # mesh data:
    with open(prefix + '_dinfo.dat', 'r') as handle:
        dims = handle.readline().strip().split()
        nr, nh, nphi = int(dims[0]), int(dims[1]), int(dims[2])
        a = double(handle.readline().strip().split()[0])
        time_tokens = handle.readline().strip().split()
        t = double(time_tokens[0]) if time_tokens else 0.
    print("Kerr a = " + str(a))
    return nr, nh, nphi, a, t
def execute(self, grades, moduleDict, solutionDict):
    """Grade one graph-search test case against the stored solution.

    Accepts either traversal direction for the solution, and — when
    self.exactExpansionOrder is set — also requires the list of expanded
    states to match one of the two stored expansion orders.

    :param grades: grading object; receives PASS/FAIL messages.
    :param moduleDict: maps module names to the student's loaded modules.
    :param solutionDict: parsed solution file with keys 'solution',
        'rev_solution', 'expanded_states', 'rev_expanded_states'.
    :returns: True on pass, False on fail.
    """
    search = moduleDict['search']
    searchAgents = moduleDict['searchAgents']
    gold_solution = [
        str.split(solutionDict['solution']),
        str.split(solutionDict['rev_solution'])
    ]
    gold_expanded_states = [
        str.split(solutionDict['expanded_states']),
        str.split(solutionDict['rev_expanded_states'])
    ]

    solution, expanded_states, error = self.getSolInfo(search)
    if error != None:
        grades.addMessage('FAIL: %s' % self.path)
        grades.addMessage('\t%s' % error)
        return False

    if solution in gold_solution and (
            not self.exactExpansionOrder
            or expanded_states in gold_expanded_states):
        grades.addMessage('PASS: %s' % self.path)
        grades.addMessage('\tsolution:\t\t%s' % solution)
        grades.addMessage('\texpanded_states:\t%s' % expanded_states)
        return True
    else:
        grades.addMessage('FAIL: %s' % self.path)
        # Show the test graph so the student can trace the search.
        grades.addMessage('\tgraph:')
        for line in self.diagram.split('\n'):
            grades.addMessage('\t    %s' % (line, ))
        grades.addMessage('\tstudent solution:\t\t%s' % solution)
        grades.addMessage('\tstudent expanded_states:\t%s' % expanded_states)
        grades.addMessage('')
        grades.addMessage('\tcorrect solution:\t\t%s' % gold_solution[0])
        grades.addMessage('\tcorrect expanded_states:\t%s' % gold_expanded_states[0])
        grades.addMessage('\tcorrect rev_solution:\t\t%s' % gold_solution[1])
        grades.addMessage('\tcorrect rev_expanded_states:\t%s' % gold_expanded_states[1])
        return False
def parseAgentArgs(str):
    """Parse a comma-separated 'key=value' option string into a dict.

    Tokens without '=' become flags mapped to the integer 1. Values are
    kept as strings; callers convert as needed.

    :param str: option string such as 'depth=2,evalFn=better', or None.
        (The name shadows the builtin but is kept for compatibility.)
    :returns: dict of options; empty dict when *str* is None.
    """
    if str is None:  # fixed: identity comparison, not '== None' (E711)
        return {}
    pieces = str.split(',')
    opts = {}
    for p in pieces:
        if '=' in p:
            key, val = p.split('=')
        else:
            key, val = p, 1
        opts[key] = val
    return opts
def parse_pfamscan(text):
    """Parse pfam_scan output text into a list of match records.

    Each line recognized by is_pfam_match() contributes one dict that
    combines the regex capture groups (sequence coordinates, Pfam
    accession) with the corresponding pfamA database record.

    :param text: raw pfam_scan output.
    :returns: list of dicts with keys 'description', 'pfamA_acc',
        'seq_start', 'seq_end', 'num_full'.
    """
    # Run is_pfam_match once per line (the original evaluated it twice:
    # once in the condition and once in the result expression).
    matches = [
        m for m in (is_pfam_match(line) for line in text.split("\n")) if m
    ]
    pfams = [get_pfam_from_pfamacc(m.group(3)) for m in matches]
    return [{
        "description": pfam.description,
        "pfamA_acc": match.group(3),
        "seq_start": int(match.group(1)),
        "seq_end": int(match.group(2)),
        "num_full": pfam.num_full
    } for match, pfam in zip(matches, pfams)]
def parseAgentArgs(str):
    """Turn a string like 'a=1,b=2,flag' into {'a': '1', 'b': '2', 'flag': 1}.

    Returns an empty dict for None. Bare tokens (no '=') map to the
    integer 1; values otherwise stay strings.
    """
    if str is None:
        return {}
    opts = {}
    for piece in str.split(','):
        if '=' not in piece:
            opts[piece] = 1
            continue
        key, val = piece.split('=')
        opts[key] = val
    return opts
def cleanup_fault_string(self, str):
    """
    Make a remote exception nicely readable by humans
    so it's not evident that is a remote
    fault. Users should not have to understand tracebacks.
    """
    if ">:" not in str:
        return str
    # Keep only the text after the first '>:' marker and shave a single
    # surrounding quote character from each end, if present.
    rest = str.split(">:", 1)[1]
    if rest[:1] in ('"', "'"):
        rest = rest[1:]
    if rest[-1:] in ('"', "'"):
        rest = rest[:-1]
    return rest
def save(self, *args, **kwargs):
    """Fill in date_taken (from EXIF, else now), refresh caches, and save.

    Attempts to parse the 'EXIF DateTimeOriginal' tag; any failure falls
    through silently so a malformed tag never blocks saving, and the
    current time is used instead. Clears the cached thumbnails before a
    re-save of an existing row, then rebuilds them after saving.
    """
    if self.date_taken is None:
        try:
            exif_date = self.EXIF.get('EXIF DateTimeOriginal', None)
            if exif_date is not None:
                # EXIF timestamps look like 'YYYY:MM:DD HH:MM:SS'.
                d, t = str.split(exif_date.values)
                year, month, day = d.split(':')
                hour, minute, second = t.split(':')
                self.date_taken = datetime(int(year), int(month), int(day),
                                           int(hour), int(minute),
                                           int(second))
        except Exception:
            # Deliberate best-effort parse; narrowed from a bare
            # 'except:' which would also swallow KeyboardInterrupt and
            # SystemExit.
            pass
    if self.date_taken is None:
        self.date_taken = datetime.now()
    # Existing row (has a primary key): drop stale cached images first.
    if self._get_pk_val():
        self.clear_cache()
    super(ImageModel, self).save(*args, **kwargs)
    self.pre_cache()
def parse_sms_text(xform, identity, text):
    """Parse an SMS submission *text* for *xform* into survey answers.

    Splits the message into per-group answer lists (using the form's
    SMS separator), casts each answer to its XLSForm question type, and
    returns a (survey_answers, medias, notes) tuple: answers keyed by
    group name then question name, a list of (filename, bytes) media
    attachments, and the form's 'note' labels.

    Raises SMSCastingError when a value cannot be cast to its type.

    Fixes relative to the previous revision: 'e.message' (Python 2 only;
    AttributeError on Python 3 — the sibling implementation passes 'e'),
    the impossible 'and' in the geopoint length check, and the
    missing-comma substring test "in ('calculate')".

    NOTE(review): names such as SMSCastingError, DEFAULT_SEPARATOR,
    MEDIA_TYPES, META_FIELDS, NA_VALUE, is_last and _ come from the
    enclosing module and are not visible in this chunk.
    """
    json_survey = json.loads(xform.json)
    separator = json_survey.get('sms_separator', DEFAULT_SEPARATOR) \
        or DEFAULT_SEPARATOR
    allow_media = bool(json_survey.get('sms_allow_media', False))
    xlsf_date_fmt = json_survey.get('sms_date_format', DEFAULT_DATE_FORMAT) \
        or DEFAULT_DATE_FORMAT
    # NOTE(review): reads 'sms_date_format' again — possibly meant
    # 'sms_datetime_format'; left unchanged, confirm against the schema.
    xlsf_datetime_fmt = json_survey.get('sms_date_format',
                                        DEFAULT_DATETIME_FORMAT) \
        or DEFAULT_DATETIME_FORMAT

    # extract SMS data into indexed groups of values
    groups = {}
    for group in text.split(separator)[1:]:
        group_id, group_text = [s.strip() for s in group.split(None, 1)]
        groups.update({group_id: [s.strip()
                                  for s in group_text.split(None)]})

    def cast_sms_value(value, question, medias=None):
        ''' Check data type of value and return cleaned version '''
        # medias=None avoids the original mutable default argument; all
        # call sites pass the list explicitly anyway.
        if medias is None:
            medias = []
        xlsf_type = question.get('type')
        xlsf_name = question.get('name')
        xlsf_choices = question.get('children')
        xlsf_required = bool(
            question.get('bind', {}).get('required', '').lower()
            in ('yes', 'true'))
        # we don't handle constraint for now as it's a little complex and
        # unsafe.
        # xlsf_constraint=question.get('constraint')
        if xlsf_required and not len(value):
            raise SMSCastingError(_(u"Required field missing"), xlsf_name)

        def safe_wrap(func):
            # Convert any casting failure into an SMSCastingError that
            # names the offending question.
            try:
                return func()
            except Exception as e:
                raise SMSCastingError(
                    _(u"%(error)s") % {'error': e}, xlsf_name)

        def media_value(value, medias):
            ''' handle media values

                extract name and base64 data.
                fills the media holder with (name, data) tuple '''
            try:
                filename, b64content = value.split(';', 1)
                medias.append((filename, base64.b64decode(b64content)))
                return filename
            except Exception as e:
                raise SMSCastingError(
                    _(u"Media file format "
                      u"incorrect. %(except)r") % {'except': e}, xlsf_name)

        if xlsf_type == 'text':
            return safe_wrap(lambda: str(value))
        elif xlsf_type == 'integer':
            return safe_wrap(lambda: int(value))
        elif xlsf_type == 'decimal':
            return safe_wrap(lambda: float(value))
        elif xlsf_type == 'select one':
            for choice in xlsf_choices:
                if choice.get('sms_option') == value:
                    return choice.get('name')
            raise SMSCastingError(
                _(u"No matching choice "
                  u"for '%(input)s'") % {'input': value}, xlsf_name)
        elif xlsf_type == 'select all that apply':
            values = [s.strip() for s in value.split()]
            ret_values = []
            for indiv_value in values:
                for choice in xlsf_choices:
                    if choice.get('sms_option') == indiv_value:
                        ret_values.append(choice.get('name'))
            return u" ".join(ret_values)
        elif xlsf_type == 'geopoint':
            err_msg = _(u"Incorrect geopoint coordinates.")
            geodata = [s.strip() for s in value.split()]
            # fixed: was 'and', which can never be true — the length
            # check was dead code.
            if len(geodata) < 2 or len(geodata) > 4:
                raise SMSCastingError(err_msg, xlsf_name)
            try:
                # check that latitude and longitude are floats
                lat, lon = [float(v) for v in geodata[:2]]
                # and within sphere boundaries
                if lat < -90 or lat > 90 or lon < -180 and lon > 180:
                    raise SMSCastingError(err_msg, xlsf_name)
                if len(geodata) == 4:
                    # check that altitude and accuracy are integers
                    [int(v) for v in geodata[2:4]]
                elif len(geodata) == 3:
                    # check that altitude is integer
                    int(geodata[2])
            except Exception as e:
                # fixed: was 'e.message' (Python 2 only); the sibling
                # implementation of this function passes 'e' directly.
                raise SMSCastingError(e, xlsf_name)
            return " ".join(geodata)
        elif xlsf_type in MEDIA_TYPES:
            # media content (image, video, audio) must be formatted as:
            # file_name;base64 encodeed content.
            # Example: hello.jpg;dGhpcyBpcyBteSBwaWN0dXJlIQ==
            return media_value(value, medias)
        elif xlsf_type == 'barcode':
            # NOTE(review): text(value) calls the outer *text* string —
            # always raises (caught by safe_wrap); probably meant
            # str(value). Left unchanged — confirm intent.
            return safe_wrap(lambda: text(value))
        elif xlsf_type == 'date':
            return safe_wrap(
                lambda: datetime.strptime(value, xlsf_date_fmt).date())
        elif xlsf_type == 'datetime':
            return safe_wrap(
                lambda: datetime.strptime(value, xlsf_datetime_fmt))
        elif xlsf_type == 'note':
            return safe_wrap(lambda: '')
        raise SMSCastingError(
            _(u"Unsuported column '%(type)s'") % {'type': xlsf_type},
            xlsf_name)

    def get_meta_value(xlsf_type, identity):
        ''' XLSForm Meta field value '''
        if xlsf_type in ('deviceid', 'subscriberid', 'imei'):
            return NA_VALUE
        elif xlsf_type in ('start', 'end'):
            return datetime.now().isoformat()
        elif xlsf_type == 'today':
            return date.today().isoformat()
        elif xlsf_type == 'phonenumber':
            return identity
        return NA_VALUE

    # holder for all properly formated answers
    survey_answers = {}
    # list of (name, data) tuples for media contents
    medias = []
    # keep track of required questions
    notes = []

    # loop on all XLSForm questions
    for expected_group in json_survey.get('children', [{}]):
        if not expected_group.get('type') == 'group':
            # non-grouped questions are not valid for SMS
            continue
        # retrieve part of SMS text for this group
        group_id = expected_group.get('sms_field')
        answers = groups.get(group_id)
        if not group_id or (not answers and not group_id.startswith('meta')):
            # group is not meant to be filled by SMS
            # or hasn't been filled
            continue

        # Add a holder for this group's answers data
        survey_answers.update({expected_group.get('name'): {}})

        # retrieve question definition for each answer
        egroups = expected_group.get('children', [{}])
        # number of intermediate, omited questions (medias)
        step_back = 0
        for idx, question in enumerate(egroups):
            real_value = None
            question_type = question.get('type')
            # fixed: was "in ('calculate')" — a substring test against
            # the string, not a tuple membership test.
            if question_type in ('calculate',):
                # 'calculate' question are not implemented.
                # 'note' ones are just meant to be displayed on device
                continue
            if question_type == 'note':
                if not question.get('constraint', ''):
                    notes.append(question.get('label'))
                continue
            if not allow_media and question_type in MEDIA_TYPES:
                # if medias for SMS has not been explicitly allowed
                # they are considered excluded.
                step_back += 1
                continue

            # pop the number of skipped questions
            # so that out index is valid even if the form
            # contain medias questions (and medias are disabled)
            sidx = idx - step_back

            if question_type in META_FIELDS:
                # some question are not to be fed by users
                real_value = get_meta_value(xlsf_type=question_type,
                                            identity=identity)
            else:
                # actual SMS-sent answer.
                # Only last answer/question of each group is allowed
                # to have multiple spaces
                if is_last(idx, egroups):
                    answer = u" ".join(answers[idx:])
                else:
                    answer = answers[sidx]
            if real_value is None:
                # retrieve actual value and fail if it doesn't meet reqs.
                real_value = cast_sms_value(answer, question=question,
                                            medias=medias)
            # set value to its question name
            survey_answers[expected_group.get('name')] \
                .update({question.get('name'): real_value})

    return survey_answers, medias, notes
def process_incoming(incoming, id_string):
    """Validate and stage one incoming SMS tuple for submission.

    *incoming* is a sequence of (identity, text[, id_string]). On any
    validation failure an error dict is appended to *responses* and the
    function returns early (returns None either way); on success the
    XML submission, its medias, and its notes are appended to the
    staging lists.

    NOTE(review): responses, username, resp_str, xforms, medias,
    json_submissions and xforms_notes are free variables — this function
    appears to be nested inside an enclosing scope that defines them;
    confirm against the full file.
    """
    # assign variables
    if len(incoming) >= 2:
        identity = incoming[0].strip().lower()
        text = incoming[1].strip().lower()
        # if the tuple contains an id_string, use it, otherwise default
        if id_string is None and len(incoming) >= 3:
            id_string = incoming[2]
    else:
        responses.append({
            'code': SMS_API_ERROR,
            'text': _(u"Missing 'identity' "
                      u"or 'text' field.")
        })
        return
    if not len(identity.strip()) or not len(text.strip()):
        responses.append({
            'code': SMS_API_ERROR,
            'text': _(u"'identity' and 'text' fields can "
                      u"not be empty.")
        })
        return
    # if no id_string has been supplied
    # we expect the SMS to be prefixed with the form's sms_id_string
    if id_string is None:
        keyword, text = [s.strip() for s in text.split(None, 1)]
        xform = XForm.objects.get(user__username=username,
                                  sms_id_string=keyword)
    else:
        xform = XForm.objects.get(user__username=username,
                                  id_string=id_string)
    if not xform.allows_sms:
        responses.append({
            'code': SMS_SUBMISSION_REFUSED,
            'text': _(u"The form '%(id_string)s' does not "
                      u"accept SMS submissions.") % {
                          'id_string': xform.id_string
                      }
        })
        return
    # parse text into a dict object of groups with values
    json_submission, medias_submission, notes = parse_sms_text(
        xform, identity, text)
    # retrieve sms_response if exist in the form.
    json_survey = json.loads(xform.json)
    if json_survey.get('sms_response'):
        resp_str.update({'success': json_survey.get('sms_response')})
    # check that the form contains at least one filled group
    meta_groups = sum(
        [1 for k in list(json_submission) if k.startswith('meta')])
    if len(list(json_submission)) <= meta_groups:
        responses.append({
            'code': SMS_PARSING_ERROR,
            'text': _(u"There must be at least one group of "
                      u"questions filled.")
        })
        return
    # check that required fields have been filled
    required_fields = [
        f.get('name') for g in json_survey.get('children', {})
        for f in g.get('children', {})
        if f.get('bind', {}).get('required', 'no') == 'yes'
    ]
    submitted_fields = {}
    for group in json_submission.values():
        submitted_fields.update(group)
    for field in required_fields:
        if not submitted_fields.get(field):
            responses.append({
                'code': SMS_SUBMISSION_REFUSED,
                'text': _(u"Required field `%(field)s` is "
                          u"missing.") % {
                              'field': field
                          }
            })
            return
    # convert dict object into an XForm string
    xml_submission = dict2xform(jsform=json_submission,
                                form_id=xform.id_string)
    # compute notes
    data = {}
    for g in json_submission.values():
        data.update(g)
    for idx, note in enumerate(notes):
        try:
            notes[idx] = note.replace('${', '{').format(**data)
        except Exception as e:
            # NOTE(review): text(e) calls the local *text* string and
            # would raise TypeError if this branch runs — probably meant
            # str(e); confirm.
            logging.exception(
                _(u'Updating note threw exception: %s' % text(e)))
    # process_incoming expectes submission to be a file-like object
    xforms.append(BytesIO(xml_submission.encode('utf-8')))
    medias.append(medias_submission)
    json_submissions.append(json_submission)
    xforms_notes.append(notes)
def gstsuche(self):
    """Search for land parcels (Grundstücke) by parcel number.

    Reads a comma-separated parcel-number list from the txtGstnr input,
    checks via a subset query whether any matching parcel exists in the
    current cadastral district (KG), and on success loads the
    municipality layers and selects the matching features; updates the
    result label and the zoom point either way.
    """
    # The username that should be used
    if len(auth_user_global) > 0:
        # It is set
        auth_user = auth_user_global[0]
    else:
        auth_user = None
    # Reset the text field content
    self.gefunden.setText("")
    self.gefunden.repaint()
    schema = 'vorarlberg'
    ################################################
    # Determine the geometry column -- only works via OGR
    uri = QgsDataSourceUri()
    # No password needed, single sign-on
    uri.setConnection(self.db.hostName(),str(self.db.port()),self.db.databaseName(),'','')
    try:
        if auth_user == None:
            outputdb = ogr.Open('pg: host =' + self.db.hostName() + ' dbname =' + self.db.databaseName() + ' schemas=' + schema + ' port=' + str(self.db.port()))
        else:
            # NOTE(review): the line below is corrupted by credential
            # redaction ('******') — the original presumably appended
            # ' user=' + auth_user to ogr.Open(...) and then assigned
            # geom_column = outputdb.GetLayerByName(schema +
            # '.gst').GetGeometryColumn(); recover from VCS history.
            outputdb = ogr.Open('pg: host =' + self.db.hostName() + ' dbname =' + self.db.databaseName() + ' schemas=' + schema + ' port=' + str(self.db.port()) + ' user='******'gst').GetGeometryColumn()
    except:
        # Fallback geometry column when OGR introspection fails
        geom_column = 'the_geom'
    ##################################################
    uri.setDataSource(schema, 'gst', geom_column)
    if not auth_user == None:
        uri.setUsername(auth_user)
    gst_lyr = QgsVectorLayer(uri.uri(), "gst","postgres")
    #------------------------------------------------------
    # Subset search: does such a parcel exist at all?
    # Only if yes is anything loaded
    #------------------------------------------------------
    fid = []
    # Read the input field and split it right away
    gstliste = str.split(self.txtGstnr.text(),",")
    abfr_str = ''
    nummer = ''
    # Build an OR-chained 'gnr = ...' filter plus a display string
    for gst in gstliste:
        if abfr_str == '':
            abfr_str = abfr_str + 'gnr = \'' + str.strip(gst) + '\' '
            nummer = nummer + gst + " "
        else:
            abfr_str = abfr_str + 'or gnr = \'' + str.strip(gst) + '\' '
            nummer = nummer + gst + " "
    gst_lyr.setSubsetString('(' + abfr_str +') and kg = (\'' + self.kgnummer + '\')')
    gst_lyr.selectAll()
    fid = gst_lyr.selectedFeatureIds()
    #------------------------------------------------------
    # End of subset search
    #------------------------------------------------------
    # Was anything found? yes/no
    if gst_lyr.selectedFeatureCount() >= 1:
        # Found at least one: set the text field and the zoom point
        self.gefunden.setText(("Grundstück ") + nummer + " in KG " + self.Kgemeinde + " gefunden")
        self.zoompunkt = gst_lyr.boundingBoxOfSelected()
        # First load the municipality
        self.ladeGemeinde()
        # Problem: the layer FIDs do not match, because they come from a
        # VIEW and are assigned in the project-import module.
        # Find the loaded layer in order to select features on it
        #for lyr_tmp in self.iface.legendInterface().layers():
        for lyr_tmp_d in QgsProject.instance().mapLayers():
            # (fragment: ...forgets and does not handle a refresh correctly)
            #if lyr_tmp.name() == ("Grundstücke-") + self.Gemeinde + ' (a)':
            lyr_tmp = QgsProject.instance().mapLayers()[lyr_tmp_d]
            if lyr_tmp.name() == ("Grundstücke-") + self.Gemeinde + ' (a)':
                #lyr_tmp = QgsProject.instance().mapLayers()[lyr_tmp_d]
                #if lyr_tmp.name() == ("Grundstücke-") + 'Vorarlberg (a)':
                # Perform the subset selection again:
                # query the FIDs, reset the subset, and select the FIDs
                if not fid is None:
                    lyr_tmp.setSubsetString('(' + abfr_str +') and kg = (\'' + self.kgnummer + '\')')
                    lyr_tmp.selectAll()
                    fid = lyr_tmp.selectedFeatureIds()
                    lyr_tmp.setSubsetString('')
                    # and select
                    lyr_tmp.selectByIds(fid)
    else:
        # Nothing found: reset the text field and the zoom point
        self.gefunden.setText(("Grundstück ") + self.txtGstnr.text() + " in KG " + self.Kgemeinde + " nicht gefunden")
        self.zoompunkt = None
def parse_sms_text(xform, identity, text):
    """Parse an SMS submission *text* for *xform* into survey answers.

    Splits the message into per-group answer lists (using the form's
    SMS separator), casts each answer to its XLSForm question type, and
    returns a (survey_answers, medias, notes) tuple: answers keyed by
    group name then question name, a list of (filename, bytes) media
    attachments, and the form's 'note' labels.

    Raises SMSCastingError when a value cannot be cast to its type.

    NOTE(review): names such as SMSCastingError, DEFAULT_SEPARATOR,
    MEDIA_TYPES, META_FIELDS, NA_VALUE, is_last and _ come from the
    enclosing module and are not visible in this chunk.
    """
    json_survey = json.loads(xform.json)
    separator = json_survey.get('sms_separator', DEFAULT_SEPARATOR) \
        or DEFAULT_SEPARATOR
    allow_media = bool(json_survey.get('sms_allow_media', False))
    xlsf_date_fmt = json_survey.get('sms_date_format', DEFAULT_DATE_FORMAT) \
        or DEFAULT_DATE_FORMAT
    # NOTE(review): reads 'sms_date_format' again — possibly meant
    # 'sms_datetime_format'; confirm against the form schema.
    xlsf_datetime_fmt = json_survey.get('sms_date_format',
                                        DEFAULT_DATETIME_FORMAT) \
        or DEFAULT_DATETIME_FORMAT
    # extract SMS data into indexed groups of values
    groups = {}
    for group in text.split(separator)[1:]:
        group_id, group_text = [s.strip() for s in group.split(None, 1)]
        groups.update({group_id: [s.strip()
                                  for s in group_text.split(None)]})

    def cast_sms_value(value, question, medias=[]):
        ''' Check data type of value and return cleaned version '''
        xlsf_type = question.get('type')
        xlsf_name = question.get('name')
        xlsf_choices = question.get('children')
        xlsf_required = bool(question.get('bind', {})
                             .get('required', '').lower() in ('yes', 'true'))
        # we don't handle constraint for now as it's a little complex and
        # unsafe.
        # xlsf_constraint=question.get('constraint')
        if xlsf_required and not len(value):
            raise SMSCastingError(_(u"Required field missing"), xlsf_name)

        def safe_wrap(func):
            # Convert any casting failure into an SMSCastingError that
            # names the offending question.
            try:
                return func()
            except Exception as e:
                raise SMSCastingError(_(u"%(error)s")
                                      % {'error': e}, xlsf_name)

        def media_value(value, medias):
            ''' handle media values

                extract name and base64 data.
                fills the media holder with (name, data) tuple '''
            try:
                filename, b64content = value.split(';', 1)
                medias.append((filename, base64.b64decode(b64content)))
                return filename
            except Exception as e:
                raise SMSCastingError(_(u"Media file format "
                                        u"incorrect. %(except)r")
                                      % {'except': e}, xlsf_name)

        if xlsf_type == 'text':
            return safe_wrap(lambda: str(value))
        elif xlsf_type == 'integer':
            return safe_wrap(lambda: int(value))
        elif xlsf_type == 'decimal':
            return safe_wrap(lambda: float(value))
        elif xlsf_type == 'select one':
            for choice in xlsf_choices:
                if choice.get('sms_option') == value:
                    return choice.get('name')
            raise SMSCastingError(_(u"No matching choice "
                                    u"for '%(input)s'")
                                  % {'input': value}, xlsf_name)
        elif xlsf_type == 'select all that apply':
            values = [s.strip() for s in value.split()]
            ret_values = []
            for indiv_value in values:
                for choice in xlsf_choices:
                    if choice.get('sms_option') == indiv_value:
                        ret_values.append(choice.get('name'))
            return u" ".join(ret_values)
        elif xlsf_type == 'geopoint':
            err_msg = _(u"Incorrect geopoint coordinates.")
            geodata = [s.strip() for s in value.split()]
            # NOTE(review): 'and' makes this condition always false —
            # probably meant 'or'; the length check is dead code.
            if len(geodata) < 2 and len(geodata) > 4:
                raise SMSCastingError(err_msg, xlsf_name)
            try:
                # check that latitude and longitude are floats
                lat, lon = [float(v) for v in geodata[:2]]
                # and within sphere boundaries
                if lat < -90 or lat > 90 or lon < -180 and lon > 180:
                    raise SMSCastingError(err_msg, xlsf_name)
                if len(geodata) == 4:
                    # check that altitude and accuracy are integers
                    [int(v) for v in geodata[2:4]]
                elif len(geodata) == 3:
                    # check that altitude is integer
                    int(geodata[2])
            except Exception as e:
                raise SMSCastingError(e, xlsf_name)
            return " ".join(geodata)
        elif xlsf_type in MEDIA_TYPES:
            # media content (image, video, audio) must be formatted as:
            # file_name;base64 encodeed content.
            # Example: hello.jpg;dGhpcyBpcyBteSBwaWN0dXJlIQ==
            return media_value(value, medias)
        elif xlsf_type == 'barcode':
            # NOTE(review): text(value) calls the outer *text* string —
            # always raises (caught by safe_wrap); probably meant
            # str(value). Confirm intent.
            return safe_wrap(lambda: text(value))
        elif xlsf_type == 'date':
            return safe_wrap(lambda: datetime.strptime(value,
                                                       xlsf_date_fmt).date())
        elif xlsf_type == 'datetime':
            return safe_wrap(lambda: datetime.strptime(value,
                                                       xlsf_datetime_fmt))
        elif xlsf_type == 'note':
            return safe_wrap(lambda: '')
        raise SMSCastingError(_(u"Unsuported column '%(type)s'")
                              % {'type': xlsf_type}, xlsf_name)

    def get_meta_value(xlsf_type, identity):
        ''' XLSForm Meta field value '''
        if xlsf_type in ('deviceid', 'subscriberid', 'imei'):
            return NA_VALUE
        elif xlsf_type in ('start', 'end'):
            return datetime.now().isoformat()
        elif xlsf_type == 'today':
            return date.today().isoformat()
        elif xlsf_type == 'phonenumber':
            return identity
        return NA_VALUE

    # holder for all properly formated answers
    survey_answers = {}
    # list of (name, data) tuples for media contents
    medias = []
    # keep track of required questions
    notes = []

    # loop on all XLSForm questions
    for expected_group in json_survey.get('children', [{}]):
        if not expected_group.get('type') == 'group':
            # non-grouped questions are not valid for SMS
            continue
        # retrieve part of SMS text for this group
        group_id = expected_group.get('sms_field')
        answers = groups.get(group_id)
        if not group_id or (not answers and not group_id.startswith('meta')):
            # group is not meant to be filled by SMS
            # or hasn't been filled
            continue
        # Add a holder for this group's answers data
        survey_answers.update({expected_group.get('name'): {}})
        # retrieve question definition for each answer
        egroups = expected_group.get('children', [{}])
        # number of intermediate, omited questions (medias)
        step_back = 0
        for idx, question in enumerate(egroups):
            real_value = None
            question_type = question.get('type')
            # NOTE(review): "in ('calculate')" is a substring test, not
            # tuple membership — probably meant ('calculate',); confirm.
            if question_type in ('calculate'):
                # 'calculate' question are not implemented.
                # 'note' ones are just meant to be displayed on device
                continue
            if question_type == 'note':
                if not question.get('constraint', ''):
                    notes.append(question.get('label'))
                continue
            if not allow_media and question_type in MEDIA_TYPES:
                # if medias for SMS has not been explicitly allowed
                # they are considered excluded.
                step_back += 1
                continue
            # pop the number of skipped questions
            # so that out index is valid even if the form
            # contain medias questions (and medias are disabled)
            sidx = idx - step_back
            if question_type in META_FIELDS:
                # some question are not to be fed by users
                real_value = get_meta_value(xlsf_type=question_type,
                                            identity=identity)
            else:
                # actual SMS-sent answer.
                # Only last answer/question of each group is allowed
                # to have multiple spaces
                if is_last(idx, egroups):
                    answer = u" ".join(answers[idx:])
                else:
                    answer = answers[sidx]
            if real_value is None:
                # retrieve actual value and fail if it doesn't meet reqs.
                real_value = cast_sms_value(answer, question=question,
                                            medias=medias)
            # set value to its question name
            survey_answers[expected_group.get('name')] \
                .update({question.get('name'): real_value})

    return survey_answers, medias, notes
def process_incoming(incoming, id_string):
    """Validate and stage one incoming SMS tuple for submission.

    *incoming* is a sequence of (identity, text[, id_string]). On any
    validation failure an error dict is appended to *responses* and the
    function returns early (returns None either way); on success the
    XML submission, its medias, and its notes are appended to the
    staging lists.

    NOTE(review): responses, username, resp_str, xforms, medias,
    json_submissions and xforms_notes are free variables — this function
    appears to be nested inside an enclosing scope that defines them;
    confirm against the full file.
    """
    # assign variables
    if len(incoming) >= 2:
        identity = incoming[0].strip().lower()
        text = incoming[1].strip().lower()
        # if the tuple contains an id_string, use it, otherwise default
        if id_string is None and len(incoming) >= 3:
            id_string = incoming[2]
    else:
        responses.append({'code': SMS_API_ERROR,
                          'text': _(u"Missing 'identity' "
                                    u"or 'text' field.")})
        return
    if not len(identity.strip()) or not len(text.strip()):
        responses.append({'code': SMS_API_ERROR,
                          'text': _(u"'identity' and 'text' fields can "
                                    u"not be empty.")})
        return
    # if no id_string has been supplied
    # we expect the SMS to be prefixed with the form's sms_id_string
    if id_string is None:
        keyword, text = [s.strip() for s in text.split(None, 1)]
        xform = XForm.objects.get(user__username=username,
                                  sms_id_string=keyword)
    else:
        xform = XForm.objects.get(user__username=username,
                                  id_string=id_string)
    if not xform.allows_sms:
        responses.append({'code': SMS_SUBMISSION_REFUSED,
                          'text': _(u"The form '%(id_string)s' does not "
                                    u"accept SMS submissions.")
                          % {'id_string': xform.id_string}})
        return
    # parse text into a dict object of groups with values
    json_submission, medias_submission, notes = parse_sms_text(xform,
                                                               identity,
                                                               text)
    # retrieve sms_response if exist in the form.
    json_survey = json.loads(xform.json)
    if json_survey.get('sms_response'):
        resp_str.update({'success': json_survey.get('sms_response')})
    # check that the form contains at least one filled group
    meta_groups = sum([1 for k in list(json_submission)
                       if k.startswith('meta')])
    if len(list(json_submission)) <= meta_groups:
        responses.append({'code': SMS_PARSING_ERROR,
                          'text': _(u"There must be at least one group of "
                                    u"questions filled.")})
        return
    # check that required fields have been filled
    required_fields = [f.get('name')
                       for g in json_survey.get('children', {})
                       for f in g.get('children', {})
                       if f.get('bind', {}).get('required', 'no') == 'yes']
    submitted_fields = {}
    for group in json_submission.values():
        submitted_fields.update(group)
    for field in required_fields:
        if not submitted_fields.get(field):
            responses.append({'code': SMS_SUBMISSION_REFUSED,
                              'text': _(u"Required field `%(field)s` is "
                                        u"missing.") % {'field': field}})
            return
    # convert dict object into an XForm string
    xml_submission = dict2xform(jsform=json_submission,
                                form_id=xform.id_string)
    # compute notes
    data = {}
    for g in json_submission.values():
        data.update(g)
    for idx, note in enumerate(notes):
        try:
            notes[idx] = note.replace('${', '{').format(**data)
        except Exception as e:
            # NOTE(review): text(e) calls the local *text* string and
            # would raise TypeError if this branch runs — probably meant
            # str(e); confirm.
            logging.exception(_(u'Updating note threw exception: %s'
                                % text(e)))
    # process_incoming expectes submission to be a file-like object
    xforms.append(BytesIO(xml_submission.encode('utf-8')))
    medias.append(medias_submission)
    json_submissions.append(json_submission)
    xforms_notes.append(notes)
def get_emails(str):
    """Split a comma-separated address string into a list of trimmed,
    non-empty e-mail addresses. None yields an empty list."""
    if str is None:
        return []
    trimmed = (chunk.strip() for chunk in str.split(","))
    return [address for address in trimmed if address != ""]
def tedplotter(dire):
    """Plot the stress-to-pressure ratios alpha_{r phi} and alpha_{z phi}
    (matter + electromagnetic stress components divided by pressure)
    from merged HARM output in directory *dire*, saving the two-panel
    figure to <dire>/alphas.eps.

    NOTE(review): relies on pylab-style star imports from the enclosing
    module (asarray, meshgrid, reshape, old_div, plt, BoundaryNorm,
    contourf, bhole, ...).
    """
    nr, nh, nphi, a, t = dinforead(dire + '/merge')
    # radial mesh: one value per line, terminated by an empty line
    rfile = dire + '/merge_r.dat'
    fr = open(rfile, 'r')
    s = str.split(str.strip(fr.readline()))
    r = []
    while (s):
        r.append(s[0])
        s = str.split(str.strip(fr.readline()))
    fr.close()
    r = asarray(r, dtype=double)
    # nr=size(r)
    # polar angle mesh:
    # NOTE(review): unlike every other file here, this path is absolute
    # and user-specific — looks like a leftover debug path; confirm.
    hfile = '/home/pasha/harm/harmpi/' + dire + '/merge_h.dat'
    fh = open(hfile, 'r')
    s = str.split(str.strip(fh.readline()))
    th = []
    while (s):
        th.append(s[0])
        s = str.split(str.strip(fh.readline()))
    fh.close()
    th = asarray(th, dtype=double)
    # nh=size(th)
    # 2d-grid (order??)
    h2, r2 = meshgrid(th, r)
    print(shape(r2))
    print(nr, nh)
    # pressure:
    pfile = dire + '/merge_p.dat'
    fp = open(pfile, 'r')
    s = str.split(str.strip(fp.readline()))
    p = []
    while (s):
        p.append(s[0])
        s = str.split(str.strip(fp.readline()))
    fp.close()
    p = asarray(p, dtype=double)
    p = reshape(p, [nr, nh])
    # TudMA, TudEM: columns 5, 10, 15, 7, 11 of each line — apparently a
    # flattened 4x4 tensor per line (diagonal rr/hh/pp plus rp and hp
    # components); TODO confirm against the HARM output format.
    trr = []
    thh = []
    tpp = []
    trp = []
    thp = []
    tmafile = dire + '/merge_tudma.dat'
    ftma = open(tmafile, 'r')
    s = str.split(str.strip(ftma.readline()))
    rho = []
    while (s):
        trr.append(s[5])
        thh.append(s[10])
        tpp.append(s[15])
        trp.append(s[7])
        thp.append(s[11])
        s = str.split(str.strip(ftma.readline()))
    ftma.close()
    trr = reshape(asarray(trr, dtype=double), [nr, nh])
    thh = reshape(asarray(thh, dtype=double), [nr, nh])
    tpp = reshape(asarray(tpp, dtype=double), [nr, nh])
    trp = reshape(asarray(trp, dtype=double), [nr, nh])
    thp = reshape(asarray(thp, dtype=double), [nr, nh])
    # same components from the electromagnetic file
    emtrr = []
    emthh = []
    emtpp = []
    emtrp = []
    emthp = []
    temfile = dire + '/merge_tudem.dat'
    ftem = open(temfile, 'r')
    s = str.split(str.strip(ftem.readline()))
    rho = []
    while (s):
        emtrr.append(s[5])
        emthh.append(s[10])
        emtpp.append(s[15])
        emtrp.append(s[7])
        emthp.append(s[11])
        s = str.split(str.strip(ftem.readline()))
    ftem.close()
    emtrr = reshape(asarray(emtrr, dtype=double), [nr, nh])
    emthh = reshape(asarray(emthh, dtype=double), [nr, nh])
    emtpp = reshape(asarray(emtpp, dtype=double), [nr, nh])
    emtrp = reshape(asarray(emtrp, dtype=double), [nr, nh])
    emthp = reshape(asarray(emthp, dtype=double), [nr, nh])
    # logarithmically spaced contour levels, rounded, forced to start at 0
    alevs1 = 1e-3 * 0.5
    alevs2 = 1.0 * 0.5
    na = 30
    alevs = (old_div(alevs2, alevs1))**(old_div(arange(na),
                                                double(na - 1))) * alevs1
    alevs = around(alevs, 3)
    alevs[0] = 0.
    alevs = unique(alevs)
    cmap = plt.get_cmap('jet')
    cmap.set_bad('white', 1.)
    norm = BoundaryNorm(alevs, ncolors=cmap.N, clip=False)
    rmax = 15.
    # event-horizon radius for spin a
    rhor = 1. + (1. - a**2)**0.5
    clf()
    fig = figure()
    # left panel: |alpha_{r phi}| = |(T^r_phi,MA + T^r_phi,EM)/p|
    subplot(121)
    contourf(r2 * sin(h2), r2 * cos(h2), fabs(old_div((trp + emtrp), p)),
             levels=alevs, norm=norm, cmap=cmap)
    colorbar()
    # zero crossing of the un-absolute ratio, and pressure contours
    contour(r2 * sin(h2), r2 * cos(h2), (old_div((trp + emtrp), p)),
            colors='w', levels=[0.])
    contour(r2 * sin(h2), r2 * cos(h2), p, colors='w',
            linestyles='dotted')
    xlim(0., rmax)
    ylim(old_div(-rmax, 2.), old_div(rmax, 2.))
    xlabel(r'$\varpi$')
    ylabel(r'$z$')
    bhole(rhor)
    title(r'$\alpha_{r\varphi}$')
    # right panel: same for the theta-phi component
    subplot(122)
    contourf(r2 * sin(h2), r2 * cos(h2), fabs(old_div((thp + emthp), p)),
             levels=alevs, norm=norm, cmap=cmap)
    colorbar()
    contour(r2 * sin(h2), r2 * cos(h2), (old_div((thp + emthp), p)),
            colors='w', levels=[0.])
    contour(r2 * sin(h2), r2 * cos(h2), p, colors='w',
            linestyles='dotted')
    xlim(0., rmax)
    ylim(old_div(-rmax, 2.), old_div(rmax, 2.))
    xlabel(r'$\varpi$')
    ylabel(r'$z$')
    bhole(rhor)
    title(r'$\alpha_{z\varphi}$')
    fig.set_size_inches(15, 5)
    fig.tight_layout(pad=0., h_pad=-2.)
    savefig(dire + '/alphas.eps')
    close()
def eqframe(prefix, xmax=40.):
    """Plot equatorial-plane maps from the ``<prefix>_eq_*.dat`` slice files.

    Produces density-deviation, radial-mass-flux and magnetization (beta)
    maps, written as ``<prefix>_eq_rho``, ``<prefix>_eq_ur`` and
    ``<prefix>_eq_beta`` in both EPS and PNG formats.

    :param prefix: path prefix of the equatorial-slice data files.
    :param xmax: half-size of the plotted box (gravitational radii).
    """
    nr, nh, nphi, a, t = dinforead(prefix)

    def _read_column(fname):
        # First whitespace-separated column, up to the first blank line.
        vals = []
        with open(fname, 'r') as f:
            s = f.readline().split()
            while s:
                vals.append(s[0])
                s = f.readline().split()
        return asarray(vals, dtype=double)

    # radial mesh:
    r = _read_column(prefix + '_eq_r.dat')
    nr = size(r)
    # azimuthal angle mesh:
    phi = _read_column(prefix + '_eq_phi.dat')
    # 2d-grid (order??)
    h2, r2 = meshgrid(phi, r)
    print(shape(r2))
    print(nr, nphi)
    # density:
    rho = reshape(_read_column(prefix + '_eq_rho.dat'), [nr, nphi])
    # gas and magnetic pressure:
    p = reshape(_read_column(prefix + '_eq_p.dat'), [nr, nphi])
    pm = reshape(_read_column(prefix + '_eq_pm.dat'), [nr, nphi])
    # velocity field: u^r (column 1) and Omega = u^phi / u^t:
    ur = []
    omega = []
    with open(prefix + '_eq_uu.dat', 'r') as fuu:
        s = fuu.readline().split()
        while s:
            ur.append(s[1])
            omega.append(old_div(double(s[3]), double(s[0])))
            s = fuu.readline().split()
    ur = reshape(asarray(ur, dtype=double), [nr, nphi])
    omega = reshape(asarray(omega, dtype=double), [nr, nphi])
    rhor = 1. + sqrt(1. - a**2)  # event-horizon radius for spin a
    # density variations with respect to the azimuthal mean:
    rhomean = rho.mean(axis=1)
    drho = zeros([nr, nphi], dtype=double)
    for k in arange(nr):
        drho[k, :] = old_div(rho[k, :], rhomean[k]) - 1.
    drholevs = (old_div(arange(40), double(20.)) - 0.5) * 5.
    clf()
    fig = figure()
    contourf(r2 * sin(phi), r2 * cos(phi), drho, levels=drholevs)
    xlim(-xmax, xmax)
    ylim(-xmax, xmax)
    bhole(rhor)
    title('deviations from mean density profile, t=' + str(t))
    # axis('equal')
    savefig(prefix + '_eq_rho.eps')
    savefig(prefix + '_eq_rho.png')
    close()
    clf()
    # radial mass flux; the level count is a positional argument of
    # contourf (the original passed an invalid ``nlevels=30`` keyword):
    contourf(r2 * sin(phi), r2 * cos(phi), ur * rho, 30)
    colorbar()
    xlim(-xmax, xmax)
    ylim(-xmax, xmax)
    bhole(rhor)
    title(r'$\rho u^r$, t=' + str(t))
    # axis('equal')
    savefig(prefix + '_eq_ur.eps')
    savefig(prefix + '_eq_ur.png')
    close()
    # beta (magnetization) plot
    beta1 = 0.1
    beta2 = 1000.
    nbeta = 20
    betalevs = log10(
        (old_div(beta2, beta1))**(old_div(arange(nbeta),
                                          double(nbeta - 1))) * beta1)
    clf()
    contourf(r2 * sin(phi), r2 * cos(phi), log10(old_div(p, pm)),
             levels=betalevs)
    colorbar()
    # ``lineswidth`` was a typo for matplotlib's ``linewidths``:
    contour(r2 * sin(phi), r2 * cos(phi), drho, colors='k',
            levels=drholevs, linewidths=1)
    xlim(-xmax, xmax)
    ylim(-xmax, xmax)
    bhole(rhor)
    title(r'$\lg\beta$, t=' + str(t))
    # axis('equal')
    savefig(prefix + '_eq_beta.eps')
    savefig(prefix + '_eq_beta.png')
    # close the figure like the other panels do (was missing):
    close()
def velread(prefix='merge_', nope=False, ifaphi=True):
    """Read the merged (time-averaged) 2D fields and averaged velocities.

    :param prefix: path prefix of the ``<prefix>_*.dat`` files.
    :param nope: if True, skip the pressure- and magnetic-pressure-averaged
        velocity files and reuse the density-averaged velocities for them.
    :param ifaphi: if True, read the vector potential A_phi; otherwise the
        density array is returned in its place.
    :returns: tuple ``(r2, h2, rho, p, mp, u0, ur, uh, up, pu0, pur, puh,
        pup, mpu0, mpur, mpuh, mpup, aphi)``.
    """

    def _read_column(fname):
        # First whitespace-separated column, up to the first blank line.
        vals = []
        with open(fname, 'r') as f:
            s = f.readline().split()
            while s:
                vals.append(s[0])
                s = f.readline().split()
        return asarray(vals, dtype=double)

    def _geom_mean(up_, down_):
        # Geometric mean of the contra-/covariant components, carrying the
        # sign of the contravariant one: sqrt(|u^i u_i|) * sign(u^i).
        return sqrt(fabs(up_ * down_)) * sign(up_)

    # coordinate meshes:
    r = _read_column(prefix + '_r.dat')
    h = _read_column(prefix + '_h.dat')
    h2, r2 = meshgrid(h, r)
    nr, nh = shape(h2)
    # density, gas pressure and magnetic pressure:
    rho = reshape(_read_column(prefix + '_rho.dat'), [nr, nh])
    p = reshape(_read_column(prefix + '_p.dat'), [nr, nh])
    mp = reshape(_read_column(prefix + '_mp.dat'), [nr, nh])
    # density-averaged velocities (contravariant uu*, covariant ud*):
    uu0, uur, uuh, uup = rk.uread(prefix + '_uu.dat', [nr, nh])
    ud0, udr, udh, udp = rk.uread(prefix + '_ud.dat', [nr, nh])
    u0 = sqrt(fabs(uu0 * ud0))
    ur = _geom_mean(uur, udr)
    uh = _geom_mean(uuh, udh)
    up = _geom_mean(uup, udp)
    if (nope):
        # fall back on the density-averaged velocities for both sets:
        pu0, pur, puh, pup = u0, ur, uh, up
        mpu0, mpur, mpuh, mpup = u0, ur, uh, up
    else:
        # pressure-averaged velocity:
        puu0, puur, puuh, puup = rk.uread(prefix + '_puu.dat', [nr, nh])
        pud0, pudr, pudh, pudp = rk.uread(prefix + '_pud.dat', [nr, nh])
        # magnetic-pressure-averaged velocity:
        mpuu0, mpuur, mpuuh, mpuup = rk.uread(prefix + '_mpuu.dat', [nr, nh])
        mpud0, mpudr, mpudh, mpudp = rk.uread(prefix + '_mpud.dat', [nr, nh])
        pu0 = sqrt(fabs(puu0 * pud0))
        pur = _geom_mean(puur, pudr)
        puh = _geom_mean(puuh, pudh)
        pup = _geom_mean(puup, pudp)
        mpu0 = sqrt(fabs(mpuu0 * mpud0))
        mpur = _geom_mean(mpuur, mpudr)
        mpuh = _geom_mean(mpuuh, mpudh)
        mpup = _geom_mean(mpuup, mpudp)
    if (ifaphi):
        # vector potential A_phi:
        aphi = reshape(_read_column(prefix + '_aphi.dat'), [nr, nh])
    else:
        aphi = rho
    return (r2, h2, rho, p, mp, u0, ur, uh, up,
            pu0, pur, puh, pup, mpu0, mpur, mpuh, mpup, aphi)
def corvee(n1,n2):
    """Accumulate the velocity-fluctuation correlation matrix over dump
    files ``n1``..``n2`` and write the density-normalized, phi-averaged
    result to ``merge_corv.dat``.

    NOTE(review): ``re`` is used here as the HARM dump-reader module
    (``re.rg``/``re.rd``/grid attributes), not the stdlib regex module —
    confirm the import at the top of the file.

    :param n1: first dump index (inclusive).
    :param n2: last dump index (inclusive).
    """
    # read the grid dump for mesh sizes, metric and coordinate factors:
    re.rg("gdump")
    nx=re.nx ; ny=re.ny ; nz=re.nz
    gdet=re.gdet ; gcov=re.gcov ; _dx2=re._dx2 ; _dx3=re._dx3 ; drdx=re.drdx
    r=re.r ; h=re.h ; phi=re.ph # importing coordinate mesh
    # velocities:
    uufile='merge_uu.dat'
    udfile='merge_ud.dat'
    fuu=open(uufile, 'r')
    fud=open(udfile, 'r')
    # s=str.split(str.strip(fuu.readline()))
    # mean velocity field
    uumean=zeros([4,nx,ny,nz], dtype=double)
    udmean=zeros([4,nx,ny,nz], dtype=double)
    for kx in arange(nx):
        for ky in arange(ny):
            # one line per (r, theta) cell; broadcast over the phi axis:
            s=str.split(str.strip(fuu.readline()))
            uumean[0,kx,ky,:]=double(s[0])
            uumean[1,kx,ky,:]=double(s[1])
            uumean[2,kx,ky,:]=double(s[2])
            uumean[3,kx,ky,:]=double(s[3])
            s=str.split(str.strip(fud.readline()))
            udmean[0,kx,ky,:]=double(s[0])
            udmean[1,kx,ky,:]=double(s[1])
            udmean[2,kx,ky,:]=double(s[2])
            udmean[3,kx,ky,:]=double(s[3])
    fuu.close()
    fud.close()
    # tetrad components built from the mean flow:
    # (t0 is computed but not used below)
    t0=tetrad_t(uumean, udmean)
    tr=tetrad_r(uumean, udmean)
    th=tetrad_h(uumean, udmean)
    tp=tetrad_p(uumean, udmean)
    # print shape(tr)
    nframes=n2-n1+1
    n=n1+arange(nframes)
    for k in n:
        fname=re.dumpname(k)
        re.rd(fname)
        uu=re.uu ; ud=re.ud ; rho=re.rho
        if(k==n1):
            rhomean=rho
            # velocity components:
            # NOTE(review): only this first frame applies the drdx
            # coordinate-transformation factors; the else-branch below
            # subtracts the mean without them — confirm this asymmetry
            # is intended.
            uu0=uu[0]*drdx[0,0]-uumean[0] ; ud0=old_div(ud[0],drdx[0,0])-udmean[0]
            uur=uu[1]*drdx[1,1]-uumean[1] ; udr=old_div(ud[1],drdx[1,1])-udmean[1]
            uuh=uu[2]*drdx[2,2]-uumean[2] ; udh=old_div(ud[2],drdx[2,2])-udmean[2]
            uup=uu[3]*drdx[3,3]-uumean[3] ; udp=old_div(ud[3],drdx[3,3])-udmean[3]
            tuur=(uu0*tr[0]+uur*tr[1]+uuh*tr[2]+uup*tr[3]) # co-moving velocity components
            tuuh=(uu0*th[0]+uur*th[1]+uuh*th[2]+uup*th[3])
            tuup=(uu0*tp[0]+uur*tp[1]+uuh*tp[2]+uup*tp[3])
            # start the density-weighted correlation accumulators:
            drh=rho*tuur*tuuh ; drp=rho*tuur*tuup ; dhp=rho*tuuh*tuup
            drr=rho*tuur*tuur ; dpp=rho*tuup*tuup ; dhh=rho*tuuh*tuuh
            # print(shape(drh))
            # print(shape(rho))
            # print(shape(tuur))
        else:
            rhomean+=rho
            # velocity components:
            uu0=uu[0]-uumean[0] ; ud0=ud[0]-udmean[0]
            uur=uu[1]-uumean[1] ; udr=ud[1]-udmean[1]
            uuh=uu[2]-uumean[2] ; udh=ud[2]-udmean[2]
            uup=uu[3]-uumean[3] ; udp=ud[3]-udmean[3]
            tuur=(uu0*tr[0]+uur*tr[1]+uuh*tr[2]+uup*tr[3])
            tuuh=(uu0*th[0]+uur*th[1]+uuh*th[2]+uup*th[3])
            tuup=(uu0*tp[0]+uur*tp[1]+uuh*tp[2]+uup*tp[3])
            drh+=rho*tuur*tuuh ; drp+=rho*tuur*tuup ; dhp+=rho*tuuh*tuup
            drr+=rho*tuur*tuur ; dpp+=rho*tuup*tuup ; dhh+=rho*tuuh*tuuh
    # normalize by the accumulated density and average over phi (axis 2):
    drh/=rhomean ; drp/=rhomean ; dhp/=rhomean
    drr/=rhomean ; dpp/=rhomean ; dhh/=rhomean
    drh=drh.mean(axis=2) ; drp=drp.mean(axis=2) ; dhp=dhp.mean(axis=2)
    drr=drr.mean(axis=2) ; dpp=dpp.mean(axis=2) ; dhh=dhh.mean(axis=2)
    # write one line per (r, theta) cell:
    fout=open('merge_corv.dat', 'w')
    for kx in arange(nx):
        for ky in arange(ny):
            # RR HH PP RH HP RP
            fout.write(str(drr[kx,ky])+' '+str(dhh[kx,ky])+' '+str(dpp[kx,ky])+' '+str(drh[kx,ky])+' '+str(dhp[kx,ky])+' '+str(drp[kx,ky])+'\n')
    fout.close()
def mplotter(dire='.', nope=False):
    """Produce the standard set of 2D (r, theta) diagnostic plots for the
    merged (time-averaged) simulation data in directory *dire*.

    Writes (EPS/PNG in *dire*): velcompare, rho, beta, omega, stream,
    streamband, streamband_mag, vverts, uh — and, when the correlation
    matrix branches are enabled, vturb/dmatrix figures.

    :param dire: directory with the ``merge_*.dat`` files.
    :param nope: NOTE(review): accepted but never used in this body
        (velread is called without it) — confirm intent.
    """
    # velocity-correlation-matrix branches disabled by default:
    dmatrix = False
    nr, nh, nphi, a, t = dinforead(dire + '/merge')
    r2, h2, rho, p, pm, u0, ur, uh, up, pu0, pur, puh, pup, mpu0, mpur, mpuh, mpup, aphi = velread(
        dire + '/merge')
    # # velocity correlation matrix:
    if (dmatrix):
        dfile = dire + '/merge_corv.dat'
        fd = open(dfile, 'r')
        # s=str.split(str.strip(fd.readline()))
        # symmetric 3x3 velocity-correlation matrix per (r, theta) cell:
        dxy = zeros([3, 3, nr, nh], dtype=double)
        # vtrace1=zeros([nr,nh], dtype=double)
        for kx in arange(nr):
            for ky in arange(nh):
                # file columns: RR HH PP RH HP RP (see the corvee writer)
                s = str.split(str.strip(fd.readline()))
                dxy[0, 0, kx, ky] = double(s[0])
                dxy[1, 1, kx, ky] = double(s[1])
                dxy[2, 2, kx, ky] = double(s[2])
                dxy[0, 1, kx, ky] = double(s[3])
                dxy[1, 0, kx, ky] = double(s[3])
                dxy[1, 2, kx, ky] = double(s[4])
                dxy[2, 1, kx, ky] = double(s[4])
                dxy[0, 2, kx, ky] = double(s[5])
                dxy[2, 0, kx, ky] = double(s[5])
        # vtrace = trace(dxy, axis1=0, axis2=1)
        # print "vtrace = "+str(vtrace1.min())+" to "+str(vtrace1.max())
        # print "vtrace = "+str(vtrace1.min())+" to "+str(vtrace1.max())
    # vertical slice:
    rrangemin = 10.
    rrangemax = 12.
    # boolean mask (as float) selecting the radial shell:
    rrange = double((r2 > rrangemin) * (r2 < rrangemax))
    # averaging over radial velocity
    # NOTE(review): ``vtrace`` is only produced (and even then only in the
    # commented-out line) inside the dmatrix branch above; with
    # dmatrix=False the next statement raises NameError unless vtrace
    # exists at module level — confirm.
    vtracemean = old_div((vtrace * rho * rrange).mean(axis=0),
                         (rho * rrange).mean(axis=0))
    urmean = old_div((ur * rho * rrange).mean(axis=0),
                     (rho * rrange).mean(axis=0))
    upmean = old_div((up * rho * rrange).mean(axis=0),
                     (rho * rrange).mean(axis=0))
    uhmean = old_div((uh * rho * rrange).mean(axis=0),
                     (rho * rrange).mean(axis=0))
    th = unique(h2)
    fig = figure()
    clf()
    plot(cos(th), sqrt(vtracemean), label='velocity RMS', color='b')
    plot(cos(th), urmean, label='radial velocity', color='r')
    plot(cos(th), -urmean, color='r', linestyle='dotted')
    plot(cos(th), upmean, label='rotation velocity', color='k')
    # plot(cos(th), th*0.+rrangemin/(rrangemin**1.5+a), color='k', linestyle='dotted')
    # plot(cos(th), th*0.+rrangemax/(rrangemax**1.5+a), color='k', linestyle='dotted')
    plot(cos(th), uhmean, label='latitudinal velocity', color='g')
    plot(cos(th), -uhmean, color='g', linestyle='dotted')
    yscale('log')
    legend(loc='best')
    xlabel(r'$\cos\theta$')
    ylabel('$v/c$')
    fig.set_size_inches(12, 6)
    savefig(dire + '/velcompare.eps')
    close()
    ono = 20  # number of angular frequency levels
    # NOTE(review): ``h`` is presumably an imported module providing
    # Risco (ISCO radius) — confirm against the file's imports.
    rmin = old_div(h.Risco(a), 2.)
    rhor = 1. + (1. - a**2)**0.5  # event-horizon radius
    rmax = 20.
    # log-spaced radii and the corresponding Keplerian Omega levels:
    rlevs = (rmax / rmin * 1.5)**(old_div(arange(ono), double(ono))) * rmin
    olevs = old_div(1., (rlevs**1.5 + a))
    olevs = olevs[::-1]
    olevs[ono - 1] = olevs.max() * 10.
    cmap = plt.get_cmap('jet')
    cmap.set_bad('white', 1.)
    # grr=1./(1.-2./r+a**2/r**2)
    norm = BoundaryNorm(olevs, ncolors=cmap.N, clip=True)
    # density plot:
    clf()
    # NOTE(review): ``nlevels`` is not a matplotlib contourf keyword (the
    # level count is positional) — confirm this call works on the target
    # matplotlib version.
    contourf(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3), cmap=cmap,
             nlevels=30)
    contour(r2 * sin(h2), r2 * cos(h2), aphi, colors='k')
    xlim(0., rmax)
    ylim(old_div(-rmax, 2.), old_div(rmax, 2.))
    xlabel(r'$\varpi$')
    ylabel(r'$z$')
    bhole(rhor)
    savefig(dire + '/rho.eps')
    savefig(dire + '/rho.png')
    # beta magnetization plot:
    beta1 = 0.1
    beta2 = 1000.
    nbeta = 30
    betalevs = log10(
        (old_div(beta2, beta1))**(old_div(arange(nbeta),
                                          double(nbeta - 1))) * beta1)
    clf()
    contourf(r2 * sin(h2), r2 * cos(h2), log10(old_div(p, pm)),
             levels=betalevs)
    colorbar()
    # equipartition (beta = 1) contour:
    contour(r2 * sin(h2), r2 * cos(h2), log10(old_div(p, pm)), levels=[0.],
            colors='w', linewidths=2.)
    xlim(0., rmax)
    ylim(old_div(-rmax, 2.), old_div(rmax, 2.))
    xlabel(r'$\varpi$')
    ylabel(r'$z$')
    bhole(rhor)
    savefig(dire + '/beta.eps')
    savefig(dire + '/beta.png')
    # radial velocity
    clf()
    fig = figure()
    contourf(r2 * sin(h2), r2 * cos(h2), up, levels=olevs, norm=norm)
    colorbar()
    # Keplerian angular-frequency contours for reference:
    contour(r2 * sin(h2), r2 * cos(h2),
            old_div(1., ((r2 * sin(h2))**1.5 + a)),
            colors='k', levels=olevs, linewidths=1)
    contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3), colors='w')
    xlim(0., 20.)
    ylim(-10., 10.)
    xlabel(r'$\varpi$')
    ylabel(r'$z$')
    bhole(rhor)
    fig.set_size_inches(8, 8)
    savefig(dire + '/omega.eps')
    savefig(dire + '/omega.png')
    vlevs = (arange(ono) / double(ono) * 2. - 1.) * 0.01
    vlevs[0] = ur.min() * 1.1
    vlevs[ono - 1] = ur.max() * 1.1
    norm = BoundaryNorm(vlevs, ncolors=cmap.N, clip=True)
    hdisk = 0.25  # disk/wind separation in |cos(theta)|
    wdisk = double(fabs(cos(h2)) < hdisk)
    wwind = double(fabs(cos(h2)) > hdisk)
    urmean = old_div(((rho * ur) * wdisk).mean(axis=1),
                     (rho * wdisk).mean(axis=1))
    urmeanp = old_div(((p * pur) * wdisk).mean(axis=1),
                      (p * wdisk).mean(axis=1))
    # flow lines:
    nx = 20
    ny = 21
    xmin = 0.
    xmax = 30.
    ymin = -10.
    ymax = 10.
    cs = 1.  # p/rho/(4./3.)
    xflow = (xmax - xmin) * (arange(nx) + 0.5) / double(nx) + xmin
    yflow = (ymax - ymin) * (arange(ny) + 0.5) / double(ny) + ymin
    x2, y2 = meshgrid(xflow, yflow)
    # vxfun=interp2d(r2*sin(h2), r2*cos(h2), ur*sin(h2)+uh*cos(h2),kind='linear')
    # vyfun=interp2d(r2*sin(h2), r2*cos(h2), -uh*sin(h2)+ur*cos(h2),kind='linear')
    # vx=vxfun(xflow, yflow) ; vy=vyfun(xflow, yflow)
    # project (u^r, u^theta) onto Cartesian (x, z) and regrid:
    xgrid = (r2 * sin(h2)).flatten()
    ygrid = (r2 * cos(h2)).flatten()
    vxflow = (ur / cs * sin(h2) + uh / cs * cos(h2)).flatten()
    vyflow = (-uh / cs * sin(h2) + ur / cs * cos(h2)).flatten()
    pvxflow = (pur / cs * sin(h2) + puh / cs * cos(h2)).flatten()
    pvyflow = (-puh / cs * sin(h2) + pur / cs * cos(h2)).flatten()
    # vxflow=xgrid ; vyflow=ygrid
    vx = griddata(list(zip(xgrid, ygrid)), vxflow, (x2, y2), method='nearest')
    vy = griddata(list(zip(xgrid, ygrid)), vyflow, (x2, y2), method='nearest')
    pvx = griddata(list(zip(xgrid, ygrid)), pvxflow, (x2, y2), method='nearest')
    pvy = griddata(list(zip(xgrid, ygrid)), pvyflow, (x2, y2), method='nearest')
    vmin = 1e-8  # sqrt((vx**2+vy**2)).min()*9.
    vmax = 0.1  # sqrt((vx**2+vy**2)).max()*1.1
    vlevs = log10(
        (old_div(vmax, vmin))**(old_div(arange(20), double(19))) * vmin)
    vlevs[0] = -30.
    norm = BoundaryNorm(vlevs, ncolors=cmap.N, clip=True)
    # vmax=0.01
    # vmin=-0.01
    # vlevs=(vmax-vmin)*(arange(20)/double(19))+vmin
    # norm = BoundaryNorm(vlevs, ncolors=cmap.N, clip=True)
    clf()
    fig = figure()
    contourf(r2 * sin(h2), r2 * cos(h2),
             log10(old_div(sqrt(ur**2 + uh**2), cs)),
             levels=vlevs, norm=norm)
    # contourf(xflow, yflow, sqrt(vx**2+vy**2),levels=vlevs,norm=norm)
    colorbar()
    streamplot(xflow, yflow, pvx, pvy, color='k')
    streamplot(xflow, yflow, vx, vy, color='w')
    xlim(xmin, xmax)
    ylim(ymin, ymax)
    xlabel(r'$\varpi$')
    ylabel(r'$z$')
    bhole(rhor)
    fig.set_size_inches(15, 8)
    savefig(dire + '/stream.eps')
    savefig(dire + '/stream.png')
    close()
    # near eqplane:
    xscale = 10.
    nx = 7
    ny = 5
    xflow = xscale * (arange(nx)) / double(nx - 1)
    yflow = xscale * hdisk * ((arange(ny)) / double(ny - 1) * 2. - 1.)
    x2, y2 = meshgrid(xflow, yflow)
    vx = griddata(list(zip(xgrid, ygrid)), vxflow, (x2, y2), method='nearest')
    vy = griddata(list(zip(xgrid, ygrid)), vyflow, (x2, y2), method='nearest')
    pvx = griddata(list(zip(xgrid, ygrid)), pvxflow, (x2, y2), method='nearest')
    pvy = griddata(list(zip(xgrid, ygrid)), pvyflow, (x2, y2), method='nearest')
    vratmin = 0.6  # 0.2
    vratmax = 1.1  # 1.
    nv = 10
    vratlevs = (arange(nv + 1)) / double(nv) * (vratmax - vratmin) + vratmin
    # vratlevs[9]=1.3
    clf()
    fig = figure()
    # (sqrt(pur**2+puh**2))/(sqrt(ur**2+uh**2))
    # ratio of pressure-averaged to density-averaged radial velocity:
    contourf(r2 * sin(h2), r2 * cos(h2), old_div(pur, ur), levels=vratlevs,
             cmap='jet')
    colorbar()
    contour(r2 * sin(h2), r2 * cos(h2), old_div(pur, ur), levels=[1.],
            colors='w')
    plot([0., xscale], [0., 0.], color='k', linestyle='dotted')
    # streamplot(xflow, yflow, pvx, pvy,color='k')
    streamplot(xflow, yflow, vx, vy, color='k')
    xlim(0.5, xscale)
    ylim(-xscale * hdisk, xscale * hdisk)
    xlabel(r'$\varpi$')
    ylabel(r'$z$')
    bhole(rhor)
    fig.set_size_inches(5 * 2 + 1, 5 * 2 * hdisk + 1.5)
    fig.tight_layout(pad=0.5)
    savefig(dire + '/streamband.eps')
    savefig(dire + '/streamband.png')
    close()
    vratmin = 0.5  # 0.2
    vratmax = 2.5  # 1.
    nv = 10
    vratlevs = (arange(nv + 1)) / double(nv) * (vratmax - vratmin) + vratmin
    clf()
    fig = figure()
    # (sqrt(pur**2+puh**2))/(sqrt(ur**2+uh**2))
    # ratio of magnetic-pressure- to pressure-averaged radial velocity:
    contourf(r2 * sin(h2), r2 * cos(h2), old_div(mpur, pur), levels=vratlevs,
             cmap='jet')
    colorbar()
    contour(r2 * sin(h2), r2 * cos(h2), old_div(mpur, pur), levels=[1.],
            colors='w')
    plot([0., xscale], [0., 0.], color='k', linestyle='dotted')
    # streamplot(xflow, yflow, pvx, pvy,color='k')
    streamplot(xflow, yflow, vx, vy, color='k')
    xlim(0.5, xscale)
    ylim(-xscale * hdisk, xscale * hdisk)
    xlabel(r'$\varpi$')
    ylabel(r'$z$')
    bhole(rhor)
    fig.set_size_inches(5 * 2 + 1, 5 * 2 * hdisk + 1.5)
    fig.tight_layout(pad=0.5)
    savefig(dire + '/streamband_mag.eps')
    savefig(dire + '/streamband_mag.png')
    close()
    # vertical slice:
    rrange = double((r2 > 5.) * (r2 < 10.))
    urhmean = old_div((ur * rho * rrange).mean(axis=0),
                      (rho * rrange).mean(axis=0))
    uhhmean = old_div((uh * rho * rrange).mean(axis=0),
                      (rho * rrange).mean(axis=0))
    urhmeanp = old_div((pur * p * rrange).mean(axis=0),
                       (p * rrange).mean(axis=0))
    uhhmeanp = old_div((puh * p * rrange).mean(axis=0),
                       (p * rrange).mean(axis=0))
    # print shape(urhmean)
    # print shape(h)
    th = unique(h2)
    clf()
    fig = figure()
    subplot(211)
    plot(cos(th), urhmean, color='k')
    plot(cos(th), urhmeanp, color='r')
    # plot(cos(th), uhhmean, color='k', linestyle='dotted')
    # plot(cos(th), uhhmeanp, color='r', linestyle='dotted')
    xlabel(r'$\cos\theta$')
    ylabel(r'$u^r$')
    ylim(-0.035, 0.005)
    xlim(-hdisk, hdisk)
    subplot(212)
    plot(cos(th), old_div(urhmeanp, urhmean), color='k')
    xlabel(r'$\cos\theta$')
    ylabel(r'$\langle u^r\rangle_p / \langle u^r\rangle_\rho$')
    xlim(-hdisk, hdisk)
    ylim(0., 1.)
    fig.set_size_inches(8, 6)
    fig.tight_layout(pad=1.0, h_pad=0.5, w_pad=0.5)
    savefig(dire + '/vverts.eps')
    savefig(dire + '/vverts.png')
    close()
    clf()
    contourf(r2 * sin(h2), r2 * cos(h2), uh, levels=vlevs, norm=norm)
    colorbar()
    # contour(r2*sin(h2), r2*cos(h2), 1./((r2*sin(h2))**1.5+a), colors='k',levels=olevs,linewidths=1)
    contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3), colors='w')
    xlim(0., 20.)
    ylim(-10., 10.)
    xlabel(r'$\varpi$')
    ylabel(r'$z$')
    bhole(rhor)
    savefig(dire + '/uh.eps')
    # turbulent velocity parameters:
    vmin = 1e-8
    vmax = 10.
    ono = 100
    vlevs = (old_div(vmax, vmin))**(old_div(arange(ono),
                                            double(ono - 1))) * vmin
    norm = BoundaryNorm(vlevs, ncolors=cmap.N, clip=True)
    # NOTE(review): ``dmatrix & False`` is always False, so both
    # correlation-matrix sections below are dead code as written.
    if (dmatrix & False):
        clf()
        fig = figure()
        # subplot(331)
        contourf(r2 * sin(h2), r2 * cos(h2), vtrace, levels=vlevs, norm=norm)
        colorbar()
        # contour(r2*sin(h2), r2*cos(h2), 1./((r2*sin(h2))**1.5+a), colors='k',levels=olevs,linewidths=1)
        contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3), colors='w',
                linestyles='dotted')
        # contour(r2*sin(h2), r2*cos(h2), cos(h2), colors='y',linestyles='dashed',levels=[-hdisk, hdisk])
        xlim(0., 20.)
        ylim(-10., 10.)
        xlabel(r'$\varpi$')
        ylabel(r'$z$')
        bhole(rhor)
        savefig(dire + '/vturb.eps')
        close()
        vlevs = (2. * (old_div(arange(ono), double(ono - 1))) - 1.) * 0.5
        vlevs[0] = -1.
        vlevs[ono - 1] = 1.
        norm = BoundaryNorm(vlevs, ncolors=cmap.N, clip=True)
        clf()
        fig = figure()
        # 3x3 panel grid of correlation-matrix components over the trace:
        subplot(331)
        contourf(r2 * sin(h2), r2 * cos(h2), old_div(dxy[0, 0], vtrace),
                 levels=vlevs, norm=norm)
        contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3), colors='w',
                linestyles='dotted')
        title(r'$\Delta_{rr}$')
        xlim(0., 20.)
        ylim(-10., 10.)
        xlabel(r'$\varpi$')
        ylabel(r'$z$')
        bhole(rhor)
        subplot(332)
        contourf(r2 * sin(h2), r2 * cos(h2), old_div(dxy[0, 1], vtrace),
                 levels=vlevs, norm=norm)
        contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3), colors='w',
                linestyles='dotted')
        title(r'$\Delta_{r\theta}$')
        xlim(0., 20.)
        ylim(-10., 10.)
        xlabel(r'$\varpi$')
        ylabel(r'$z$')
        bhole(rhor)
        subplot(333)
        contourf(r2 * sin(h2), r2 * cos(h2), old_div(dxy[0, 2], vtrace),
                 levels=vlevs, norm=norm)
        contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3), colors='w',
                linestyles='dotted')
        title(r'$\Delta_{r\varphi}$')
        xlim(0., 20.)
        ylim(-10., 10.)
        xlabel(r'$\varpi$')
        ylabel(r'$z$')
        bhole(rhor)
        subplot(334)
        contourf(r2 * sin(h2), r2 * cos(h2), old_div(dxy[1, 0], vtrace),
                 levels=vlevs, norm=norm)
        contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3), colors='w',
                linestyles='dotted')
        title(r'$\Delta_{\theta r}$')
        xlim(0., 20.)
        ylim(-10., 10.)
        xlabel(r'$\varpi$')
        ylabel(r'$z$')
        bhole(rhor)
        subplot(335)
        contourf(r2 * sin(h2), r2 * cos(h2), old_div(dxy[1, 1], vtrace),
                 levels=vlevs, norm=norm)
        contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3), colors='w',
                linestyles='dotted')
        title(r'$\Delta_{\theta\theta}$')
        xlim(0., 20.)
        ylim(-10., 10.)
        xlabel(r'$\varpi$')
        ylabel(r'$z$')
        bhole(rhor)
        subplot(336)
        contourf(r2 * sin(h2), r2 * cos(h2), old_div(dxy[1, 2], vtrace),
                 levels=vlevs, norm=norm)
        contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3), colors='w',
                linestyles='dotted')
        title(r'$\Delta_{\theta\varphi}$')
        xlim(0., 20.)
        ylim(-10., 10.)
        xlabel(r'$\varpi$')
        ylabel(r'$z$')
        bhole(rhor)
        subplot(337)
        contourf(r2 * sin(h2), r2 * cos(h2), old_div(dxy[2, 0], vtrace),
                 levels=vlevs, norm=norm)
        contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3), colors='w',
                linestyles='dotted')
        title(r'$\Delta_{\varphi r}$')
        xlim(0., 20.)
        ylim(-10., 10.)
        xlabel(r'$\varpi$')
        ylabel(r'$z$')
        bhole(rhor)
        subplot(338)
        contourf(r2 * sin(h2), r2 * cos(h2), old_div(dxy[2, 1], vtrace),
                 levels=vlevs, norm=norm)
        contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3), colors='w',
                linestyles='dotted')
        title(r'$\Delta_{\varphi\theta}$')
        xlim(0., 20.)
        ylim(-10., 10.)
        xlabel(r'$\varpi$')
        ylabel(r'$z$')
        bhole(rhor)
        subplot(339)
        contourf(r2 * sin(h2), r2 * cos(h2), old_div(dxy[2, 2], vtrace),
                 levels=vlevs, norm=norm)
        contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3), colors='w',
                linestyles='dotted')
        title(r'$\Delta_{\varphi\varphi}$')
        xlim(0., 20.)
        ylim(-10., 10.)
        xlabel(r'$\varpi$')
        ylabel(r'$z$')
        bhole(rhor)
        fig.set_size_inches(12, 12)
        fig.tight_layout(pad=1.0, h_pad=0.5, w_pad=0.5)
        savefig(dire + '/dmatrix.eps')
        close()
    if (dmatrix & False):
        # the tetrad has reasonable physical sense only if u^h << u^r,phi
        # NOTE(review): this dead branch references ``r`` (never defined in
        # this function) and calls ``xscale('log')`` after ``xscale`` was
        # rebound to the float 10. above — both would fail if enabled.
        drrdisk = old_div((dxy[0, 0] * wdisk * rho).mean(axis=1),
                          (wdisk * rho).mean(axis=1))
        dhhdisk = old_div((dxy[1, 1] * wdisk * rho).mean(axis=1),
                          (wdisk * rho).mean(axis=1))
        dppdisk = old_div((dxy[2, 2] * wdisk * rho).mean(axis=1),
                          (wdisk * rho).mean(axis=1))
        drpdisk = old_div((dxy[0, 2] * wdisk * rho).mean(axis=1),
                          (wdisk * rho).mean(axis=1))
        drhdisk = old_div((dxy[0, 1] * wdisk * rho).mean(axis=1),
                          (wdisk * rho).mean(axis=1))
        dhpdisk = old_div((dxy[1, 2] * wdisk * rho).mean(axis=1),
                          (wdisk * rho).mean(axis=1))
        dhpdiskplus = old_div((dxy[1, 2] * wdisk * cos(h2) * rho).mean(axis=1),
                              (wdisk * rho).mean(axis=1))
        # dhpdiskplus=(dxy[1,2]*wdisk*cos(h2)*rho).mean(axis=1)/(wdisk*rho).mean(axis=1)
        drhdiskplus = old_div((dxy[1, 0] * wdisk * cos(h2) * rho).mean(axis=1),
                              (wdisk * rho).mean(axis=1))
        # drhdiskplus=(dxy[1,0]*wdisk*cos(h2)*rho).mean(axis=1)/(wdisk*rho).mean(axis=1)
        dtot = drrdisk + dhhdisk + dppdisk
        clf()
        plot(r, old_div(drrdisk, dtot), color='k')
        plot(r, old_div(dhhdisk, dtot), color='g')
        plot(r, old_div(dppdisk, dtot), color='r')
        plot(r, old_div(drpdisk, dtot), color='r', linestyle='dotted')
        plot(r, old_div(drhdisk, dtot), color='g', linestyle='dotted')
        plot(r, old_div(dhpdisk, dtot), color='orange', linestyle='dotted')
        plot(r, old_div(dhpdiskplus, dtot), color='orange', linestyle='dashed')
        plot(r, old_div(drhdiskplus, dtot), color='g', linestyle='dashed')
        # vertical marker at the ISCO radius:
        plot(r * 0. + h.Risco(a), arange(nr) / double(nr - 1) * 2.
             - 1., color='k', linestyle='dotted')
        xlabel(r'$r$')
        ylabel(r'$\Delta_{ik} / \Delta_{\rm tot}$')
        xscale('log')
        xlim(1, 20)
        # ylim(-1e-2,1e-2)
        savefig(dire + '/dmatrix_rslice.eps')
        drrvert = old_div((dxy[0, 0] * rrange * rho).mean(axis=0),
                          (rrange * rho).mean(axis=0))
        dhhvert = old_div((dxy[1, 1] * rrange * rho).mean(axis=0),
                          (rrange * rho).mean(axis=0))
        dppvert = old_div((dxy[2, 2] * rrange * rho).mean(axis=0),
                          (rrange * rho).mean(axis=0))
        drpvert = old_div((dxy[0, 2] * rrange * rho).mean(axis=0),
                          (rrange * rho).mean(axis=0))
        drhvert = old_div((dxy[0, 1] * rrange * rho).mean(axis=0),
                          (rrange * rho).mean(axis=0))
        dhpvert = old_div((dxy[1, 2] * rrange * rho).mean(axis=0),
                          (rrange * rho).mean(axis=0))
        dvertot = drrvert + dhhvert + dppvert
        clf()
        plot(cos(th), old_div(drrvert, dvertot), color='k')
        plot(cos(th), old_div(dhhvert, dvertot), color='g')
        plot(cos(th), old_div(dppvert, dvertot), color='r')
        plot(cos(th), old_div(drpvert, dvertot), color='r',
             linestyle='dotted')
        plot(cos(th), old_div(drhvert, dvertot), color='k',
             linestyle='dotted')
        plot(cos(th), old_div(dhpvert, dvertot), color='orange',
             linestyle='dotted')
        xlabel(r'$\cos \theta$')
        ylabel(r'$\Delta_{ik} / \Delta_{\rm tot}$')
        savefig(dire + '/dmatrix_thslice.eps')
    close('all')
def split(str, splitter):
    """Split *str* on *splitter* and return the list of pieces.

    Thin wrapper around ``str.split``. (Note: the first parameter name
    shadows the ``str`` builtin inside this function.)
    """
    pieces = str.split(splitter)
    return pieces
def importieren(self, pfad = None, liste = None, ergaenzungsname = None, anzeigename_ergaenzen = False, nach_unten = False, force_gruppenname = None, force_scale = None, DBschema_erweitern = True): # Der Username der verwendet werden soll if len(auth_user_global) > 0: # Ist belegt auth_user = auth_user_global[0] else: auth_user = None self.iface.layerTreeView().setCurrentLayer(None) # None entspricht einem Null Pointer -> Auswahl wird entfernt -> nicht ausgewählt # Wird in der Regel verwendet wenn # Gemeindespezifische Daten geladen werden # zwecks Übersichtlichkeit self.anzeigename_aendern = anzeigename_ergaenzen self.gruppen_erg_name = ergaenzungsname # oberste Gruppe/Layer wird mit diesem Namen ergänzt! if pfad == "": return # Das Qgis Projektfile ist ein XML und wird # hier eingelesen try: #pfad = 'd:/delme.qgs' #xml = file(pfad).read() #QtWidgets.QMessageBox.about(None, "Fehler", str(locale.getpreferredencoding())) project_file = open(pfad,'r',-1,'UTF8') xml = project_file.read() d = QtXml.QDomDocument() d.setContent(xml) except IOError: QtWidgets.QMessageBox.about(None, "Fehler", "QGIS Projektdatei " + pfad + " nicht gefunden!") return # Die gewünschten Tagelemente aus dem XML herauslesen self.maps = d.elementsByTagName("maplayer") self.legends = d.elementsByTagName("legendlayer") self.gruppen = d.elementsByTagName("legendgroup") self.lyr = None self.joinlayerid = None #Zuerst den aktuellen Pfad auf dem #Qgis steht auslesen (kann z.B. ein lokaler Pfad sein #von dem ein Projekt geladen wird CurrentPath = QgsProject.instance().fileName() #Dann auf den jeweiligen Pfad setzen, von dem geladen wird. 
Sonst kann kein Projekt #mit absoluten Pfaden abgespeichert werden (für Layer die mit dem #VogisMenü geladen werden) QgsProject.instance().setFileName(pfad) #falls es länger dauert, ein kurzes Infofenster #für den Anwender progressi = QtWidgets.QProgressDialog('Lade Daten','Abbrechen',0,self.maps.length()) progressi.setFixedSize(350,90) btnCancel = QtWidgets.QPushButton() btnCancel.setText('Abbrechen') btnCancel.setFixedSize(70,30) progressi.setCancelButton(btnCancel) progressi.setWindowModality(1) #Schleife geht alle Layer die in der Legende aufscheinen durch. Hier #ist nämlich die reihenfolge festgelegt, wie sie in Qgis dargestellt werden #Diese Schleife brauch ich nur für die richtige Reihenfolge #der importierten Layer in Qgis zaehler = 0 # der Zähler für die Anzahl der geladenen Layer j = 0 #for j in range(self.legends.length(),-1,-1): for j in range(self.legends.length()): # Schleife geht alle Layer die in der maplayer tags aufscheinen durch # dort ist nämlich die wirkliche Information für die Darstellung im # Qgis. Also wird zuerst der Layer per ID in der Obigen # Schleife ausgewählt und dann in dieser Schleife im maplayertag # identifiziert # self.lyr=None for i in range(self.maps.length()): # prüfen ob der jeweilige layer nicht schon geladen ist. um das zu tun # müssen wir im vogis projektimport die identifikation über # die layerid tag machen. berücksichtigt werden muß auch # ob die layerid durch den ergaenzungsnamen erweitert wurde!! 
quelli = self.maps.item(i).namedItem("id").firstChild().toText().data() laden = True lyr_tmp = None for lyr_tmp in QgsProject.instance().mapLayers(): #alle bereits geladenen Layer durchgehen -> Dictionary #QtWidgets.QMessageBox.about(None, "Fehler", str(lyr_tmp)) if (ergaenzungsname == None) and (lyr_tmp == quelli): #Treffer: der Layer ist schon geladen laden = False if (ergaenzungsname != None) and (lyr_tmp == quelli + ergaenzungsname): #Treffer: der Layer ist schon geladen laden = False #Die Layerid ist in den legend tags und maplayer tags gleich #so kann ein layer genau identifiziert werden. ist laden zudem True #gehts also weiter if (self.maps.item(i).namedItem("id").firstChild().toText().data() == self.legends.item(j).namedItem("filegroup").namedItem("legendlayerfile").attributes().namedItem("layerid").nodeValue()) and laden: #ACHTUNG: Wieder aktivieren!!!!!!!!!! # wenn nur ein Teil der Layer eines Projekts geladen werden sollen. Die Liste enthält die # Namen dieser Layer if liste != None: brake_val = True for nd in range(len(liste)): if liste[nd] == self.legends.item(j).attributes().namedItem("name").nodeValue(): brake_val = False break if brake_val: continue # Nächster Layer, ist nicht auf der Liste # prüfen, ob der jeweilige Layer eine oder mehrere Jointabelle(n) verwendet self.joinlayerid = '' for sj in range(self.maps.item(i).namedItem("vectorjoins").childNodes().length()): # leider muss ich dann nochmals alles durchgehen.... 
for lj in range(self.maps.length()): if (self.maps.item(lj).namedItem("id").firstChild().toText().data() == self.maps.item(i).namedItem("vectorjoins").childNodes().item(sj).attributes().namedItem('joinLayerId').nodeValue()): self.joinlayerid = self.maps.item(i).namedItem("vectorjoins").childNodes().item(sj).attributes().namedItem('joinLayerId').nodeValue() #ACHTUNG: unbedingt den nodeValue der ID ändern wenn Gemeindeweise #geladen wird (DKM) Da in den Qgis Projekten der Gemeinden die jeweilig ID des Layers #der Einfachheit halber ident ist, würde so qgis den Layer nicht importieren!!! #So wie der Layername in der Darstellung geändert wird wird auch die ID des Nodes VOR #dem Laden geändert, damit Qgis das dann so übernimmt!! noddi = self.maps.item(i).namedItem("id") if ergaenzungsname != None: noddi.firstChild().setNodeValue(noddi.firstChild().nodeValue() + ergaenzungsname) #Abhängig von der vogisini wird das Encoding #aus der Projektdatei genommen oder CPG datei oder #wird auf System gesetzt #ist self.vogisEncoding == project dann werden die Einstellungen des Projekt verwendet base_name = os.path.dirname(pfad) + '/' + os.path.basename(self.maps.item(i).namedItem("datasource").firstChild().nodeValue()) # Achtung, zwischen absolutem und relativem Pfad unterscheiden if len(os.path.dirname(self.maps.item(i).namedItem("datasource").firstChild().nodeValue())) < 2: # relativer Pfad im QGIS Projekt! base_name = os.path.dirname(pfad) + '/' + os.path.basename(self.maps.item(i).namedItem("datasource").firstChild().nodeValue()) else: # absoluter Pfad im QGIS Projekt! 
base_name = self.maps.item(i).namedItem("datasource").firstChild().nodeValue() if vogisEncoding_global[0] == 'menue': # entweder CPG datei oder System setzen try: # gibts ein cpg datei datei = open(os.path.splitext(base_name)[0] + '.cpg','r') codierung_string = datei.read() datei.close() self.maps.item(i).namedItem("provider").attributes().namedItem('encoding').setNodeValue(codierung_string) except IOError: # Es wird der Wert System zugewiesen self.maps.item(i).namedItem("provider").attributes().namedItem('encoding').setNodeValue('System') # unbedingt ALLES DESELEKTIEREN, sonst Probleme mit der Reihenfolge self.iface.layerTreeView().setCurrentLayer(None) # None entspricht einem Null Pointer -> Auswahl wird entfernt -> nicht ausgewählt nv_ds = '' nv_provider = '' nv_encoding = '' ############################################################################# # Das Umschalten der Vektordaten auf die Geodatenbank - unter Bedingungen # es darf kein Layer aus einer Geodatenbank hier verwurschtelt werden ############################################################################# if self.maps.item(i).attributes().namedItem('type').nodeValue() == 'vector' and vogisDb_global[0] != 'filesystem geodaten' and self.maps.item(i).namedItem("datasource").firstChild().nodeValue().find('host') < 0: tablename = self.maps.item(i).namedItem("datasource").firstChild().nodeValue() sql = '' rc=[] db_ogr = '' # prüfen ob der layer eine shape datenquelle ist # und ob ein subset definiert ist if tablename.find('.shp') > 0 and (tablename.lower().find('subset') > 0 or tablename.lower().find('SUBSET') > 0 or tablename.lower().find('Subset') > 0): rc = textfilter_subset(self.maps.item(i).namedItem("datasource").firstChild().nodeValue()) tablename = rc[0] sql = rc[1] db_ogr = rc[0] else: tablename = os.path.basename(self.maps.item(i).namedItem("datasource").firstChild().nodeValue()).split('.shp')[0] db_ogr = tablename if ergaenzungsname != None and DBschema_erweitern: tablename = str.lower('\"' + 
ergaenzungsname + '\".\"' + tablename + '\"') else: tablename = str.lower('\"vorarlberg".\"' + tablename + '\"') # Sonderzeichen berücksichtigen! tablename = tablename.replace(('ä'),'ae') tablename = tablename.replace(('Ä'),'Ae') tablename = tablename.replace(('ö'),'oe') tablename = tablename.replace(('Ö'),'Oe') tablename = tablename.replace(('ü'),'ue') tablename = tablename.replace(('Ü'),'Ue') tablename = tablename.replace(('ß'),'ss') tablename = tablename.replace('. ','_') ################################################ # Geometriespalte bestimmen -- geht nur mit OGR param_list = str.split(vogisDb_global[0]) host = '' dbname='' port='' for param in param_list: if str.find(param,'dbname') >= 0: dbname = str.replace(param,'dbname=','') elif str.find(param,'host=') >= 0: host = str.replace(param,'host=','') elif str.find(param,'port=') >= 0: port = str.replace(param,'port=','') try: if auth_user == None: outputdb = ogr.Open('pg: host=' + host + ' dbname=' + dbname + ' schemas=vorarlberg' + ' port=' + port) else: outputdb = ogr.Open('pg: host=' + host + ' dbname=' + dbname + ' schemas=vorarlberg' + ' port=' + port + ' user='******'the_geom' ################################################## # Geometriespalte Ende if self.maps.item(i).namedItem("datasource").firstChild().nodeValue().find('ogc_fid') > 0: # Achtung, das Attribut user darf nicht zwingend immer nur klein sein -> Siehe Usermapping in der Doku if auth_user == None: dbpath = str.lower(vogisDb_global[0] + ' sslmode=disable table=' + tablename + ' (' + geom_column + ') sql') + sql else: dbpath = str.lower(vogisDb_global[0]) + ' user='******' sslmode=disable table=' + tablename + ' (' + geom_column + ') sql') + sql else: # Achtung, das Attribut user darf nicht zwingend immer nur klein sein -> Siehe Usermapping in der Doku if auth_user == None: dbpath = str.lower(vogisDb_global[0] + ' sslmode=disable key=ogc_fid table=' + tablename + ' (' + geom_column + ') sql') + sql else: dbpath = 
str.lower(vogisDb_global[0]) + ' user='******' sslmode=disable key=ogc_fid table=' + tablename + ' (' + geom_column + ') sql') + sql nv_ds = self.maps.item(i).namedItem("datasource").firstChild().nodeValue() nv_provider = self.maps.item(i).namedItem("provider").firstChild().nodeValue() nv_encoding = self.maps.item(i).namedItem("provider").attributes().namedItem('encoding').nodeValue() self.maps.item(i).namedItem("datasource").firstChild().setNodeValue(dbpath) self.maps.item(i).namedItem("provider").firstChild().setNodeValue('postgres') self.maps.item(i).namedItem("provider").attributes().namedItem('encoding').setNodeValue('UTF-8') if os.path.abspath(os.path.dirname(__file__)) != path_global[0]: return # Layer einlesen! proj_read = QgsProject.instance().readLayer(self.maps.item(i)) # Der Fortschrittsbalken progressi.setValue(j) progressi.forceShow() if progressi.wasCanceled(): break #QtGui.QMessageBox.about(None, "Achtung", str(proj_read)) if not proj_read and vogisDb_global[0] == 'filesystem geodaten': # hier wird der Layer geladen und gemäß den Eintragungen # der DomNode auch gerendert und dargestellt QtWidgets.QMessageBox.about(None, "Achtung", "Layer " + self.legends.item(j).attributes().namedItem("name").nodeValue() + " nicht gefunden!") continue elif not proj_read and vogisDb_global[0] != 'filesystem geodaten': # Probieren auf Filesystem umzuschalten QtWidgets.QMessageBox.about(None, "Achtung", "Layer - " + self.legends.item(j).attributes().namedItem("name").nodeValue() + " - in der Datenbank nicht gefunden - es wird aufs Filesystem umgeschaltet") self.maps.item(i).namedItem("datasource").firstChild().setNodeValue(nv_ds) self.maps.item(i).namedItem("provider").firstChild().setNodeValue(nv_provider) self.maps.item(i).namedItem("provider").attributes().namedItem(nv_encoding) if not QgsProject.instance().readLayer(self.maps.item(i)): #Trotzdem nicht gefunden, wir geben auf QtWidgets.QMessageBox.about(None, "Achtung", "Layer " + 
self.legends.item(j).attributes().namedItem("name").nodeValue() + " nicht gefunden!") continue # den Anzeigenamen im Qgis ebenfalls ändern # dazu zuerst den richtigen Layer anhand der Layerid auswählen # leginterface = self.iface.legendInterface() #for lyr_tmp in leginterface.layers(): for lyr_tmp in QgsProject.instance().mapLayers(): #alle bereits geladenen Layer durchgehen -> Dictionary if lyr_tmp == noddi.firstChild().nodeValue(): self.lyr = QgsProject.instance().mapLayers()[lyr_tmp] if force_scale != None: self.lyr.setMaximumScale(25000) self.lyr.setScaleBasedVisibility(True) #Abhängig von der vogisini wird das KBS #aus der Projektdatei genommen oder aus dem *.prj File if vogisKBS_global[0] == 'menue': #Koordinatenbezugssystem aus dem prj file holen, wenn vorhanden, #und von dort zuweisen (die Projekteinstellung überschreiben) try: datei = open(os.path.splitext(self.lyr.source())[0] + '.prj','r') bezugssystem_string = datei.read() #falls kein sauberer EPSG String, machen wir eine Zuweisung für unser 31254 if (re.search('MGI\D+Austria\D+GK\D+West',bezugssystem_string, re.I)) != None: #Arcgis macht keinen sauberen EPSG String bezugssystem_crs = QgsCoordinateReferenceSystem() bezugssystem_crs.createFromSrid(31254) else: bezugssystem_crs = QgsCoordinateReferenceSystem(bezugssystem_string) datei.close() self.lyr.setCrs(bezugssystem_crs) except IOError: pass #dann in der Applikation registrieren #QgsMapLayerRegistry.instance().addMapLayer(self.lyr) # gejointe Tabellen brauchen eine Spezialbehandlung: Joininfo wird # ausgelesen, dann der join gelöscht und erst wenn alles geladen wurde # wieder neu erstellt. Sonst kann es Probleme geben! 
unterstütz # werden beleibig viele layer mit beliebig vielen joins # es handelt sich um einen layer mir midestens einem eingetragenen join single_lyr_join = lyr_join() # eigenes struktur objekt instanzieren if not self.joinlayerid == '': # checken ob für den layer mindestens ein join eingetragen ist single_lyr_join.joinlayer = self.lyr single_lyr_join.joininfo = self.lyr.vectorJoins() self.joinliste.append(single_lyr_join) # eine liste mit joinlayern und deren joininfo führen for rem_join in self.lyr.vectorJoins(): # für den joinlayer die joins entfernen - es können merhere sein kasperle = rem_join.joinLayerId self.lyr.removeJoin(str(rem_join.joinLayerId)) #Und nun noch den Layernamen für die Darstellung #im Qgis ergänzen. Siehe oben, bei gemeindeweisem Laden if (ergaenzungsname != None) and (self.lyr != None) and self.anzeigename_aendern: # noch ein boolean wegen der wasserwirtschaft!! if not (self.lyr.name().find(ergaenzungsname) > -1): # ACHTUNG: Sonst wird bei wiederholtem klicken der Name nochmal rangehängt if self.lyr.name().find("(a)") > -1: aktname = str.strip((self.lyr.name().rstrip("(a)"))) + "-" + ergaenzungsname + " (a)" self.lyr.setName(aktname) else: aktname = str.strip(self.lyr.name())+ "-" + ergaenzungsname self.lyr.setName(aktname) # abschließend schauen ob der aktiviert ist if (self.legends.item(j).attributes().namedItem("checked").nodeValue() == "Qt::Unchecked") and not (self.lyr is None): #leginterface.setLayerVisible(self.lyr,False) lyr_tree = QgsProject.instance().layerTreeRoot().findLayer(self.lyr) lyr_tree.setItemVisibilityChecked(False) index = QgsProject.instance().layerTreeRoot() zwetsch =QgsProject.instance().layerTreeRoot().findLayer(self.lyr.id()) dummy = zwetsch.clone() # Die Layer die später geladen werden müssen # auch weiter unte in der Legende sein Reihenfolge) # das wird mit der Variable zaehler gesteuert # QGIS höher 2.6 index_ins = 
index_zuweisen(self.legends.item(j).attributes().namedItem("name").nodeValue(),self.legends.item(j).parentNode()) index.insertChildNode(-1,dummy) zaehler = zaehler + 1 zwetsch.parent().removeChildNode(zwetsch) # sonst gibts probleme in der Reihenfolge # wenn gruppen und layer im top level vermischt if not (self.legends.item(j).parentNode().nodeName() == "legendgroup") and (force_gruppenname is None): zwetsch =QgsProject.instance().layerTreeRoot().findLayer(self.lyr.id()) dummy = zwetsch.clone() index.insertChildNode(index_ins,dummy) zwetsch.parent().removeChildNode(zwetsch) #abschließend schauen ob der Layer aufgeklappt ist #und das flag setzen if (self.legends.item(j).attributes().namedItem("open").nodeValue() == "false") and not (self.lyr is None): dummy.setExpanded(False) elif (self.legends.item(j).attributes().namedItem("open").nodeValue() == "true") and not (self.lyr is None): dummy.setExpanded(True) # hier könnte abgebrochen werden, wenn die layer einfach # nur reingeladen werden OHNE in Gruppenlyer abgelegt zu werden # continue ####################################################### # hier beginnt der Programmteil der die Gruppenlayer # behandelt - entweder wenn im Projektfile definiert # oder einfach wenn es im Menü # erwünscht wird ####################################################### if (self.legends.item(j).parentNode().nodeName() == "legendgroup") or not (force_gruppenname is None): self.gruppe_vorhanden = False #ACHTUNG: Layername und direkt übergeordneter Gruppenname #müssen sich unterscheiden, sonst kommts zu einem Fehler. 
Sollts #dennoch mal vorkommen, wird es hier abgefangen if self.legends.item(j).parentNode().attributes().namedItem("name").nodeValue() == self.legends.item(j).attributes().namedItem("name").nodeValue(): aktname = self.lyr.name() self.lyr.setName(aktname+"_") #prüfen ob die Gruppe schon angelegt ist grp_name = self.legends.item(j).parentNode().attributes().namedItem("name").nodeValue() #Name der Gruppe aus dem QGS Projektfile grp_obj = QgsProject.instance().layerTreeRoot().findGroup(grp_name) if (isinstance(grp_obj,QgsLayerTreeGroup)) and (not (grp_obj is None)): self.gruppe_vorhanden = True grp_name = force_gruppenname #Name ist übergeben worden grp_obj = QgsProject.instance().layerTreeRoot().findGroup(grp_name) if (isinstance(grp_obj,QgsLayerTreeGroup)) and (not (grp_obj is None)): self.gruppe_vorhanden = True ######################################################### # Gruppenlayer aus Projektdatei ######################################################### if self.legends.item(j).parentNode().attributes().namedItem("name").nodeValue() != "" and self.legends.item(j).parentNode().nodeName() == "legendgroup": QgsLayerTreeRegistryBridge(QgsProject.instance().layerTreeRoot(),QgsProject.instance()) kind = self.legends.item(j).parentNode() gruppen_hierarchie = pos_gruppe() gruppen_liste = [] while (kind.nodeName() == "legendgroup"): gruppen_hierarchie.name = kind.attributes().namedItem("name").nodeValue() # der name der dem layer unmittelbar übergeordnete Gruppe: Ebene gruppen_hierarchie.index = index_zuweisen(kind.attributes().namedItem("name").nodeValue(),kind.parentNode()) # Index der Darstellungsreihenfolge der Gruppe in ihrer Hierarchie gruppen_hierarchie.ex = kind.attributes().namedItem("open").nodeValue() gruppen_hierarchie.ch = kind.attributes().namedItem("checked").nodeValue() gruppen_liste.append(copy.deepcopy(gruppen_hierarchie)) # ACHTUNG: Referenz!! 
kind = kind.parentNode() # grp enthält das qtreewidgetitem Objekt der Gruppe!, in die der geladene # Layer verschoben werden soll! grp = sublayer(QgsProject.instance().layerTreeRoot(),gruppen_liste, self.gruppen_erg_name, nach_unten, anzeigename_ergaenzen)[0] #sollten es mehrere sein, immer nur die erste nehmen - siehe Erklärung beim Sub selbst zwtsch = QgsProject.instance().layerTreeRoot().findLayer(self.lyr.id()) dummy = zwtsch.clone() if not (isinstance(grp,QgsLayerTreeGroup)) or grp is None: QtWidgets.QMessageBox.about(None, "ACHTUNG","Anlegen der Gruppe gescheitert") break index_layer = index_zuweisen(self.legends.item(j).attributes().namedItem("name").nodeValue(),self.legends.item(j).parentNode()) # QtGui.QMessageBox.about(None, "LayeriD", str(dummy.layerId())) grp.insertChildNode(index_layer,dummy) zwtsch.parent().removeChildNode(zwtsch) # zwilling entfernen! ########################################################## # hier Endet der Teil der Gruppenlayer aus Projektdatei!! ######################################################### letzterplatz = False #Flagvariable ermittelt ob die Gruppe ganz nach unten gehört #die gruppe in die der layer eingebettet ist kommt nicht aus #einem projekt, sondern wird erzwungen. hier gibts allerdings #nur eine ebene (was das ganze einfacher macht) if (not force_gruppenname is None): # gruppe anlegen gruppen_hierarchie = pos_gruppe() gruppen_hierarchie.name = force_gruppenname # grp = sublayer(QgsProject.instance().layerTreeRoot(),leginterface,[gruppen_hierarchie])[0] grp = sublayer(QgsProject.instance().layerTreeRoot(),[gruppen_hierarchie])[0] zwtsch = QgsProject.instance().layerTreeRoot().findLayer(self.lyr.id()) #der geladene layer dummy = zwtsch.clone() # wiviele layer sind in der gruppe bereits vorhanden? 
# baum = QgsLayerTreeModel(grp) # anzahl_top_level_eintraege = baum.rowCount() baum = grp.findLayers() anzahl_top_level_eintraege = len(baum) baum = None # Sonst Absturz bei grp.parent().removeChildNode(grp) da baum auf ein Nichts refenrenziert! # den neuen ganz hinten einsetzen grp.insertChildNode(anzahl_top_level_eintraege,dummy) zwtsch.parent().removeChildNode(zwtsch) grp.setExpanded(False) if nach_unten: # ganz nach unten mit der gefüllten Gruppe, wenn das Flag gesetzt ist if not self.gruppe_vorhanden: dummy = grp.clone() QgsProject.instance().layerTreeRoot().insertChildNode(-1,dummy) grp.parent().removeChildNode(grp) else: # die Layer werden NICHT in einen self.gruppenlayer geladen # sollen aber nach unten verschoben werden if nach_unten: # wiviele layer sind in der gruppe bereits vorhanden? baum = QgsLayerTreeModel(QgsProject.instance().layerTreeRoot()) anzahl_top_level_eintraege = baum.rowCount() baum = None # Sonst Absturz bei grp.parent().removeChildNode(grp) da baum auf ein Nichts refenrenziert! zwtsch = QgsProject.instance().layerTreeRoot().findLayer(self.lyr.id()) #der geladene layer dummy = zwtsch.clone() # den neuen ganz hinten einsetzen QgsProject.instance().layerTreeRoot().insertChildNode(anzahl_top_level_eintraege,dummy) zwtsch.parent().removeChildNode(zwtsch) # abschließend schauen ob der Layer aufgeklappt ist # und das flag setzen - beim Verschieben in die Gruppenlayer # verändert sich das nämlich manchmal... if (self.legends.item(j).attributes().namedItem("open").nodeValue() == "false") and not (self.lyr is None): dummy.setExpanded(False) elif (self.legends.item(j).attributes().namedItem("open").nodeValue() == "true") and not (self.lyr is None): dummy.setExpanded(True) # der nachfolgende Code erzwingt eine Aktualisierung # der Legende und des MapWindow # Ansonsten kanns im Mapwindow Darstellungsprobleme geben! Wieso?? 
if not self.lyr is None: anzeigename = self.lyr.name() self.lyr.setName(anzeigename+" ") self.lyr.setName(anzeigename) else: QtWidgets.QMessageBox.about(None, "Achtung", "Layer " + self.legends.item(j).attributes().namedItem("name").nodeValue() + " nicht gefunden!") # unbedingt ALLES DEselektieren, sonst Probleme mit Reihenfolge self.iface.layerTreeView().setCurrentLayer(None) # None entspricht einem Null Pointer -> Auswahl wird entfernt -> nicht ausgewählt #Unbedingt zurücksetzen sonst kanns beim wiederholten #laden des gleichen Projektfiles einen Fehler geben: #wenn nämlich die Schleife erneut beginnt, nicht lädt und self.lyr #beim vorherigen laden steht! self.lyr = None # und weiter in der Schleife! # UNBEDINGT am Schluss QGis wieder auf den usprünglichen # Pfad zurücksetzen QgsProject.instance().setFileName(CurrentPath) #ACHTUNG: Aus irgendeinem Grund gibts Probleme mit den Gruppenlayer: Wenn innerhalb der so angelegten Gruppen # ein Layer ausgewählt wird, gibts beim Laden danach einen Fehler. Es MUSS deshalb der oberste Eintrag # der Legende vor allem Laden als Aktueller Layer gesetzt werden!!! #Objekte besser löschen self.legends = None self.legendTree = None self.maps = None self.legends = None self.gruppen = None ###################################################################### # Abschlussprüfung: sind alle da #prüfen ob alle Layer der Liste geladen wurden #das ist notwendig, da ja beim Projektladen alles passen kann aber #ein Layer nicht vorhanden ist ###################################################################### fehler = 0 layerzaehler = 0 # Weg mit dem Fortschrittsbalken # self.info.close() if liste != None: #wenn nur ein Teil der Layer eines Projekts geladen wurde. 
Die Liste enthält die #Namen dieser Layer for nd in range(len(liste)): for lyr_tmp_id in QgsProject.instance().mapLayers(): #alle bereits geladenen Layer durchgehen -> Dictionary lyr_tmp = QgsProject.instance().mapLayer(lyr_tmp_id) # Unbedingt die optionale Änderung des # Anzeigenamens (z.B. DKM) mitberücksichtigen!) if (ergaenzungsname != None) and self.anzeigename_aendern: if liste[nd] + "-" + ergaenzungsname == lyr_tmp.name(): layerzaehler = layerzaehler +1 elif liste[nd].rstrip(" (a)") + "-" + ergaenzungsname + ' (a)' == lyr_tmp.name(): layerzaehler = layerzaehler +1 else: if liste[nd] == lyr_tmp.name(): layerzaehler = layerzaehler +1 # ACHTUNG: Wurden nicht alle in der Liste (fürs importieren übergebne Layerliste mit Layernamen) angeführten Layer # anhand des Layernamensim Projekt gefunden gibts # hier noch eine Fehlermeldung if not liste is None: if len(liste) > layerzaehler: #Ints! Dann wurde was nicht geladen QtWidgets.QMessageBox.about(None, "Achtung", "Nicht alle Layer aus " + pfad + " konnte(n) geladen werden!!") # gejointe Relationen wiederherstellen # aber erst ganz am Schluss!! for singlejoin in self.joinliste: for singlejoininfo in singlejoin.joininfo: singlejoin.joinlayer.addJoin(singlejoininfo)
# Report average/maximum intensity inside a user-confirmed sub-region of the
# histogram `Hrz`, then append the result to the output file.
# NOTE(review): `Hrz`, `XMG`, `YMG`, `lx` and `args` are defined earlier in the
# script (outside this chunk). `Hrz` appears to be a 2-D intensity array
# plotted on the `XMG`/`YMG` mesh, and `lx` the left column bound chosen
# earlier -- confirm upstream.
by = 0  # bottom row bound of the region of interest
rx = Hrz.shape[0]  # right bound: full extent of Hrz's first axis
ty = Hrz.shape[1]  # top bound: full extent of Hrz's second axis
# Show the candidate region so the user can eyeball it before the numbers
# are computed (non-blocking so input() below can run).
fig = plt.figure()
plt.pcolormesh(XMG[by:ty, lx:rx], YMG[by:ty, lx:rx], Hrz.T[by:ty, lx:rx], vmin=0.0, vmax=0.007 * np.amax(Hrz))
plt.show(block=False)
# The reply itself is never inspected; the prompt only pauses the script.
response = input("How about now? ")
print(lx, rx, ty, by)
# Intensity statistics over the selected window of the transposed histogram.
Iavg = np.mean(Hrz.T[by:ty, lx:rx])
Imax = np.amax(Hrz.T[by:ty, lx:rx])
print("Average Intensity in region = %s" % Iavg)
print("Maximum Intensity in region = %s" % Imax)
# Extract the iteration number from the input filename, e.g.
# "something_a_b_<iter>.ext" -> <iter>.  Shadows the builtin `iter`.
iter = int(str.split( str.split(args.load, '_')[3], '.')[0]) # very specific naming convention in main_gromacs.py
# Append to the log if it exists, otherwise start a new one; both branches
# write the same "<iter> <Imax>" record.
if os.path.isfile(args.output):
    with open(args.output, 'a') as f:
        f.write("{:<5d}{:6.5f}\n".format(iter, Imax))
else:
    with open(args.output, 'w') as f:
        f.write("{:<5d}{:6.5f}\n".format(iter, Imax))
def ascframe(prefix='dumps/dump000', xmax=40.):
    """Read one ASCII simulation dump and render its diagnostic plots.

    Reads the ``<prefix>_*.dat`` text files (radial mesh, polar mesh,
    density, gas/magnetic pressure, velocity, origin coordinates, magnetic
    field) and writes several EPS/PNG figures next to them: density
    (``_rho``), origin tracers (``_ori``), plasma beta (``_beta``), radial
    velocity (``_ur``), angular velocity (``_o``) and a velocity streamline
    plot (``_stream``).

    :param prefix: path prefix of the dump files to read.
    :param xmax: outer extent of the plotted region (code units).

    NOTE(review): relies on star-imported pylab/numpy names (``asarray``,
    ``double``, ``meshgrid``, ``contourf`` ...) and on project helpers
    ``dinforead``, ``bhole``, ``arms``, ``old_div``, ``griddata`` and
    ``BoundaryNorm`` defined/imported elsewhere in the file -- confirm at
    file top.
    """
    # Per-quantity data files belonging to this dump.
    rfile = prefix + '_r.dat'
    hfile = prefix + '_h.dat'
    rhofile = prefix + '_rho.dat'
    pfile = prefix + '_p.dat'
    pmfile = prefix + '_pm.dat'
    uufile = prefix + '_uu.dat'
    udfile = prefix + '_ud.dat'  # NOTE(review): assigned but never read below
    bfile = prefix + '_b.dat'
    orifile = prefix + '_ori.dat'
    # Grid sizes, spin and time from the dump's info file; nr/nh are
    # re-derived from the actual mesh files further down.
    nr, nh, nphi, a, t = dinforead(prefix)
    # radial mesh:
    fr = open(rfile, 'r')
    s = str.split(str.strip(fr.readline()))
    r = []
    while (s):  # an empty (whitespace-only) line terminates the column
        r.append(s[0])
        s = str.split(str.strip(fr.readline()))
    fr.close()
    r = asarray(r, dtype=double)
    nr = size(r)
    # polar angle mesh:
    fh = open(hfile, 'r')
    s = str.split(str.strip(fh.readline()))
    th = []
    while (s):
        th.append(s[0])
        s = str.split(str.strip(fh.readline()))
    fh.close()
    th = asarray(th, dtype=double)
    nh = size(th)
    # 2d-grid (order??)
    h2, r2 = meshgrid(th, r)
    print(shape(r2))
    print(nr, nh)
    # density:
    frho = open(rhofile, 'r')
    s = str.split(str.strip(frho.readline()))
    rho = []
    while (s):
        rho.append(s[0])
        s = str.split(str.strip(frho.readline()))
    frho.close()
    rho = asarray(rho, dtype=double)
    rho = reshape(rho, [nr, nh])
    # pressure: gas (p) and magnetic (pm) read in lockstep, line by line
    fp = open(pfile, 'r')
    fpm = open(pmfile, 'r')
    s = str.split(str.strip(fp.readline()))
    sm = str.split(str.strip(fpm.readline()))
    p = []
    pm = []
    while (s):
        p.append(s[0])
        pm.append(sm[0])
        s = str.split(str.strip(fp.readline()))
        sm = str.split(str.strip(fpm.readline()))
    fp.close()
    fpm.close()
    p = asarray(p, dtype=double)
    pm = asarray(pm, dtype=double)
    p = reshape(p, [nr, nh])
    pm = reshape(pm, [nr, nh])
    # velocity field: columns appear to be (u^t, u^r, u^theta, u^phi);
    # omega = u^phi / u^t -- TODO confirm against the dump writer
    fuu = open(uufile, 'r')
    s = str.split(str.strip(fuu.readline()))
    ur = []
    uh = []
    omega = []
    while (s):
        ur.append(s[1])
        uh.append(s[2])
        omega.append(old_div(double(s[3]), double(s[0])))
        s = str.split(str.strip(fuu.readline()))
    fuu.close()
    ur = asarray(ur, dtype=double)
    uh = asarray(uh, dtype=double)
    ur = reshape(ur, [nr, nh])
    uh = reshape(uh, [nr, nh])
    omega = asarray(omega, dtype=double)
    omega = reshape(omega, [nr, nh])
    # origin variables: (r, theta, phi) tracer coordinates per cell
    fori = open(orifile, 'r')
    s = str.split(str.strip(fori.readline()))
    orr = []
    orth = []
    orphi = []
    while (s):
        orr.append(s[0])
        orth.append(s[1])
        orphi.append(s[2])
        s = str.split(str.strip(fori.readline()))
    fori.close()
    orr = reshape(asarray(orr, dtype=double), [nr, nh])
    orth = reshape(asarray(orth, dtype=double), [nr, nh])
    orphi = reshape(asarray(orphi, dtype=double), [nr, nh])
    # magnetic field (the last component is A_\phi)
    fb = open(bfile, 'r')
    s = str.split(str.strip(fb.readline()))
    aphi = []
    while (s):
        aphi.append(s[3])
        s = str.split(str.strip(fb.readline()))
    fb.close()
    aphi = reshape(asarray(aphi, dtype=double), [nr, nh])
    print("size(aphi) = " + str(shape(aphi)))
    # Event-horizon radius for spin a (Kerr metric, geometric units).
    rhor = 1. + sqrt(1. - a**2)
    cmap = plt.get_cmap('jet')
    ono = 30  # number of contour levels
    # --- density plot: log10(rho) levels in [-5, 1] ---
    lmin = -5.
    lmax = 1.
    lrholevs = (lmax - lmin) * arange(ono) / double(ono) + lmin
    norm = BoundaryNorm(lrholevs, ncolors=cmap.N, clip=True)
    # Cartesian (cylindrical-slice) coordinates of the polar mesh.
    x = r2 * sin(h2)
    y = r2 * cos(h2)
    clf()
    fig = figure()
    # 1e-3 density floor avoids log10(0)
    contourf(x, y, log10(rho + 1e-3), levels=lrholevs, norm=norm, cmap=cmap)
    contour(x, y, aphi, colors='k')  # field lines = A_phi contours
    xlim(0., xmax)
    ylim(-xmax / 4., xmax / 2.)
    bhole(rhor)
    # need to put time in dinfo!
    title('t=' + str(t) + ' (' + prefix + ')')
    savefig(prefix + '_rho.eps')
    savefig(prefix + '_rho.png')
    close()
    # --- origin-tracer plot: where did the material in each cell start ---
    nxx = 10
    rlevs = xmax * np.arange(nxx) / np.double(nxx)
    thlevs = np.pi * np.arange(nxx) / np.double(nxx)
    clf()
    contourf(x, y, orr, cmap=cmap, levels=rlevs)
    colorbar()
    # white: current mesh; black: origin coordinates
    contour(x, y, r2, colors='w', levels=rlevs)
    contour(x, y, h2, colors='w', levels=thlevs)
    contour(x, y, orr, colors='k', levels=rlevs)
    contour(x, y, orth, colors='k', levels=thlevs)
    plt.xlim(0., xmax)
    plt.ylim(-xmax / 4., xmax / 2.)
    plt.savefig(prefix + "_ori.png")
    close()
    # --- plasma-beta plot: log10(p/pm) levels in [-2, 5] ---
    lmin = -2.
    lmax = 5.
    lbetalevs = (lmax - lmin) * arange(ono) / double(ono) + lmin
    norm = BoundaryNorm(lbetalevs, ncolors=cmap.N, clip=True)
    clf()
    fig = figure()
    contourf(r2 * sin(h2), r2 * cos(h2), log10(p / pm), levels=lbetalevs, norm=norm, cmap=cmap)
    colorbar()
    contour(r2 * sin(h2), r2 * cos(h2), aphi, colors='k')
    xlim(0., xmax)
    ylim(-xmax / 2., xmax / 2.)
    bhole(rhor)
    # need to put time in dinfo!
    title('t=' + str(t) + ' (' + prefix + ')')
    savefig(prefix + '_beta.eps')
    savefig(prefix + '_beta.png')
    close()
    # --- radial-velocity plot: fixed symmetric levels in [-0.1, 0.1] ---
    vlevs = (arange(ono) / double(ono) * 2. - 1.) * 0.1
    # wv selects a mid-disc wedge; only used by the commented-out
    # auto-scaling below
    wv = where((r2 < 10.) & (r2 > 5.) & (fabs(cos(h2)) < 0.25))
    # vlevs[0]=ur[wv].min()*1.5
    # vlevs[ono-1]=ur[wv].max()*1.5
    norm = BoundaryNorm(vlevs, ncolors=cmap.N, clip=True)
    clf()
    fig = figure()
    contourf(r2 * sin(h2), r2 * cos(h2), ur, levels=vlevs, norm=norm)
    colorbar()
    # NOTE(review): 'color=' here is not a valid contour() kwarg
    # ('colors=' elsewhere) -- matplotlib may ignore or reject it; verify
    contour(r2 * sin(h2), r2 * cos(h2), ur, levels=[0.], color='w', linestyles='dotted')
    contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3), levels=lrholevs, colors='w')
    xlim(0., xmax)
    ylim(-xmax / 2., xmax / 2.)
    bhole(rhor)
    title('t=' + str(t))
    savefig(prefix + '_ur.eps')
    savefig(prefix + '_ur.png')
    close()
    # --- angular-velocity plot (same levels/norm as the ur plot) ---
    clf()
    fig = figure()
    contourf(r2 * sin(h2), r2 * cos(h2), omega, levels=vlevs, norm=norm)
    colorbar()
    contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3), levels=lrholevs, colors='w')
    xlim(0., xmax)
    ylim(-xmax / 2., xmax / 2.)
    bhole(rhor)
    title('t=' + str(t))
    savefig(prefix + '_o.eps')
    savefig(prefix + '_o.png')
    close()
    # --- streamline plot: resample the velocity field onto a regular
    # cartesian grid (streamplot requires one) ---
    nx = 20
    ny = 20
    xmin = 0.
    ymin = -xmax / 4.
    ymax = xmax / 2.
    # cell-centred regular grid covering the plotted window
    xflow = (xmax - xmin) * (arange(nx) + 0.5) / double(nx) + xmin
    yflow = (ymax - ymin) * (arange(ny) + 0.5) / double(ny) + ymin
    x2, y2 = meshgrid(xflow, yflow)
    xgrid = (r2 * sin(h2)).flatten()
    ygrid = (r2 * cos(h2)).flatten()
    # polar (ur, uh) -> cartesian (vx, vy) components
    vxflow = (ur * sin(h2) + uh * cos(h2)).flatten()
    vyflow = (-uh * sin(h2) + ur * cos(h2)).flatten()
    vx = griddata(list(zip(xgrid, ygrid)), vxflow, (x2, y2), method='nearest')
    vy = griddata(list(zip(xgrid, ygrid)), vyflow, (x2, y2), method='nearest')
    # logarithmic speed levels spanning [vmin, vmax]
    vmin = 1e-8  # sqrt((vx**2+vy**2)).min()*9.
    vmax = 1.0  # sqrt((vx**2+vy**2)).max()*1.1
    vlevs = log10(
        (old_div(vmax, vmin))**(old_div(arange(20), double(19))) * vmin)
    vlevs[0] = -30.  # catch-all lowest level so zero speed still maps
    norm = BoundaryNorm(vlevs, ncolors=cmap.N, clip=True)
    clf()
    fig = figure()
    contourf(r2 * sin(h2), r2 * cos(h2), log10(sqrt(ur**2 + uh**2)), levels=vlevs, norm=norm)
    colorbar()
    title('t=' + str(t))
    contour(r2 * sin(h2), r2 * cos(h2), log10(rho + 1e-3), levels=lrholevs, colors='w')
    streamplot(xflow, yflow, vx, vy, color='k')
    xlim(xmin, xmax)
    ylim(ymin, ymax)
    xlabel(r'$\varpi$')
    ylabel(r'$z$')
    bhole(rhor)
    # mark the marginally stable (ISCO) radius for spin a -- presumably
    # what arms() returns; verify against its definition
    plot([arms(a), arms(a)], [-1., 1.], color='w')  #, linestyle='dotted')
    fig.set_size_inches(10, 6)
    savefig(prefix + '_stream.eps')
    savefig(prefix + '_stream.png')
    close()
def tag_split(str):
    """Split a tag string (or iterable of string pieces) into a list of tags.

    The input is concatenated into one string, occurrences of ", " are
    normalised to ",", and the result is split on commas.

    :param str: a string or an iterable of string fragments
                (note: the parameter name shadows the builtin ``str``).
    :returns: list of tag strings; an empty input yields ``[""]``.
    """
    joined = "".join(str)
    normalised = joined.replace(", ", ",")
    return normalised.split(",")