def get(self, request, format=None):
    """Return the authenticated user and auth token of this request (DRF view).

    Fixed: `unicode` does not exist on Python 3 — `str()` gives the same text.
    """
    content = {
        'user': str(request.user),  # `django.contrib.auth.User` instance.
        'auth': str(request.auth),  # None
    }
    print('测试:', content)  # NOTE(review): debug trace — consider removing
    return Response(content)
def sql_load_fr_db(dbpath, sql=''):
    """Execute *sql* against the SQLite database at *dbpath* and return all rows.

    The connection uses PARSE_DECLTYPES|PARSE_COLNAMES so declared column types
    (e.g. timestamps) are converted to Python objects.

    Fixed: np.unicode was removed in NumPy 1.20 (str() is the Py3 equivalent),
    and the connection is now closed even when the query raises.
    """
    conn = sqlite.connect(str(dbpath),
                          detect_types=sqlite.PARSE_DECLTYPES | sqlite.PARSE_COLNAMES)
    try:
        curs = conn.cursor()
        curs.execute(str(sql))  # send SQL syntax to the cursor
        result = curs.fetchall()
        curs.close()
    finally:
        conn.close()
    return result
def apply_permission():
    """Re-launch the current script elevated ("Run as administrator") and exit.

    Windows-only: uses ShellExecuteW with the "runas" verb (UAC prompt).

    Fixed: the Python 2 branch imported `unicode` from numpy — that alias was
    removed in NumPy 1.20; on Python 2 the builtin `unicode` is the right name
    (the reference is only evaluated at runtime, so it is safe under Python 3).
    """
    if version_info[0] == 3:
        # Python 3: ShellExecuteW accepts str natively.
        windll.shell32.ShellExecuteW(None, "runas", executable, __file__, None, 1)
    else:  # Python 2.x: ShellExecuteW needs unicode strings
        windll.shell32.ShellExecuteW(None, u"runas", unicode(executable),
                                     unicode(__file__), None, 1)
    exit(0)
def latextablestr(table, cat):
    """Render *table* (astropy-like: .colnames, column .unit, row access) as the
    body of a LaTeX tabular: a bold header row, a units row, then data rows.

    *cat* supplies the stacking unit (compared against the module-level
    `msun` / `gasfrac`) used to choose the unit label for Flux columns.

    Fixed: np.unicode was removed in NumPy 1.20 — str() is equivalent here.
    """
    names = table.colnames
    last = len(names) - 1
    tablestr = r' '
    # Header row: bold column names.
    for idx, name in enumerate(names):
        tablestr += r'\textbf{%s}' % name
        tablestr += r' \\' if idx == last else r' & '
    # Units row.
    for idx, name in enumerate(names):
        if str(table[name].unit) == 'None':
            unit = ''
        elif 'Flux' in name:
            if cat.stackunit == msun:
                unit = r'(M$_\odot$)'
            elif cat.stackunit == gasfrac:
                unit = r'(M$_\mathrm{HI}$/M$_\ast$)'
            else:
                unit = '(Jy km/s)'
        elif str(name) == 'FWHM':
            unit = '(km/s)'
        else:
            unit = ''
        tablestr += r'%s' % unit
        tablestr += r' \\\hline ' if idx == last else r' & '
    # Data rows.  exact type checks kept (not isinstance) to preserve the
    # original behaviour for bool and str subclasses.
    for rownm in range(len(table)):
        for idx, name in enumerate(names):
            val = table[name][rownm]
            if type(val) in (str, np.str_):
                tablestr += r'%s' % str(val)
            elif type(val) in (int, np.int_):
                tablestr += r'%i' % val
            elif ('FWHM' in name) or ('chi' in name):
                tablestr += r'%.1f' % round(val, 1)
            else:
                numstr = r'%10.2g' % val
                if 'E' in numstr or 'e' in numstr:
                    # Exponential notation gets pretty-printed elsewhere.
                    numstr = latexexponent(val)
                tablestr += numstr
            tablestr += r' \\\hline ' if idx == last else r' & '
    return tablestr
def _await_rcvd(con):
    """Accumulate single bytes from *con* until 4 characters arrive; the peer
    must have sent "RCVD", otherwise raise ConnectionError.  Exits the process
    with -3 if the connection yields nothing (closed socket)."""
    msg = ""
    while len(msg) < 4:
        # On Py3, str(bytes, errors='ignore') decoded as UTF-8/ignore;
        # .decode() makes that explicit (np.unicode was removed in NumPy 1.20).
        data = con.recv(1).decode('utf-8', errors='ignore')
        msg += data
        print("Current msg: ", msg)
        if msg == "":
            exit(-3)
        sys.stdout.flush()
    if not msg == "RCVD":
        raise ConnectionError

def mandar(símismo):
    """Send the JSON header over the socket (size byte first), waiting for a
    "RCVD" ack after each message; for a "TOMAR_" order also send the payload.
    Returns the processed response.

    Fixed: `símismo.orden is "TOMAR_"` compared identity, not equality.
    """
    print("IN MANDAR NOW")
    encabezado = símismo._encabezado()
    encabezado_bytes = json.dumps(encabezado, ensure_ascii=False).encode('utf8')
    # Send header size first (single big-endian byte).
    símismo.con.sendall(len(encabezado_bytes).to_bytes(1, byteorder="big"))
    print("Letting C know this size: ", len(encabezado_bytes))
    sys.stdout.flush()
    _await_rcvd(símismo.con)
    # Send the JSON header itself.
    símismo.con.sendall(encabezado_bytes)
    print("Encabezado bytes: ", encabezado_bytes)
    sys.stdout.flush()
    _await_rcvd(símismo.con)
    if símismo.orden == "TOMAR_":  # was `is`: identity compare on a literal
        # Send the JSON payload.
        símismo.con.sendall(símismo.contenido)
        print("Contenido bytes: ", símismo.contenido)
        sys.stdout.flush()
        _await_rcvd(símismo.con)
    return símismo._procesar_respuesta()
def openDBFile(self):
    """Open the SpatiaLite file and populate the three table comboboxes with
    its user tables/views (SpatiaLite's internal metadata tables excluded).

    Fixed: np.unicode was removed in NumPy 1.20 — str() is equivalent.
    """
    if os.path.isfile(str(self.database)):
        self.selected_database_QLineEdit.setText(self.database)
        self.table_ComboBox_1.clear()
        self.table_ComboBox_2.clear()
        self.table_ComboBox_3.clear()
        for i in range(1, 3):
            self.clearthings(1)
        conn = sqlite.connect(str(self.database))
        cursor = conn.cursor()
        # SQL statement to get the relevant tables in the spatialite database.
        rs = cursor.execute(r"""SELECT tbl_name FROM sqlite_master WHERE (type='table' or type='view') and not (name in('geom_cols_ref_sys', 'geometry_columns', 'geometry_columns_time', 'spatial_ref_sys', 'spatialite_history', 'vector_layers', 'views_geometry_columns', 'virts_geometry_columns', 'geometry_columns_auth', 'geometry_columns_fields_infos', 'geometry_columns_statistics', 'sql_statements_log', 'layer_statistics', 'sqlite_sequence', 'sqlite_stat1' , 'views_layer_statistics', 'virts_layer_statistics', 'vector_layers_auth', 'vector_layers_field_infos', 'vector_layers_statistics', 'views_geometry_columns_auth', 'views_geometry_columns_field_infos', 'views_geometry_columns_statistics', 'virts_geometry_columns_auth', 'virts_geometry_columns_field_infos', 'virts_geometry_columns_statistics' , 'geometry_columns', 'spatialindex', 'SpatialIndex')) ORDER BY tbl_name""")
        # A leading blank entry means "no table selected".
        self.table_ComboBox_1.addItem('')
        self.table_ComboBox_2.addItem('')
        self.table_ComboBox_3.addItem('')
        for row in cursor:
            self.table_ComboBox_1.addItem(row[0])
            self.table_ComboBox_2.addItem(row[0])
            self.table_ComboBox_3.addItem(row[0])
        rs.close()
        conn.close()
def SummarizeUrl(url):
    """Fetch *url*, extract the article and return its sentence summaries.

    Returns None when the page cannot be fetched or yields no usable
    title/body text.

    Fixed: `unicode` does not exist on Python 3 — str() is equivalent.
    """
    try:
        article = grab_link(url)
    except IOError:
        print('IOError')
        return None
    if not (article and article.cleaned_text and article.title):
        return None
    return Summarize(str(article.title), str(article.cleaned_text))
def run_command(self, com):
    """Run shell command *com*; when not elevated, relaunch this script with
    admin rights via UAC instead (Windows only).

    Fixed: the Py2 branch now uses the builtin `unicode` (runtime-only name),
    and the QMessageBox receives the exception's text — passing the exception
    object itself can fail inside Qt.
    """
    try:
        if is_admin():
            os.system(com)
        elif sys.version_info[0] == 3:
            ctypes.windll.shell32.ShellExecuteW(None, "runas", sys.executable,
                                                __file__, None, 1)
        else:  # Python 2.x: ShellExecuteW needs unicode strings
            ctypes.windll.shell32.ShellExecuteW(None, u"runas",
                                                unicode(sys.executable),
                                                unicode(__file__), None, 1)
    except Exception as e:
        QMessageBox.information(self, 'Warning', str(e))
def on_epoch_end(self, epoch, logs=None):
    """Append one CSV row with this epoch's `logs` values (CSVLogger-style
    Keras callback).  Lazily creates the csv writer (and header) on first use.
    """
    logs = {} if logs is None else logs

    def handle_value(k):
        # 0-d ndarrays are scalars — must not be serialised as iterables.
        is_zero_dim_ndarray = isinstance(k, np.ndarray) and k.ndim == 0
        if isinstance(k, six.string_types):
            return k
        elif isinstance(k, Iterable) and not is_zero_dim_ndarray:
            # Quote iterables as '"[a, b, ...]"' so commas survive CSV parsing.
            return '"[%s]"' % (', '.join(map(str, k)))
        else:
            return k

    # Column order is fixed from the first epoch's sorted log keys.
    if self.keys is None:
        self.keys = sorted(logs.keys())

    if self.model._stop_training:
        # We set NA so that csv parsers do not fail for this last epoch.
        logs = dict([(k, logs[k] if k in logs else 'NA') for k in self.keys])

    if not self.writer:
        # Dialect carrying the user-selected separator.
        class CustomDialect(csv.excel):
            delimiter = self.sep
        fieldnames = ['epoch'] + self.keys
        if six.PY2:
            # Python 2 csv needs unicode fieldnames.
            fieldnames = [np.unicode(x) for x in fieldnames]
        self.writer = csv.DictWriter(self.csv_file,
                                     fieldnames=fieldnames,
                                     dialect=CustomDialect)
        if self.append_header:
            self.writer.writeheader()

    row_dict = collections.OrderedDict({'epoch': epoch})
    row_dict.update((key, handle_value(logs[key])) for key in self.keys)
    self.writer.writerow(row_dict)
    # Flush so partial training runs still leave a readable CSV.
    self.csv_file.flush()
def __init__(self, stream_address, port):
    """Bind a zmq SUB socket on tcp://<stream_address>:<port>, subscribed to
    every message.

    Fixed: np.unicode('') was removed in NumPy 1.20 — '' is equivalent.
    """
    context = zmq.Context()
    self.footage_socket = context.socket(SUB)
    self.footage_socket.bind('tcp://' + stream_address + ':' + port)
    self.footage_socket.setsockopt_string(SUBSCRIBE, '')  # empty prefix = all
    self.current_frame = None
    self.keep_running = True
def generateEntity(path, filename):
    """Read DBpedia-style resource lines from path/filename and write an
    entity dictionary file ("<entity> 20000 n" per line) to the fixed output
    location, printing the total entity count at the end.

    Fixed for Python 3: `urllib.unquote` moved to `urllib.parse.unquote`, and
    strings no longer need a `unicode(...)` wrapper before writing.
    """
    from urllib.parse import unquote  # Py3 location of unquote

    i = 0
    f = io.open(os.path.join(path, filename), 'r', encoding='UTF-8')
    fw = io.open(os.path.join(r"/data/ylx/ylx/data/", "entities.txt"), 'w',
                 encoding='UTF-8')
    for line in f:
        x = line.split("resource/", 1)
        value = str(x[1])
        value = unquote(value)
        value = value.replace(" ", "·")
        value = value.rstrip("\n")
        newstr = value + " " + "20000" + " " + "n" + "\n"
        fw.write(newstr)  # already str on Python 3
        i = i + 1
    f.close()
    fw.close()
    print("总共实体数:")  # "total number of entities"
    print(i)
    return
def get_sol(self):
    """Get the solenoid based on debug: a MockMagnet in debug mode, otherwise a
    real Magnet, named after the current solenoid combobox selection.

    Fixed: np.unicode was removed in NumPy 1.20 — str() is equivalent.
    """
    name = str(self.ui.sol_combo.currentText())
    solenoid = MockMagnet(name=name) if self._debug else Magnet(name=name)
    return solenoid
def fetch_frame(self, handle):
    """Receive 1000 base64-encoded JPEG frames on a local zmq SUB socket and
    print the resulting frames-per-second figure.

    Fixed: np.unicode('') was removed in NumPy 1.20, and np.fromstring is
    deprecated for binary input — np.frombuffer is the supported equivalent.
    """
    context = zmq.Context()
    footage_socket = context.socket(zmq.SUB)
    footage_socket.bind('tcp://*:5555')
    footage_socket.setsockopt_string(zmq.SUBSCRIBE, '')  # subscribe to all
    start_time = time.time()
    for i in range(1000):
        try:
            frame = footage_socket.recv_string()
            frame = frame.encode()
            img = base64.b64decode(frame)
            npimg = np.frombuffer(img, dtype=np.uint8)
            source = cv2.imdecode(npimg, 1)
            # handle(source)
        except KeyboardInterrupt:
            cv2.destroyAllWindows()
            break
    elapsed = time.time() - start_time
    print(1000 / elapsed)
def __init__(self, server_address, send_port, recv_port, viewer, viewer_port):
    """
    Create the three sockets this node needs:
    a PUB socket to the StreamViewer, a PUB socket to the secondary viewer,
    and a local SUB socket (subscribed to everything) on recv_port.

    :param server_address: address of the computer running the StreamViewer
    :param send_port: port used for sending the stream to the StreamViewer
    :param recv_port: local port to listen on for incoming frames
    :param viewer: address of the secondary viewer
    :param viewer_port: port of the secondary viewer

    Fixed: np.unicode('') was removed in NumPy 1.20 — '' is equivalent.
    """
    print("[SERVER] Connecting to", server_address, "at", send_port)
    context = zmq.Context()
    self.footage_socket = context.socket(zmq.PUB)
    self.footage_socket.connect('tcp://' + str(server_address) + ':' + str(send_port))
    print("[VIEWER] Connecting to", viewer, "at", viewer_port)
    context_viewer = zmq.Context()
    self.footage_socket_viewer = context_viewer.socket(zmq.PUB)
    self.footage_socket_viewer.connect('tcp://' + str(viewer) + ':' + str(viewer_port))
    print("[HERE] Listening on", recv_port)
    context_tiny = zmq.Context()
    self.footage_socket_tiny = context_tiny.socket(zmq.SUB)
    self.footage_socket_tiny.bind('tcp://*:' + str(recv_port))
    self.footage_socket_tiny.setsockopt_string(zmq.SUBSCRIBE, '')
    self.keep_running = True
    self.keyframe = None
def __init__(self, camID: int, stracker: Dict[str, str], wr_lock: Lock,
             tracklet_dict: Dict[int, Tracklet], n_feature_samples: int = 8):
    """Worker attached to one single-camera tracker.

    Fixed: np.unicode('') was removed in NumPy 1.20 — '' is equivalent.
    """
    super().__init__()
    # Integer camera ID.
    self.camID: int = camID
    # Single-camera tracker configs:
    # 'identifier' => int; 'ip', 'footage_port', 'tracker_port',
    # 'listen_port' => str.
    self.stracker: Dict[str, str] = stracker
    self.stracker['identifier']: int = -1
    # Tracker's "IP address : port".
    self.tracker_address: str = self.stracker['ip'] + ':' + self.stracker[
        'tracker_port']
    # Lock for synchronization.
    self.data_lock: Lock = wr_lock
    # Running state.
    self.running: bool = True
    # Dict for storing tracklets. Require synchronization when reading/writing.
    self.tracklet_dict: Dict[int, Tracklet] = tracklet_dict
    # Number of sampled features for each tracklet.
    self.n_feature_samples: int = n_feature_samples
    # ZeroMQ context and sockets for broadcasting global querying results.
    context = zmq.Context()
    self.socket = context.socket(zmq.SUB)
    self.socket.connect('tcp://' + self.stracker['ip'] + ':' +
                        self.stracker['tracker_port'])
    self.socket.setsockopt_string(zmq.SUBSCRIBE, '')  # subscribe to all
def __init__(self, port, webcam_server, send_port, send_port_tiny):
    """
    Binds the computer to an ip address and starts listening for incoming
    streams; also opens two PUB sockets back to the webcam server.

    :param port: Port which is used for streaming

    Fixed: np.unicode('') was removed in NumPy 1.20 — '' is equivalent.
    """
    context = zmq.Context()
    self.footage_socket = context.socket(zmq.SUB)
    self.footage_socket.bind('tcp://*:' + str(port))
    self.footage_socket.setsockopt_string(zmq.SUBSCRIBE, '')  # subscribe to all
    context_send = zmq.Context()
    self.footage_socket_send = context_send.socket(zmq.PUB)
    self.footage_socket_send.connect('tcp://' + str(webcam_server) + ':' +
                                     str(send_port))
    context_tiny = zmq.Context()
    self.footage_socket_tiny = context_tiny.socket(zmq.PUB)
    self.footage_socket_tiny.connect('tcp://' + str(webcam_server) + ':' +
                                     str(send_port_tiny))
    self.current_frame = None
    self.keep_running = True
    self.n_dropped_frames = 0
def test_result_filter():
    """ResultFilter combines query terms, callable filters and per-key rules.

    Fixed: np.unicode was removed in NumPy 1.20 — str() yields the same text.
    """
    filter_0 = neurovault.ResultFilter(query_terms={'a': 0},
                                       callable_filter=lambda d: len(d) < 5,
                                       b=1)
    assert_equal(str(filter_0), u'ResultFilter')
    assert_equal(filter_0['a'], 0)
    assert_true(filter_0({'a': 0, 'b': 1, 'c': 2}))
    assert_false(filter_0({'a': 0, 'b': 1, 'c': 2, 'd': 3, 'e': 4}))
    assert_false(filter_0({'b': 1, 'c': 2, 'd': 3}))
    assert_false(filter_0({'a': 1, 'b': 1, 'c': 2}))
    filter_1 = neurovault.ResultFilter(query_terms={'c': 2})
    filter_1['d'] = neurovault.NotNull()
    assert_true(filter_1({'c': 2, 'd': 1}))
    assert_false(filter_1({'c': 2, 'd': 0}))
    filter_1['d'] = neurovault.IsIn(0, 1)
    assert_true(filter_1({'c': 2, 'd': 1}))
    assert_false(filter_1({'c': 2, 'd': 2}))
    del filter_1['d']
    assert_true(filter_1({'c': 2, 'd': 2}))
    filter_1['d'] = neurovault.NotIn(0, 1)
    assert_false(filter_1({'c': 2, 'd': 1}))
    assert_true(filter_1({'c': 2, 'd': 3}))
    filter_1.add_filter(lambda d: len(d) > 2)
    assert_false(filter_1({'c': 2, 'd': 3}))
    assert_true(filter_1({'c': 2, 'd': 3, 'e': 4}))
def test_result_filter():
    """ResultFilter combines query terms, callable filters and per-key rules.

    Fixed: np.unicode was removed in NumPy 1.20 — str() yields the same text.
    """
    filter_0 = neurovault.ResultFilter(query_terms={'a': 0},
                                       callable_filter=lambda d: len(d) < 5,
                                       b=1)
    assert str(filter_0) == u'ResultFilter'
    assert filter_0['a'] == 0
    assert filter_0({'a': 0, 'b': 1, 'c': 2})
    assert not filter_0({'a': 0, 'b': 1, 'c': 2, 'd': 3, 'e': 4})
    assert not filter_0({'b': 1, 'c': 2, 'd': 3})
    assert not filter_0({'a': 1, 'b': 1, 'c': 2})
    filter_1 = neurovault.ResultFilter(query_terms={'c': 2})
    filter_1['d'] = neurovault.NotNull()
    assert filter_1({'c': 2, 'd': 1})
    assert not filter_1({'c': 2, 'd': 0})
    filter_1['d'] = neurovault.IsIn(0, 1)
    assert filter_1({'c': 2, 'd': 1})
    assert not filter_1({'c': 2, 'd': 2})
    del filter_1['d']
    assert filter_1({'c': 2, 'd': 2})
    filter_1['d'] = neurovault.NotIn(0, 1)
    assert not filter_1({'c': 2, 'd': 1})
    assert filter_1({'c': 2, 'd': 3})
    filter_1.add_filter(lambda d: len(d) > 2)
    assert not filter_1({'c': 2, 'd': 3})
    assert filter_1({'c': 2, 'd': 3, 'e': 4})
def __init__(self):
    """Set up PUB/SUB/REP sockets, run the connection handshake, then create a
    red demo cube model in the Slicer scene.

    Fixed: np.unicode('') was removed in NumPy 1.20, and the log message had a
    typo ("establihed").
    """
    self.context = zmq.Context()
    self.PUB = self.context.socket(zmq.PUB)
    self.PUB.bind("tcp://*:5555")
    self.SUB = self.context.socket(zmq.SUB)
    self.SUB.bind("tcp://*:5556")
    self.SUB.setsockopt_string(zmq.SUBSCRIBE, '')  # subscribe to all
    self.REP = self.context.socket(zmq.REP)
    self.REP.bind("tcp://*:5557")
    self.run()
    logging.info('Connection established')
    self.id.Identity()
    # Demo geometry: a 20x5x10 cube at the origin, shown in red on slices.
    cube = vtk.vtkCubeSource()
    centerPointCoord = [0.0, 0.0, 0.0]
    cube.SetCenter(centerPointCoord)
    cube.SetXLength(20)
    cube.SetYLength(5)
    cube.SetZLength(10)
    cube.Update()
    modelsLogic = slicer.modules.models.logic()
    model = modelsLogic.AddModel(cube.GetOutput())
    model.GetDisplayNode().SetSliceIntersectionVisibility(True)
    model.GetDisplayNode().SetSliceIntersectionThickness(3)
    model.GetDisplayNode().SetColor(1, 0, 0)
def safe_str(val, encoding='utf-8'):
    """Best-effort conversion of *val* to text.

    On Python 3, str() does not raise UnicodeEncodeError for ordinary objects,
    so the fallback is a Python-2 legacy path kept for exotic __str__
    implementations.

    Fixed: `unicode` does not exist on Python 3, and Py3 exceptions are not
    iterable — the recursion now walks `val.args`.
    """
    try:
        return str(val)
    except UnicodeEncodeError:
        if isinstance(val, Exception):
            return ' '.join(safe_str(arg, encoding) for arg in val.args)
        # Legacy Py2 fallback; repr() never raises encoding errors.
        return repr(val)
def one_letter():
    """
    Generate a trie that is used for a special case where only one letter is
    given to the autocomplete function.  Since it's very expensive to go over
    all combinations each time, this function does it once and stores the
    result.
    :return: a one-letter trie, which also gets stored on the drive

    Fixed: np.unicode was removed in NumPy 1.20 — str() is equivalent.
    """
    try:
        grams = pd.read_pickle(resources_path + 'words.pkl')
    except IOError:
        grams = load_words()
    short_grams = grams.copy()
    short_grams['indices'] = short_grams.index
    # Keep only the 50 most frequent bigrams per starting word.
    res = short_grams.groupby("first").apply(
        lambda group: group.nlargest(50, columns='freq'))
    indices = res['indices'].values
    grams = grams.iloc[indices, :]
    # The trie API requires each frequency wrapped in a 1-tuple.
    grams['freq'] = grams['freq'].apply(lambda x: (x, ))
    freqs = grams['freq'].values
    phrases = grams['first'] + " " + grams['second']
    fmt = "@i"
    phrases = list(map(str, phrases))
    triee = marisa.RecordTrie(fmt, zip(phrases, freqs))
    with open(resources_path + 'short_trie.pkl', 'wb') as output:
        pickle.dump(triee, output, pickle.HIGHEST_PROTOCOL)
    return triee
def load_data(branch_limit=10000):
    """
    Load the longest version of the trie, containing most n-grams (limited by
    the branch_limit)
    :param branch_limit: the limit of children for each node of the trie.
        Default 10000
    :return: the trie, which also gets stored on the drive

    Fixed: np.unicode was removed in NumPy 1.20 — str() is equivalent.
    """
    try:
        grams = pd.read_pickle(resources_path + 'words.pkl')
    except IOError:
        grams = load_words()
    grams = grams.sort_values(by='freq', ascending=False)
    # Limit the number of children for each node.
    grams = grams.groupby("first").head(branch_limit)
    # The transformation from int to a singular tuple is required by the trie API.
    grams['freq'] = grams['freq'].apply(lambda x: (x, ))
    freqs = grams['freq'].values
    phrases = grams['first'] + " " + grams['second']
    fmt = "@i"
    phrases = list(map(str, phrases))
    triee = marisa.RecordTrie(fmt, zip(phrases, freqs))
    # Store the trie.
    with open(resources_path + 'trie.pkl', 'wb') as output:
        pickle.dump(triee, output, pickle.HIGHEST_PROTOCOL)
    return triee
def extract_emojis(full_text, emojis_match):
    """Return a '|'-separated string of the emojis from emojis_match['Emoji']
    that occur in *full_text*, or '' when none occur.

    Fixed: np.unicode was removed in NumPy 1.20 — str() is equivalent; the
    manual counter/concatenation loop is replaced by a comprehension + join.
    """
    text = str(full_text)
    found = [e for e in emojis_match['Emoji'] if text.count(e) >= 1]
    return '|'.join(found) if found else ''
def on_post(self, req, resp):
    """Score an incoming JSON request: fan out each field to self.get_value in
    a thread pool, fold the partial predictions/probabilities, and write the
    final scaled prediction to resp.body.

    Fixed: json.loads() lost its `encoding` parameter in Python 3.9, and
    `unicode` does not exist on Python 3.
    """
    input_json = json.loads(req.stream.read())
    input_json['ip'] = str(req.remote_addr)
    pred = 1
    prob = 1
    with ThreadPoolExecutor(max_workers=8) as pool:
        future_array = {
            pool.submit(self.get_value, f, input_json[f]): f
            for f in input_json
        }
        for future in as_completed(future_array):
            pred1, prob1 = future.result()
            pred = pred + pred1
            prob = prob - prob1
    resp.status = falcon.HTTP_200
    res = math.exp(pred) - 1  # NOTE(review): computed but never used
    if res < 0:
        res = 0
    prob = math.exp(prob)
    # Clamp the probability into [0.1, 0.9] before scaling.
    if prob <= .1:
        prob = .1
    if prob >= .9:
        prob = .9
    multiplier = self.low + (self.high - self.low) * prob
    pred = multiplier * pred
    resp.body = str(pred)
def create_sub_socket(self):
    """Create a SUB socket to self.host:self.port, subscribed to all messages.

    Fixed: np.unicode('') was removed in NumPy 1.20 — '' is equivalent.
    """
    ctx = zmq.Context()
    self.socket = ctx.socket(zmq.SUB)
    self.socket.connect(f'tcp://{self.host}:{self.port}')
    self.socket.setsockopt_string(zmq.SUBSCRIBE, '')  # empty prefix = all
def get_image(camera):
    """
    A generator feeding frames of the video: listens on the configured
    receiver port and yields decoded OpenCV images until interrupted.

    Fixed: np.unicode('') was removed in NumPy 1.20, and np.fromstring is
    deprecated for binary input — np.frombuffer is the supported equivalent.
    """
    context = zmq.Context()
    footage_socket = context.socket(zmq.SUB)
    logging.debug('using image stream')
    logging.debug('listening to: tcp://*:' + camera['receiver']['port'])
    # Listen on the port specified in the configs.
    footage_socket.bind('tcp://*:' + camera['receiver']['port'])
    footage_socket.setsockopt_string(zmq.SUBSCRIBE, '')  # subscribe to all
    logging.debug('connection acquired!')
    while True:
        try:
            frame = footage_socket.recv_string()
            # Decode the image from its base64 text representation.
            img = base64.b64decode(frame)
            npimg = np.frombuffer(img, dtype=np.uint8)
            source = cv2.imdecode(npimg, 1)
            yield source
        except KeyboardInterrupt:
            cv2.destroyAllWindows()
            break
def assert_extraction_as_expected(self, c1_form, config_file):
    """Run extraction on the transformed form image matching *c1_form* and
    compare the probability map with the stored expected JSON; when they
    differ and overwrite_resources is set, rewrite the expected file first.

    Fixed: the `unicode(...)` wrapper around json.dumps is a Python-2 relic
    (json.dumps already returns str); removed the unused json_string local.
    """
    config = views.load_config(config_file)
    transformed_path = './resources/forms/transformed/'
    found_files = []
    for root, dirs, file_names in walk(transformed_path):
        found_files = fnmatch.filter(file_names, '*' + c1_form + '.jpg')
    self.assertEqual(1, len(found_files),
                     msg="only one file should match pattern")
    output_path = tempfile.gettempdir()
    probability_map = extraction.extract(found_files[0], transformed_path,
                                         output_path, settings.STATIC_DIR,
                                         config)
    expected_json_file_path = './resources/probabilities/' + c1_form + '.json'
    with io.open(expected_json_file_path, 'r') as expected_json_file:
        expected = expected_json_file.read()
    self.maxDiff = None
    expected_json = json.loads(expected)
    actual_json = json.loads(probability_map)
    if (expected_json != actual_json) and self.overwrite_resources:
        # Regenerate the expected resource from the actual result.
        with io.open(expected_json_file_path, 'w') as expected_json_file:
            expected_json_file.write(
                json.dumps(actual_json, ensure_ascii=False, indent=4,
                           separators=(',', ': '), sort_keys=True))
            expected_json_file.flush()
    self.assertEqual(expected_json, actual_json)
def parse(message):
    """Concatenate the decoded text of all text/plain parts of an email
    *message* (parts without a declared charset are skipped, as before).

    Fixed: `unicode(bytes, errors='ignore')` is Python-2-only and decoded as
    ASCII even when a charset was declared; the payload is now decoded with
    the part's own charset (undecodable bytes still ignored).
    """
    body = ''
    for part in message.walk():
        if part.get_content_type() == "text/plain":
            charset = part.get_content_charset()
            if charset is not None:
                # get_payload(decode=True) undoes any transfer encoding and
                # returns bytes.
                body += part.get_payload(decode=True).decode(charset,
                                                             errors='ignore')
    return body
def __init__(self, server_address, port):
    """Establish a SUBSCRIBER connection to the given address and port,
    subscribed to every message.

    Fixed: np.unicode('') was removed in NumPy 1.20 — '' is equivalent.
    """
    context = zmq.Context()
    self.footage_socket = context.socket(zmq.SUB)
    self.footage_socket.connect('tcp://' + server_address + ':' + port)
    self.footage_socket.setsockopt_string(zmq.SUBSCRIBE, '')  # all messages
    self.current_frame = None
def popular_trie():
    """
    Generate a trie for the most popular words, like "to", "the", etc.
    Popular trie should be used if the branching factor for the long trie is
    large (>1000)
    :return: a popular trie, which also gets stored on the drive

    Fixed: np.unicode was removed in NumPy 1.20 — str() is equivalent.
    """
    try:
        grams = pd.read_pickle(resources_path + 'words.pkl')
    except IOError:
        grams = load_words()
    # Starting words whose summed bigram frequency exceeds 7000.
    big_ones = dict()
    for elem in (grams.groupby(['first']).sum()).iterrows():
        count = elem[1]['freq']
        if count > 7000:
            big_ones[elem[1].name] = count
    grams = grams.loc[grams['first'].isin(big_ones)]
    grams = grams.loc[grams['freq'] > 1000]
    # The trie API requires each frequency wrapped in a 1-tuple.
    grams['freq'] = grams['freq'].apply(lambda x: (x, ))
    freqs = grams['freq'].values
    phrases = grams['first'] + " " + grams['second']
    fmt = "@i"
    phrases = list(map(str, phrases))
    triee = marisa.RecordTrie(fmt, zip(phrases, freqs))
    with open(resources_path + 'popular_trie.pkl', 'wb') as output:
        pickle.dump(triee, output, pickle.HIGHEST_PROTOCOL)
    with open(resources_path + 'dict.pkl', 'wb') as output:
        pickle.dump(big_ones, output, pickle.HIGHEST_PROTOCOL)
    return triee
def message_handler():
    """
    Message handler function to connect sockets via TCP with the server.
    This function is within its own thread and listens for updates to stream
    video by setting the connection flag.

    Fixed: np.unicode('') was removed in NumPy 1.20, and the bare `except:`
    also swallowed SystemExit/KeyboardInterrupt.
    """
    global connection_flag
    global exit_signal
    global config_data
    connected = "connected"
    disconnected = "disconnected"
    messaging_socket = zmq.Context().socket(zmq.SUB)
    messaging_socket.bind('tcp://*:' + str(config_data['listening_port']))
    messaging_socket.setsockopt_string(zmq.SUBSCRIBE, '')  # subscribe to all
    while not exit_signal.is_set():
        try:
            incoming_message = messaging_socket.recv_string()
            if incoming_message == connected:
                connection_flag = True
            elif incoming_message == disconnected:
                connection_flag = False
        except Exception:
            # Best-effort loop: ignore transient socket errors and keep
            # listening until exit_signal is set.
            pass
    return
def Table3Changed(self):
    """Called whenever table3 is changed: remember the new table name and
    refresh the x/y/filter comboboxes for tab 3 with its columns.

    Fixed: np.unicode was removed in NumPy 1.20 — str() is equivalent.
    """
    self.clearthings(3)
    self.table3 = str(self.table_ComboBox_3.currentText())
    # GeneralNote: currentText cannot be sent with the SIGNAL trigger, so it
    # is read back from the combobox here.
    current = self.table_ComboBox_3.currentText()
    self.PopulateComboBox('xcol_ComboBox_3', current)
    self.PopulateComboBox('ycol_ComboBox_3', current)
    self.PopulateComboBox('Filter1_ComboBox_3', current)
    self.PopulateComboBox('Filter2_ComboBox_3', current)
def LoadColumnsFromTable(self, table=''):
    """This method returns a list with all the columns in the table (an empty
    dict when no table/database is set, preserving the original contract).

    Fixed: np.unicode was removed in NumPy 1.20; the builtin `tuple` is no
    longer shadowed; removed the dead `columns = {}` pre-assignment.
    """
    if len(table) > 0 and len(self.database) > 0:
        conn = sqlite.connect(str(self.database))
        curs = conn.cursor()
        # NOTE(review): the table name is interpolated into the SQL string —
        # identifiers cannot be bound as parameters, so callers must pass
        # trusted names (they come from the UI's own table list).
        sql = r"""SELECT * FROM '""" + str(table) + """'"""
        rs = curs.execute(sql)  # get the columns in the table
        columns = [col[0] for col in curs.description]
        rs.close()
        conn.close()
    else:
        # QMessageBox.information(None, "info", "no table is loaded")  # DEBUGGING
        columns = {}
    return columns
def normalizeWord(line):
    """Normalise one word/line of German text: lowercase, ASCII-fold while
    preserving Umlaute/ß, and collapse digit groups to '0'.

    Fixed: np.unicode(line, "utf-8") was removed in NumPy 1.20 and only
    accepted bytes; the function now accepts str or UTF-8 bytes.
    """
    if isinstance(line, bytes):
        line = line.decode("utf-8")  # convert from UTF-8
    line = line.replace(u"„", u"\"")
    line = line.lower()  # to lower case
    # Replace all special characters with the ASCII corresponding, but keep
    # Umlaute.  Requires that the text is in lowercase before.
    replacements = dict(((u"ß", "SZ"), (u"ä", "AE"), (u"ü", "UE"), (u"ö", "OE")))
    replacementsInv = dict(zip(replacements.values(), replacements.keys()))
    line = multiple_replace(line, replacements)
    line = unidecode(line)
    line = multiple_replace(line, replacementsInv)
    line = line.lower()  # unidecode might have replaced some chars with upper case (€ -> EUR)
    line = re.sub("([0-9][0-9.,]*)", '0', line)  # replace digits by NUMBER
    return line.strip()
[u"ы", 'a', 'z']], order=o) for o in 'CF'] # non-record (simple) numpy arrays _numpy_objs = [ np.arange(3), np.arange(6).reshape((2, 3), order='C'), np.arange(6).reshape((2, 3), order='F'), np.array(list('abcdef')), np.array("string"), np.array(u"ы"), # not an array but just an instance of that type np.float64(1), # problematic prior h5py 2.9.0 np.float128(1), np.unicode(u"ы"), np.unicode_(u"ы"), ] \ + _unicode_arrays \ + [a[:, ::2] for a in _unicode_arrays] # record arrays _numpy_objs += [ np.array([(1.0, 2), (3.0, 4)], dtype=[('x', float), ('y', int)]), np.array([(1.0, 'a'), (3.0, 'b')], dtype=[('x', float), ('y', 'S1')]), np.array([(1.0, u'ы'), (3.0, 'b')], dtype=[('x', float), ('y', '<U1')]), ] @sweepargs(obj=_python_objs + _numpy_objs) @with_tempfile() def test_save_load_python_objs(fname, obj):
def get(self, request, format=None):
    """Return the requesting user and auth token (DRF example view handler).

    Fixed: `unicode` does not exist on Python 3 — str() of a Django User
    yields its username, matching the old behaviour.
    """
    content = {
        'user': str(request.user),
        'auth': str(request.auth),
    }
    return Response(content)
def _blankstr(s):
    """True for the 'no selection' combobox values: '' or a single space."""
    return s == '' or s == ' '

def _plot_queries(self, i, xcol, ycol, table, filter1, filter1list,
                  filter2, filter2list, My_format, curs, plottype):
    """Build one SELECT per selected filter combination for a tab, store its
    label in self.plabels[i] and create the plot object; return the next free
    slot index.

    NOTE(review): as in the original, identifiers and filter values are
    string-interpolated into the SQL — the values come from the UI's own
    comboboxes/list widgets, not external input.
    """
    if not _blankstr(filter1) and not _blankstr(filter2):
        for item1 in filter1list:
            for item2 in filter2list:
                sql = (r""" select """ + xcol + """, """ + ycol +
                       """ from """ + table +
                       """ where """ + filter1 + """='""" + str(item1.text()) +
                       """' and """ + filter2 + """='""" + str(item2.text()) +
                       """' order by """ + xcol)
                self.plabels[i] = str(item1.text()) + """, """ + str(item2.text())
                self.createsingleplotobject(sql, i, My_format, curs, plottype)
                i += 1
    elif not _blankstr(filter1):
        for item1 in filter1list:
            sql = (r""" select """ + xcol + """, """ + ycol +
                   """ from """ + table +
                   """ where """ + filter1 + """='""" + str(item1.text()) +
                   """' order by """ + xcol)
            self.plabels[i] = str(item1.text())
            self.createsingleplotobject(sql, i, My_format, curs, plottype)
            i += 1
    elif not _blankstr(filter2):
        for item2 in filter2list:
            sql = (r""" select """ + xcol + """, """ + ycol +
                   """ from """ + table +
                   """ where """ + filter2 + """='""" + str(item2.text()) +
                   """' order by """ + xcol)
            self.plabels[i] = str(item2.text())
            self.createsingleplotobject(sql, i, My_format, curs, plottype)
            i += 1
    else:
        # No filters: one plot for the whole table.
        sql = (r""" select """ + xcol + """, """ + ycol +
               """ from """ + table + """ order by """ + xcol)
        self.plabels[i] = ycol + """, """ + table
        self.createsingleplotobject(sql, i, My_format, curs, plottype)
        i += 1
    return i

def drawPlot(self):
    """Plot the series configured on the three tabs from the selected SQLite
    database, one plot object per selected filter combination.

    Fixes vs. the original:
    - np.unicode (removed in NumPy 1.20) replaced with str;
    - the `nop =+ ...` typo (assignment of +value) replaced by a plain
      per-tab `nop = ...`, which is what the surrounding code relied on;
    - the `while i < len(self.p)` wrappers could spin forever when a filter
      text was set but nothing was selected in its list — the per-tab work is
      now a single pass via _plot_queries.
    """
    # Show the user this may take a long time...
    QtGui.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.WaitCursor))
    # db, table, x-col and y-col are saved as defaults when 'plot chart' is clicked.
    self.storesettings()
    self.axes.clear()
    # A numpy-friendly record format for (timestamp, value) rows.
    My_format = [('date_time', datetime.datetime), ('values', float)]
    conn = sqlite.connect(str(self.selected_database_QLineEdit.text()),
                          detect_types=sqlite.PARSE_DECLTYPES | sqlite.PARSE_COLNAMES)
    curs = conn.cursor()
    i = 0
    self.p = []        # plot objects
    self.plabels = []  # plot labels
    # --- tab 1 ---
    if not _blankstr(self.table1) and not _blankstr(self.xcol1) and not _blankstr(self.ycol1):
        # A non-zero max time step allows discontinuous plots.
        self.maxtstep = self.spnmaxtstep.value()
        filter1 = str(self.Filter1_ComboBox_1.currentText())
        filter1list = self.Filter1_QListWidget_1.selectedItems()
        filter2 = str(self.Filter2_ComboBox_1.currentText())
        filter2list = self.Filter2_QListWidget_1.selectedItems()
        nop = max(len(filter1list), 1) * max(len(filter2list), 1)  # number of plots
        self.p.extend([None] * nop)
        self.plabels.extend([None] * nop)
        i = self._plot_queries(i,
                               str(self.xcol_ComboBox_1.currentText()),
                               str(self.ycol_ComboBox_1.currentText()),
                               str(self.table_ComboBox_1.currentText()),
                               filter1, filter1list, filter2, filter2list,
                               My_format, curs,
                               self.PlotType_comboBox_1.currentText())
    # --- tab 2 ---
    if not _blankstr(self.table2) and not _blankstr(self.xcol2) and not _blankstr(self.ycol2):
        self.maxtstep = self.spnmaxtstep.value()
        filter1 = str(self.Filter1_ComboBox_2.currentText())
        filter1list = self.Filter1_QListWidget_2.selectedItems()
        filter2 = str(self.Filter2_ComboBox_2.currentText())
        filter2list = self.Filter2_QListWidget_2.selectedItems()
        nop = max(len(filter1list), 1) * max(len(filter2list), 1)
        self.p.extend([None] * nop)
        self.plabels.extend([None] * nop)
        i = self._plot_queries(i, str(self.xcol2), str(self.ycol2),
                               str(self.table2),
                               filter1, filter1list, filter2, filter2list,
                               My_format, curs,
                               self.PlotType_comboBox_2.currentText())
    # --- tab 3 ---
    if not _blankstr(self.table3) and not _blankstr(self.xcol3) and not _blankstr(self.ycol3):
        self.maxtstep = self.spnmaxtstep.value()
        filter1 = str(self.Filter1_ComboBox_3.currentText())
        filter1list = self.Filter1_QListWidget_3.selectedItems()
        filter2 = str(self.Filter2_ComboBox_3.currentText())
        filter2list = self.Filter2_QListWidget_3.selectedItems()
        nop = max(len(filter1list), 1) * max(len(filter2list), 1)
        self.p.extend([None] * nop)
        self.plabels.extend([None] * nop)
        i = self._plot_queries(i, str(self.xcol3), str(self.ycol3),
                               str(self.table3),
                               filter1, filter1list, filter2, filter2list,
                               My_format, curs,
                               self.PlotType_comboBox_3.currentText())
    conn.close()  # close the database
    self.refreshPlot()
    # The long process is done; restore the normal cursor.
    QtGui.QApplication.restoreOverrideCursor()
def PopulateFilterList(self, table, QListWidgetname='', filtercolumn=None):
    """Fill the named QListWidget with the distinct values of *filtercolumn*
    in *table* (loaded via sql_load_fr_db from self.database).

    Fixed: np.unicode was removed in NumPy 1.20 — str() is equivalent.
    """
    sql = ("select distinct " + str(filtercolumn) + " from " + table +
           " order by " + str(filtercolumn))
    list_data = sql_load_fr_db(self.database, sql)
    for post in list_data:
        item = QtGui.QListWidgetItem(str(post[0]))
        getattr(self, QListWidgetname).addItem(item)
if '|' in names: names = names.split(' | ')[1] return code, names.strip() get_english('2.Urgences logistiques | Vital Lines') all_cats=get_all_categories(data.CATEGORY) english_mapping = dict(get_english(x) for x in all_cats) english_mapping['2a'] english_mapping['6c'] def get_code(seq): return [x.split('.')[0] for x in seq if x] all_codes=get_code(all_cats) code_index=pd.Index(np.unicode(all_codes)) dummy_frame=pd.DataFrame(np.zeros((len(data),len(all_codes))),index=data.index,columns=all_codes) for row,cat in zip(data.index,data.CATEGORY): codes=get_code(to_cat_list(cat)) dummy_frame.ix[row,codes]=1 data=data.join(dummy_frame.add_prefix('category_')) from mpl_toolkits.basemap import Basemap def basic_haiti_map(ax=None, lllat=17.25, urlat=20.25,lllon=-75, urlon=-71): m = Basemap(ax=ax, projection='stere', lon_0=(urlon + lllon) / 2, lat_0=(urlat + lllat) / 2, llcrnrlat=lllat, urcrnrlat=urlat, llcrnrlon=lllon, urcrnrlon=urlon, resolution='f')