def exportFragilityValuesToExcel(filename, filename2, databasename):
    import os, subprocess
    from Connection import Connection
    maquina = " -h 127.0.0.1 -p 5434"
    con2 = Connection()
    con = con2.getConnection()
    sql = "Copy (Select idfragility, level, param1, param2 From fragility_curve_level order by idfragility, level) To '" + filename + "' With CSV DELIMITER ',' HEADER;"
    file = "C:/Data/Python/sql_temporal.txt"
    writefile(file, sql)
    cmd = "psql -U " + con.user + maquina + " " + databasename + " < " + file
    print cmd
    subprocess.call(cmd, shell=True)
    sql = "Copy (Select * from fragility_curve_structure order by type_structure) To '" + filename2 + "' With CSV DELIMITER ',' HEADER;"
    file = "C:/Data/Python/sql_temporal.txt"
    writefile(file, sql)
    cmd = "psql -U " + con.user + maquina + " " + databasename + " < " + file
    print cmd
    subprocess.call(cmd, shell=True)
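# Several of the export/import snippets in this collection write a SQL command to a
# temporary text file and then pipe that file into psql. They call a writefile()
# (or w.writefile()) helper that is not shown; the minimal sketch below is an
# assumption about what such a helper would look like, not the original code.
def writefile(path, text):
    # overwrite the temporary file with the SQL command that will be fed to psql
    with open(path, "w") as f:
        f.write(text)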
def createConnection(self, username, password, targetIp, targetPort, timeoutTime):
    connection = Connection(username, password, targetIp, targetPort, timeoutTime)
    connection.start()
    self.connections.append(connection)
    self.currentThreadCount += 1
    if self.verbose:
        print "[*] Adding Target: %s, Testing with username: %s, testing with password: %s" % (targetIp, username, password)
def main():
    connect = Connection()
    try:
        opts, args = getopt.getopt(sys.argv[1:], "d:u:h:", ["help", "database=", "user=", "host="])
    except getopt.GetoptError:
        print print_help()
        sys.exit(2)
    for opt, arg in opts:
        if opt == '--help':
            print print_help()
            sys.exit()
        elif opt in ('-d', '--database'):
            connect.database = arg
        elif opt in ('-u', '--user'):
            connect.user = arg
        elif opt in ('-h', '--host'):
            connect.host = arg
    try:
        connect.password = getpass.getpass()
        prompt = Cmd()
        if connect.database:
            prompt.tables = prompt.requestTables("")
            prompt.databases = prompt.requestDatabases("")
        prompt.to_dir = os.path.join(os.path.expanduser("~"), "imppy-dump")
        readline.set_completer_delims(' \t\n;')
        prompt.cmdloop("Welcome")
    except KeyboardInterrupt:
        print ""
        print "Goodbye"
def deleteFragilitiesCurves(databasename, idfragility):
    import os, subprocess
    from Connection import Connection
    maquina = " -h 127.0.0.1 -p 5434"
    con2 = Connection()
    con = con2.getConnection()
    sql = "delete from fragility_curve_structure where idfragility='" + str(idfragility) + "';\n"
    sql += "delete from fragility_curve_level where idfragility='" + str(idfragility) + "';\n"
    sql += "delete from fragility_curve where idfragility='" + str(idfragility) + "';\n"
    temp_dir = Directory.getPathTempDir()
    #file = "C:/Data/Python/sql_temporal.txt"
    file = temp_dir + "sql_temporal.txt"
    w.writefile(file, sql)
    sql_temp = Directory.getPathTempDir()
    file_err = sql_temp + "errores.txt"
    #cmd = "psql -U " + con.user + maquina + " " + databasename + " < " + file + " > C:/Data/Python/errores.txt"
    cmd = "psql -U " + con.user + maquina + " " + con.database + " < " + file + " > " + file_err
    print cmd
    subprocess.call(cmd, shell=True)
def convert_ctrl_to_sig_connections_at_inlet(self, connection_list, inlet_index):
    """ Auto insert heavy var object inbetween control connections.
    """
    sig_obj = HeavyObject(
        obj_type="var",
        obj_args=[0],
        pos_x=int(self.pos_x),
        pos_y=int(self.pos_y - 5))  # shift upwards a few points

    # add sig~ object to parent graph
    self.parent_graph.add_object(sig_obj)

    # add connection from sig~ to this object
    c = Connection(sig_obj, 0, self, inlet_index, "~f>")
    self.parent_graph._PdGraph__connections.append(c)

    # update the local connections list
    sig_obj.add_connection(c)
    self.add_connection(c)

    # retrieve all control connections
    control_conns = [c for c in connection_list if c.conn_type == "-->"]

    for old_conn in control_conns:
        # get from obj
        from_obj = old_conn.from_obj

        # add connection from fromobj to new sig
        new_conn = Connection(from_obj, old_conn.outlet_index, sig_obj, 0, "-->")
        self.parent_graph._PdGraph__connections.append(new_conn)
        sig_obj.add_connection(new_conn)
        from_obj.add_connection(new_conn)

        # remove connection from fromobj
        self.parent_graph._PdGraph__connections.remove(old_conn)
        from_obj.remove_connection(old_conn)
        self.remove_connection(old_conn)
def testPex(self, file_server, site, site_temp):
    file_server.sites[site.address] = site
    client = FileServer(file_server.ip, 1545)
    client.sites = {site_temp.address: site_temp}
    site_temp.connection_server = client
    connection = client.getConnection(file_server.ip, 1544)

    # Add new fake peer to site
    fake_peer = site.addPeer(file_server.ip_external, 11337, return_peer=True)
    # Add fake connection to it
    fake_peer.connection = Connection(file_server, file_server.ip_external, 11337)
    fake_peer.connection.last_recv_time = time.time()
    assert fake_peer in site.getConnectablePeers()

    # Add file_server as peer to client
    peer_file_server = site_temp.addPeer(file_server.ip, 1544)

    assert "%s:11337" % file_server.ip_external not in site_temp.peers
    assert peer_file_server.pex()
    assert "%s:11337" % file_server.ip_external in site_temp.peers

    # Should not exchange private peers from local network
    fake_peer_private = site.addPeer("192.168.0.1", 11337, return_peer=True)
    assert fake_peer_private not in site.getConnectablePeers(allow_private=False)
    fake_peer_private.connection = Connection(file_server, "192.168.0.1", 11337)
    fake_peer_private.connection.last_recv_time = time.time()

    assert "192.168.0.1:11337" not in site_temp.peers
    assert not peer_file_server.pex()
    assert "192.168.0.1:11337" not in site_temp.peers

    connection.close()
    client.stop()
def post(self):
    conn = Connection()
    try:
        codigo_unidade = str(request.json['codigo_unidade'])
        numero_pavilhao = str(request.json['fk_numero_pavilhao'])
        numero_bloco = str(request.json['fk_numero_bloco'])
    except:
        codigo_unidade = "null"
        numero_pavilhao = "null"
        numero_bloco = "null"
    codigo_cela = str(request.json['codigo'])
    capacidade = str(request.json['capacidade'])
    tipo = Util.formatString(request.json['tipo'])
    command = (
        "insert into celas values (" +
        "(" +
        "select ref(b) from blocos b where " +
        "b.numero = " + numero_bloco + " and " +
        "b.pavilhao.numero = " + numero_pavilhao + " and " +
        "b.pavilhao.unidade_prisional.codigo = " + codigo_unidade +
        "), " +
        codigo_cela + ", " +
        capacidade + ", " +
        tipo + ")"
    )
    return conn.update(command)
def calculateinterdependencies(geom):
    createinterdependency(geom)
    import os, subprocess
    from Connection import Connection
    maquina = " -h 127.0.0.1 -p 5434"
    con2 = Connection()
    con = con2.getConnection()
    # Choose your PostgreSQL version here
    # os.environ['PATH'] += r';C:\Program Files\PostgreSQL\9.5\bin'
    # D:\usbgis\apps\postgresql93\bin
    os.environ['PATH'] += r';D:\usbgis\apps\postgresql93\bin'
    # http://www.postgresql.org/docs/current/static/libpq-envars.html
    os.environ['PGHOST'] = 'localhost'
    os.environ['PGPORT'] = '5432'
    os.environ['PGUSER'] = '******'
    os.environ['PGPASSWORD'] = '******'
    os.environ['PGDATABASE'] = 'roads'
    cmd = "psql -d " + con.database + " -U " + con.user + maquina + " < D:/Data/Python/create_interdependencies.txt"
    print cmd
    subprocess.call(cmd, shell=True)
def post(self):
    conn = Connection()
    unidade = {}
    unidade['codigo'] = str(request.json['codigo'])
    unidade['nome'] = "'" + request.json['nome'] + "'"
    endereco = {}
    endereco['tipo_logadouro'] = "'" + request.json['tipo_logradouro'] + "'"
    endereco['logradouro'] = "'" + request.json['logradouro'] + "'"
    endereco['numero'] = "'" + request.json['num'] + "'"
    endereco['bairro'] = "'" + request.json['bairro'] + "'"
    endereco['cidade'] = "'" + request.json['cidade'] + "'"
    endereco['uf'] = "'" + request.json['uf'] + "'"
    endereco['cep'] = "'" + request.json['cep'] + "'"
    command = (
        "insert into unidades_prisionais values ("
        "Unidade_Prisional_TY (" +
        unidade['codigo'] + "," +
        unidade['nome'] + "," +
        "Endereco_TY (" +
        endereco['tipo_logadouro'] + "," +
        endereco['logradouro'] + "," +
        endereco['numero'] + "," +
        endereco['bairro'] + "," +
        endereco['cidade'] + "," +
        endereco['uf'] + "," +
        endereco['cep'] + ")" ")" ")")
    return conn.update(command)
def post(self):
    conn = Connection()
    try:
        codigo_unidade = str(request.json['codigo_unidade'])
        numero_pavilhao = str(request.json['fk_numero_pavilhao'])
        numero_bloco = str(request.json['fk_numero_bloco'])
    except:
        codigo_unidade = "null"
        numero_pavilhao = "null"
        numero_bloco = "null"
    codigo_cela = str(request.json['codigo'])
    capacidade = str(request.json['capacidade'])
    tipo = Util.formatString(request.json['tipo'])
    command = (
        "update celas c set " +
        "c.capacidade = " + capacidade + ", " +
        "c.tipo = " + tipo + " where " +
        "c.codigo = " + codigo_cela + " and " +
        "c.bloco.numero = " + numero_bloco + " and " +
        "c.bloco.pavilhao.numero = " + numero_pavilhao + " and " +
        "c.bloco.pavilhao.unidade_prisional.codigo = " + codigo_unidade
    )
    return conn.update(command)
def addNode(self, innovationHistory):
    if len(self.genes) == 0:
        self.addConnection(innovationHistory)
        return None
    rand = random.randint(0, len(self.genes) - 1)
    temp = self.genes[rand]
    while temp.fromNode == self.biasNode and len(self.genes) != 1:
        temp = self.genes[random.randint(0, len(self.genes) - 1)]
    temp.enabled = False
    toAdd = Node(self.nextNode)
    connectionInnovationNumber = self.getInnovationNumber(innovationHistory, toAdd, temp.toNode)
    self.genes.append(Connection(temp.fromNode, toAdd, 1, connectionInnovationNumber))
    toAdd.layer = temp.fromNode.layer + 1
    connectionInnovationNumber = self.getInnovationNumber(innovationHistory, self.biasNode, toAdd)
    self.genes.append(Connection(self.biasNode, toAdd, 0, connectionInnovationNumber))
    if toAdd.layer == temp.toNode.layer:
        for n in self.nodes:
            if n.layer >= toAdd.layer:
                n.layer += 1
        self.layers += 1
    self.nodes.append(toAdd)
    self.connect()
def exportLossesToText(filenameout, databasename):
    import os, subprocess
    from Connection import Connection
    maquina = " -h 127.0.0.1 -p 5434"
    con2 = Connection()
    con = con2.getConnection()
    #sql = "Copy (select gid, name, area, material, unitcost, totalcost, damage from popoliloss order by gid) To '" + filenameout + "' With CSV DELIMITER ',' HEADER;"
    sql = "Copy (Select gid, name, ROUND(unitcost,2) From popoliloss order by gid) To '" + filenameout + "' With CSV DELIMITER ',' ;"
    dir_temp = Directory.getPathTempDir()
    #file = "C:/Data/Python/sql.txt"
    file = dir_temp + "sql.txt"
    w.writefile(file, sql)
    logging.debug("sql")
    logging.debug(sql)
    cmd = "psql -U " + con.user + maquina + " " + databasename + " < " + file
    print cmd
    subprocess.call(cmd, shell=True)
def get(self):
    conn = Connection()
    codigo_unidade = str(request.args['fk_codigo_unidade'])
    numero_pavilhao = str(request.args['fk_numero_pavilhao'])
    numero_bloco = str(request.args['fk_numero_bloco'])
    codigo_cela = str(request.args['codigo'])
    command = (
        "select " + Util.formatQuery("p", "prisioneiro") +
        " from prisioneiros p" +
        " where p.cela =" +
        " (select ref(c) from celas c where c.codigo = " + codigo_cela + ")"
    )
    prisioneiros = conn.query(command)
    if (prisioneiros['success'] == False):
        return prisioneiros
    resultado = []
    for data in prisioneiros['data']:
        resultado.append(Util.formatResponse(data, prisioneiros['columns'], []))
    return resultado
def importLossesFromText(filename, databasename):
    import os, subprocess
    from Connection import Connection
    maquina = " -h 127.0.0.1 -p 5434"
    con2 = Connection()
    con = con2.getConnection()
    #sql = "Copy popoliloss from '" + filename + "' delimiter |"
    sql = "Copy popoliloss(gid, name, unitcost) from '" + filename + "' delimiter ','"
    dir_temp = Directory.getPathTempDir()
    #file = "C:/Data/Python/sql_temporal.txt"
    file = dir_temp + "sql_temporal.txt"
    w.writefile(file, sql)
    logging.debug("sql")
    logging.debug(sql)
    cmd = "psql -U " + con.user + maquina + " " + databasename + " < " + file
    print cmd
    subprocess.call(cmd, shell=True)
    print " Loss imported from '" + filename + "'"
def insertFragilityCurves(databasename, id):
    import os, subprocess
    from Connection import Connection
    maquina = " -h 127.0.0.1 -p 5434"
    con2 = Connection()
    con = con2.getConnection()
    sql = "insert into fragility_curve (idfragility, name) values('" + str(id) + "','curva')"
    print " sql fragility insert " + sql
    temp_dir = Directory.getPathTempDir()
    #file = "C:/Data/Python/sql.txt"
    file = temp_dir + "sql.txt"
    w.writefile(file, sql)
    cmd = "psql -U " + con.user + maquina + " " + databasename + " < " + file
    print cmd
    subprocess.call(cmd, shell=True)
def deleteLoss(databasename):
    import os, subprocess
    from Connection import Connection
    maquina = " -h 127.0.0.1 -p 5434"
    con2 = Connection()
    con = con2.getConnection()
    sql = "delete from popoliloss "
    #file = "C:/Data/Python/sql_temporal.txt"
    dir_temp = Directory.getPathTempDir()
    file = dir_temp + "sql_temporal.txt"
    w.writefile(file, sql)
    logging.debug("sql")
    logging.debug(sql)
    cmd = "psql -U " + con.user + maquina + " " + databasename + " < " + file
    print cmd
    subprocess.call(cmd, shell=True)
def post(self):
    conn = Connection()
    unidade = {}
    unidade['codigo'] = str(request.json['codigo'])
    unidade['nome'] = Util.formatString(request.json['nome'])
    unidade['tipo_logadouro'] = Util.formatString(request.json['tipo_logradouro'])
    unidade['logradouro'] = Util.formatString(request.json['logradouro'])
    unidade['numero'] = Util.formatString(request.json['num'])
    unidade['bairro'] = Util.formatString(request.json['bairro'])
    unidade['cidade'] = Util.formatString(request.json['cidade'])
    unidade['uf'] = Util.formatString(request.json['uf'])
    unidade['cep'] = Util.formatString(request.json['cep'])
    command = (
        "update unidades_prisionais up set " +
        "up.nome = " + unidade['nome'] + ", " +
        "up.endereco.tipo_logadouro = " + unidade['tipo_logadouro'] + ", " +
        "up.endereco.logradouro = " + unidade['logradouro'] + ", " +
        "up.endereco.numero = " + unidade['numero'] + ", " +
        "up.endereco.bairro = " + unidade['bairro'] + ", " +
        "up.endereco.cidade = " + unidade['cidade'] + ", " +
        "up.endereco.uf = " + unidade['uf'] + ", " +
        "up.endereco.cep = " + unidade['cep'] +
        " where up.codigo = " + unidade['codigo']
    )
    return conn.update(command)
def __init__(self, host, port):
    self.connection = Connection(host, port)
    if self.connection.isConnected:
        while (True):
            self.connection.resetSecreto()
            opcion = Menu.menuLogin()
            if opcion == 1:
                self.connection.send({'operacion': 'Login'})
                response = self.connection.receive()
                if (response['operacion'] == 'Login' and response['resultado'] == 'OK'):
                    logged = self.login()
                    if (logged):
                        self.menu()
            elif opcion == 2:
                self.connection.send({'operacion': 'Register'})
                response = self.connection.receive()
                if (response['operacion'] == 'Register' and response['resultado'] == 'OK'):
                    self.register()
            elif opcion == 3:
                self.connection.send({'operacion': 'Recover'})
                response = self.connection.receive()
                if (response['operacion'] == 'Recover' and response['resultado'] == 'OK'):
                    self.recoverPassword()
            elif opcion == 4:
                print("Sesion terminada.")
                self.connection.send({'operacion': 'Exit'})
                # Nothing else to do; the thread simply dies.
                self.connection.close()
                break
def assignFragilitiesCurvesToBuildings(databasename):
    import os, subprocess
    from Connection import Connection
    maquina = " -h 127.0.0.1 -p 5434"
    con2 = Connection()
    con = con2.getConnection()
    sql = ""
    for x in range(0, 8800):
        # Y = divmov(x, 3)
        y = x % 2
        sqlinsert = "insert into fragility_curve_structure values('" + str(x) + "', '" + str(y + 1) + "');\n"
        #print sql
        sql += sqlinsert
    file = "C:/Data/Python/sql_temporal.txt"
    writefile(file, sql)
    cmd = "psql -U " + con.user + maquina + " " + databasename + " < " + file + " > C:/Data/Python/errores.txt"
    print cmd
    subprocess.call(cmd, shell=True)
def deleteValutazione(databasename):
    import os, subprocess
    from Connection import Connection
    con2 = Connection()
    con = con2.getConnection()
    sql = "delete from valutazione "
    #maquina = " -h 127.0.0.1 -p 5434"
    maquina = " -h " + con.host + " -p " + con.port
    from Utils import Directory2
    dir = Directory2.getPathScripts()
    # file = "C:/Data/Python/sql_temporal.txt"
    file = dir + "sql_temporal.txt"
    Writes.writefile(file, sql)
    logging.debug("sql")
    logging.debug(sql)
    cmd = "psql -U " + con.user + maquina + " " + con.database + " < " + file
    print cmd
    subprocess.call(cmd, shell=True)
def exportFragilitiesAssignationToExcel(filename, databasename):
    import os, subprocess
    from Connection import Connection
    maquina = " -h 127.0.0.1 -p 5434"
    con2 = Connection()
    con = con2.getConnection()
    #sql = "Copy (Select * From valutazione order by idstructure, idparam) To '" + filename + "' With CSV DELIMITER ',' HEADER;"
    sql = "Copy (Select * From fragility_curve_structure order by type_structure, idfragility) To '" + filename + "' With CSV DELIMITER ',';"
    print sql
    temp_dir = Directory.getPathSqlDir()
    file = temp_dir + "sql_temporal.txt"
    w.writefile(file, sql)
    logging.debug("sql")
    logging.debug(sql)
    cmd = "psql -U " + con.user + maquina + " " + con.database + " < " + file
    print cmd
    subprocess.call(cmd, shell=True)
    from Utils import XLSConverterDefinitivo
    XLSConverterDefinitivo.convertToXLS(filename)
def createFragilityTable(databasename):
    import os, subprocess
    from Connection import Connection
    maquina = " -h 127.0.0.1 -p 5434"
    con2 = Connection()
    con = con2.getConnection()
    sql_dir = Directory.getPathSqlDir()
    #file = "C:\Users\AG\.qgis2\python\plugins\SeismicRisk\Database\SQL\create_fragility.txt"
    file = sql_dir + "create_fragility.txt"
    sql_temp = Directory.getPathTempDir()
    file_err = sql_temp + "errores.txt"
    #cmd = "psql -U " + con.user + maquina + " " + databasename + " < " + file + " > C:/Data/Python/errores.txt"
    cmd = "psql -U " + con.user + maquina + " " + con.database + " < " + file + " > " + file_err
    print " Fragility Curves 435 "
    print cmd
    logging.debug(cmd)
    subprocess.call(cmd, shell=True)
def listener(self):
    """ Listener for the "Connect" button """
    ip = self.dbcon.lineEdit.text()
    um = self.dbcon.lineEdit_2.text()
    pw = self.dbcon.lineEdit_3.text()
    db = self.dbcon.lineEdit_4.text()
    """ip = 'localhost'
    um = 'ww'
    pw = 'ww'
    db = 'wienwahl'"""
    dbms = 'MYSQL'
    if dbms == 'MYSQL':
        link = 'mysql+mysqldb://'
        link += um + ':' + pw + '@' + ip + '/' + db + '?charset=utf8'
    self.conn = Connection(link, self.model)
    self.conn.connect()
    self.window.close()
class User:
    def __init__(self, id, kdc_connection, other_user_connection, other_user_id):
        self.id = int(id)
        self.kdc_connection = Connection(kdc_connection)
        self.other_user_connection = Connection(other_user_connection)
        self.other_user_id = other_user_id
        self._key = random.randint(1, (1 << 10) - 1)  # generate a random 10 bit key for DES
        self.timestamps = set()
        self.session_keys = {}

    # respond to diffie hellman request with A and the user's private key
    # encrypted with the shared secret
    @wait_for_message
    def diffie_hellman_response(self):
        message = self.kdc_connection.get_message()
        if message != '':
            p = stoo(message[:512])
            g = stoo(message[512:1024])
            B = stoo(message[1024:])
            a = random.randint(3, p - 2)
            A = pow(g, a, p)
            s = pow(B, a, p)
            s %= (1 << 10)  # only need last 10 bits for DES
            encrypted_key = encrypt(self._key, s)
            message = otos(A, 512) + otos(encrypted_key, 2)
            self.kdc_connection.send_message(message)
            return True

    # use SHA-512 as a confirmation hash function
    @staticmethod
    def confirm(message):
        return sha512(bytes(message, 'utf-8')).hexdigest()
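# For context, a sketch of the initiating side that diffie_hellman_response() answers.
# This is an illustration only, not part of the original code: the peer is assumed to
# pick p, g and a secret b, send p||g||B, and recover the 10-bit DES key from the reply
# using the shared secret s = A**b mod p. decrypt() is a hypothetical inverse of encrypt().
def diffie_hellman_request(kdc_connection, p, g):
    b = random.randint(3, p - 2)
    B = pow(g, b, p)
    kdc_connection.send_message(otos(p, 512) + otos(g, 512) + otos(B, 512))
    reply = kdc_connection.get_message()   # 512-char A followed by 2-char encrypted key
    A = stoo(reply[:512])
    s = pow(A, b, p) % (1 << 10)           # only the last 10 bits are used for DES
    return decrypt(stoo(reply[512:]), s)   # hypothetical decrypt() mirroring encrypt()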
def deleteFragilitiesAssignation(databasename):
    import os, subprocess
    from Connection import Connection
    maquina = " -h 127.0.0.1 -p 5434"
    con2 = Connection()
    con = con2.getConnection()
    sql = "delete from fragility_curve_structure "
    temp_dir = Directory.getPathTempDir()
    #file = "C:/Data/Python/sql_temporal.txt"
    file = temp_dir + "sql_temporal.txt"
    w.writefile(file, sql)
    logging.debug("sql")
    logging.debug(sql)
    cmd = "psql -U " + con.user + maquina + " " + con.database + " < " + file
    print cmd
    subprocess.call(cmd, shell=True)
def importFragilitiesAssignationFromText(filename, databasename):
    import os, subprocess
    from Connection import Connection
    maquina = " -h 127.0.0.1 -p 5434"
    con2 = Connection()
    con = con2.getConnection()
    sql = "Copy fragility_curve_structure from '" + filename + "' delimiter ','"
    print sql
    temp_dir = Directory.getPathTempDir()
    file = temp_dir + "sql_temporal.txt"
    w.writefile(file, sql)
    logging.debug("sql")
    logging.debug(sql)
    cmd = "psql -U " + con.user + maquina + " " + con.database + " < " + file
    print cmd
    subprocess.call(cmd, shell=True)
def train(self, counter=0, weights=[], value=0, rate=0.5):
    weights = weights
    goal = 2
    if value > goal + 0.00001 or value < goal - 0.00001:
        if counter == 0:
            floats = []
            float = 0.0
            while float < 1.0:
                floats.append(float)
                float += 0.01
            for out_neuron in self.output_neurons:
                out_neuron.connections = []
                for in_neuron in self.input_neurons:
                    weight = random.choice(floats)
                    weights.append(weight)
                    out_neuron.add_connection(Connection(in_neuron, weight))
        else:
            for out_neuron in self.output_neurons:
                out_neuron.connections = []
                for i in range(0, len(self.input_neurons)):
                    weight_change = rate * (goal - value) * self.input_neurons[i].get_value()
                    weight = weights[i] + weight_change
                    weights[i] = weight
                    out_neuron.add_connection(Connection(self.input_neurons[i], weight))
        return False, weights
    elif goal + 0.00001 > value > goal - 0.00001:
        return True, weights
def handleIncomingConnection(self, sock, addr):
    ip, port = addr[0:2]
    ip = ip.lower()
    if ip.startswith("::ffff:"):  # IPv6 to IPv4 mapping
        ip = ip.replace("::ffff:", "", 1)
    self.num_incoming += 1

    if not self.had_external_incoming and not helper.isPrivateIp(ip):
        self.had_external_incoming = True

    # Connection flood protection
    if ip in self.ip_incoming and ip not in self.whitelist:
        self.ip_incoming[ip] += 1
        if self.ip_incoming[ip] > 6:  # Allow 6 in 1 minute from same ip
            self.log.debug("Connection flood detected from %s" % ip)
            time.sleep(30)
            sock.close()
            return False
    else:
        self.ip_incoming[ip] = 1

    connection = Connection(self, ip, port, sock)
    self.connections.append(connection)
    if ip not in config.ip_local:
        self.ips[ip] = connection
    connection.handleIncomingConnection(sock)
def create_full_mesh(self, weights):
    if len(self.hidden_neurons) == 0:
        if len(weights) != len(self.input_neurons) * len(self.output_neurons):
            raise Exception
        index = 0
        for w_neuron in self.output_neurons:
            for i_neuron in self.input_neurons:
                w_neuron.add_connection(Connection(i_neuron, weights[index]))
                index += 1
    else:
        if len(weights) != len(self.input_neurons) * len(self.hidden_neurons) + len(self.hidden_neurons) * len(self.output_neurons):
            raise Exception
        index = 0
        for hid_neuron in self.hidden_neurons:
            for in_neuron in self.input_neurons:
                hid_neuron.add_connection(Connection(in_neuron, weights[index]))
                index += 1
        for out_neuron in self.output_neurons:
            for hid_neuron in self.hidden_neurons:
                out_neuron.add_connection(Connection(hid_neuron, weights[index]))
                index += 1
def createMaterial(databasename):
    maquina = " -h 127.0.0.1 -p 5434"
    con2 = Connection()
    con = con2.getConnection()
    sql = "alter table popoliloss add material text;"
    print " sql losses " + sql
    dir_temp = Directory.getPathTempDir()
    #file = "C:/Data/Python/sql.txt"
    file = dir_temp + "sql.txt"
    w.writefile(file, sql)
    file_err = dir_temp + "errores.txt"
    cmd = "psql -U " + con.user + maquina + " " + databasename + " < " + file + " > " + file_err
    subprocess.call(cmd, shell=True)
def importValutazioneFromText(filename, databasename):
    import os, subprocess
    from Connection import Connection
    #maquina = " -h 127.0.0.1 -p 5434"
    con2 = Connection()
    con = con2.getConnection()
    sql = "Copy valutazione from '" + filename + "' delimiter ','"
    print "\n"
    print sql
    from Utils import Directory2
    dir = Directory2.getPathScripts()
    # file = "C:/Data/Python/sql_temporal.txt"
    file = dir + "sql_temporal.txt"
    Writes.writefile(file, sql)
    logging.debug("sql")
    logging.debug(sql)
    maquina = " -h " + con.host + " -p " + con.port
    cmd = "psql -U " + con.user + maquina + " " + con.database + " < " + file
    print cmd
    subprocess.call(cmd, shell=True)
def __init__(self):
    print("ZeMo is Running")
    self.screen = Screen()
    self.screen.drawImage("logo.png", self.screen.background.get_rect(), 223, 57)
    self.conn = Connection()
    self.done = False
    self.takeReadFlag = True
    self.readingNow = False
    self.waitTime = 0
    jsonFile = self.conn.getJSONconfig()
    self.daysToKeep = jsonFile["settings"]["days"]
    self.readsPerDay = jsonFile["settings"]["reads"]
    self.timeList = []
    piName = self.conn.getPiName()
    self.phSensor = PH(jsonFile, piName, self.screen)
    self.condSensor = Conductivity(jsonFile, piName, self.screen)
    self.dOSensor = DissolvedOxygen(jsonFile, piName, self.screen)
    self.tempSensor = Temperature(jsonFile, piName, self.screen)
    self.sensorList = []
    self.sensorList.append(self.tempSensor)
    self.sensorList.append(self.condSensor)
    self.sensorList.append(self.phSensor)
    self.sensorList.append(self.dOSensor)
    for sensor in self.sensorList:
        sensor.takeRead(self.conn)
    self.t2 = Thread(target=App.checkTime_loop, args=(self,))
    self.t2.start()
    self.update_reads_per_day()
def addTimes(self):
    for ip in self.IPS:
        start = time()
        conn = Connection(ip)
        if conn.httpsConn():
            end = time()
            self.RES.append([ip, end - start])
    return len(self.RES)
def worker(self, name, ips):
    res = []
    for ip in ips:
        conn = Connection(ip)
        if conn.httpsConn():
            if isGoogleSearch('http', ip) or isGoogleSearch('https', ip):
                print 'Worker ' + name + ': ', ip
                res.append(ip)
    appendLst2File(self.Output, res)
def createConnection(self, username, password, targetIp, targetPort, timeoutTime):
    connection = Connection(username, password, targetIp, targetPort, timeoutTime)
    connection.start()
    self.connections.append(connection)
    self.currentThreadCount += 1
    if self.verbose:
        print "[*] Adding Target: {0}:{1}, Testing with username: {2}, testing with password: {3}".format(
            targetIp, str(targetPort), username, password
        )
def _start_new_connection(self, connection_id):
    """
    Starts up a new connection thread to wait for a new client to connect
    :param connection_id: identifier for the connection object
    :return:
    """
    print("start_new_connection")
    connection = Connection(self.socket, self.connection_channel)
    self.connections.append(connection)
    connection.start(connection_id)
def ballRolling(self):
    print 'ball rolling'
    print sys.path
    con = Connection("/home/dur/Projects/WebSocket/config.conf")
    con.connect()
    # test = TestSender(con)
    # test.start()
    # while True:
    #     message = con.get_message()
    #     print "Got %s" % message
    return
def nextTest():
    if len(classes) == 0:
        #oh the hacks.. oh the horror
        Connection.getInstance().close()
        app.Exit()
    else:
        c = classes.pop(0)
        test = unittest.TestLoader().loadTestsFromTestCase(c)
        def onReady():
            unittest.TextTestRunner(verbosity=2).run(test)
            c.customTeardown()
            wx.FutureCall(1, nextTest)
        c.customSetup(onReady)
def add_new_connection(self, packet):
    conn = Connection()
    tcp = packet.getlayer(TCP)
    conn.set_dst_ip(packet.getlayer(IP).dst)
    conn._curr_dst_IP = packet.getlayer(IP).dst
    conn._curr_src_IP = packet.getlayer(IP).src
    conn._curr_dst_port = tcp.dport
    conn._curr_src_port = tcp.sport
    conn.increase_volume(len(packet.getlayer(IP)))
    self._active_connections.append(conn)
    conn._current_packet_number += 1
def test_getInstance(self):
    """
    Verify that the instance returned is indeed a singleton and that
    it is an instance of the Connection class.
    """
    # connection.__instance == connection._Connection__instance
    # python name mangling x.x
    # https://docs.python.org/2/reference/expressions.html#atom-identifiers
    self.assertIs(Connection._Connection__instance, None)
    instance = Connection.getInstance()
    self.assertIs(Connection._Connection__instance, instance)
    otherInstance = Connection.getInstance()
    self.assertIs(Connection._Connection__instance, instance)
    self.assertIs(instance, otherInstance)
    self.assertIsInstance(instance, Connection)
def set_datasource(self, driverName):
    if driverName in self.missDrivers:
        self.ui.driverBox.setCurrentIndex(0)
        self.iface.messageBar().pushMessage(u"Nainstalovaná verze GDAL nepodporuje ovladač {}".format(driverName),
                                            level=QgsMessageBar.CRITICAL, duration=5)
        return 0
    if driverName in ['SQLite', 'GPKG', 'ESRI Shapefile']:
        connString = QtGui.QFileDialog.getSaveFileName(self, u'Vybrat/vytvořit soubor',
                                                       'output.{}'.format(self.driverTypes[driverName]),
                                                       '{} (*.{})'.format(driverName, self.driverTypes[driverName]),
                                                       QtGui.QFileDialog.DontConfirmOverwrite)
        if not connString:
            self.ui.driverBox.setCurrentIndex(0)
            return 0
        driver = ogr.GetDriverByName(str(driverName))
        capability = driver.TestCapability(ogr._ogr.ODrCCreateDataSource)
        if capability:
            self.ui.driverBox.setToolTip(connString)
            self.option['driver'] = driverName
            self.option['datasource'] = connString
        else:
            self.iface.messageBar().pushMessage(u"Soubor {} nelze vybrat/vytvořit".format(connString),
                                                level=QgsMessageBar.CRITICAL, duration=5)
            self.ui.driverBox.setCurrentIndex(0)
    elif driverName in ['PostgreSQL', 'MSSQLSpatial']:
        self.connection = Connection(self.iface, driverName, self)
        self.connection.setModal(True)
        self.connection.show()
        self.connection.setWindowTitle(u'Připojení k databázi {}'.format(driverName))
def __init__(self, justPlots=False):
    self.__name__ = "Core"
    self.configManager = ConfigurationManager()
    # These return True or False depending on whether loading the conf was a success.
    # It should be checked if the conf was loaded successfully and failures should be logged.
    self.configManager.loadConf(CONFIG_CORE, True)
    self.configManager.loadConf(CONFIG_SETTINGS, True)
    self.configManager.loadConf(CONFIG_FORMS, True)
    self.configManager.loadConf(CONFIG_URLMAP, True)
    self.configManager.loadConf(CONFIG_MESSAGES, True)
    self.moduleManager = ModuleManager(self)
    self.settingsManager = SettingsManager(self)
    self.clientManager = ClientManager(self)
    self.sensorManager = SensorManager(self)
    self.deviceManager = DeviceManager(self)
    self.taskManager = TaskManager(self)
    self.messageManager = MessageManager(self)
    self.logging = Logging(self)
    if self.settingsManager.equals("plottype", "matplotlib"):
        from Plot import Plot
        self.plot = Plot(self)
    self.protocol = Protocol(self)
    if not justPlots:
        self.connection = Connection(self)
    if not justPlots:
        self.scheduler = Scheduler()
    if not justPlots:
        # Currently binds to localhost. But this needs to be fixed so other connections can be listened to too.
        self.webServer = WebServer(self.connection.getLocalIP(), self.settingsManager.getValueByName("listenport"))
def __init__(self):
    self.connection = Connection.getInstance()
    self.presentation = Presentation(path=None)
    # the logged in user, None if logged out
    self.user = None
def __init__(self, host, port, nick):
    self.nick = nick
    self.user = '******' % (nick, host, nick)
    self.buffer_line = ''
    self.running = False
    self.connection = Connection(host, port)
def createConnection(self):
    """
    Creates a new connection object. Should be called prior to connecting.
    """
    self.connection = Connection(Settings.NICKS, Settings.LOGIN_USER, Settings.LOGIN_DOMAIN,
                                 Settings.LOGIN_REAL_NAME, Settings.IRC_SERVER, Settings.IRC_PORT)
    self.connection.console = self
    self.connection.quitMessage = Settings.QUIT_MESSAGE
    self.connection.autoJoinChannels = Settings.AUTO_JOIN_CHANNELS
    self.connection.responseSystem = self.responseSystem
    self.responseSystem.connection = self.connection
def handleIncomingConnection(self, sock, addr):
    ip, port = addr

    # Connection flood protection
    if ip in self.ip_incoming:
        self.ip_incoming[ip] += 1
        if self.ip_incoming[ip] > 3:  # Allow 3 in 1 minute from same ip
            self.log.debug("Connection flood detected from %s" % ip)
            time.sleep(30)
            sock.close()
            return False
    else:
        self.ip_incoming[ip] = 0

    connection = Connection(self, ip, port, sock)
    self.connections.append(connection)
    self.ips[ip] = connection
    connection.handleIncomingConnection(sock)
def customSetup(onReady):
    c = AuthSuccessTest
    c.centralProcess = subprocess.Popen(['python', 'Connection/launchCC.py'])
    time.sleep(1)
    def onResponse(response):
        c.response = response
        onReady()
    c.con = Connection.getInstance()
    c.con.authenticate('student', 'asdf', onResponse)
def post_download(messageid):
    post = Settings.POST_DB.get_post_form_database(messageid)
    con = Connection(True)
    post.get_nzb_content(con)
    con.disconnect()
    respons = SABnzbd.Helper.add_spotnet_post(post)
    if "Error" in respons:
        oc = ObjectContainer(
            DirectoryObject(
                key=Callback(dummy),
                title="Error : %s" % respons['Error'],
            ),
        )
    else:
        oc = ObjectContainer(
            DirectoryObject(
                key=Callback(dummy),
                title="Done",
            ),
        )
    return oc
class ClientInterface:
    def __init__(self, timeout=10, numRetries=3, verify=True,
                 serverCertFile=os.path.join(os.path.dirname(__file__), os.pardir, 'certificates', 'devserver.cer'),
                 clientPFXFile=os.path.join(os.path.dirname(__file__), os.pardir, 'certificates', 'devclient.pfx'),
                 PFXPasswd='portknocking'):
        """
        This function initializes the Port-Knocking client library "knock"
        Set context parameters and load required certificates

        timeout:        Time in seconds to wait between retries. Default: 10
        numRetries:     Number of Retries. Default: 3
        verify:         Verify if the target Port was successfully opened. Only TCP is supported. Default: True
        serverCertFile: Path to the Server Certificate File encoded in DER. Default: certificates/devserver.cer
        clientPFXFile:  Path to the Client Certificate with Private Key in PKCS#7 Format (.pfx). Default: certificates/devclient.pfx
        PFXPasswd:      Password to decrypt @clientPFXFile
        """
        self.connectionHandler = Connection(CertUtil(serverCertFile, clientPFXFile, PFXPasswd).initializeCryptoEngine(),
                                            timeout, numRetries, verify)

    def knockOnPort(self, host, port, protocol=PROTOCOL.TCP):
        """
        Actual port-knocking function
        Generate port-knocking packet for opening the requested @port on @host.
        Can be used to create TCP or UDP connections; Defaults to TCP connection if @protocol is not given.
        After sending the port-knocking request verifies that the target @port is open, and if necessary retries the port-knocking

        host:     Target @host, on which the application is running
        port:     Port to open on target @host
        protocol: Requested Target Protocol. Default: TCP
        """
        LOG.debug('Knocking %s on port %s', host, port)
        self.connectionHandler.knockOnPort(host, port, protocol)
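# Minimal usage sketch for the class above. The host and ports are placeholders, the
# default certificate paths are assumed to exist, and PROTOCOL.UDP is assumed to be
# defined alongside the PROTOCOL.TCP seen in the original code.
if __name__ == '__main__':
    client = ClientInterface(timeout=5, numRetries=2)
    client.knockOnPort('192.0.2.10', 2222)                 # TCP knock (default protocol)
    client.knockOnPort('192.0.2.10', 4000, PROTOCOL.UDP)   # explicit UDP knock (assumed constant)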
def __init__(self):
    self.connection = Connection.getInstance()
    self.presentation = Presentation(path=None)
    self.roster = Roster()
    self.forum = Forum()
    self.questionList = QuestionList()
    self.__saveListeners = {}
    # set in WhiteboardNav
    self.RefreshSlide = None
    self.Redraw = None
    # the logged in user, None if logged out
    self.user = None
def __init__(self, url, build_type_id):
    self.connection = Connection(url)
    try:
        root = self.connection.get_href("/guestAuth/app/rest", {})
    except Exception:
        raise NoConnection
    if not 'TeamCity REST API' in root:
        raise NoConnection
    try:
        self.build_type = self.get_build_type(build_type_id)
    except Exception:
        raise NoBuild
def getConnection(self, ip=None, port=None, peer_id=None, create=True, site=None):
    if ip.endswith(".onion") and self.tor_manager.start_onions and site:  # Site-unique connection for Tor
        key = ip + site.address
    else:
        key = ip

    # Find connection by ip
    if key in self.ips:
        connection = self.ips[key]
        if not peer_id or connection.handshake.get("peer_id") == peer_id:  # Filter by peer_id
            if not connection.connected and create:
                succ = connection.event_connected.get()  # Wait for connection
                if not succ:
                    raise Exception("Connection event return error")
            return connection

    # Recover from connection pool
    for connection in self.connections:
        if connection.ip == ip:
            if peer_id and connection.handshake.get("peer_id") != peer_id:  # Does not match
                continue
            if ip.endswith(".onion") and self.tor_manager.start_onions and connection.site_lock != site.address:  # For different site
                continue
            if not connection.connected and create:
                succ = connection.event_connected.get()  # Wait for connection
                if not succ:
                    raise Exception("Connection event return error")
            return connection

    # No connection found
    if create:  # Allow to create new connection if not found
        if port == 0:
            raise Exception("This peer is not connectable")
        try:
            if ip.endswith(".onion") and self.tor_manager.start_onions and site:  # Lock connection to site
                connection = Connection(self, ip, port, site_lock=site.address)
            else:
                connection = Connection(self, ip, port)
            self.ips[key] = connection
            self.connections.append(connection)
            succ = connection.connect()
            if not succ:
                connection.close()
                raise Exception("Connection event return error")
        except Exception, err:
            self.log.debug("%s Connect error: %s" % (ip, Debug.formatException(err)))
            connection.close()
            raise err
        return connection
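# Hedged usage sketch for the pooled getConnection() above: a caller reuses an existing
# Connection to a peer when one is open and only falls back to creating a new one. The
# server object is a placeholder, and request("ping") is assumed from the Connection API.
def send_ping(server, ip, port):
    try:
        connection = server.getConnection(ip, port)  # reuses a pooled connection if possible
        return connection.request("ping")            # assumed Connection method, for illustration
    except Exception, err:
        server.log.debug("Ping to %s failed: %s" % (ip, err))
        return None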
class Bot:
    def __init__(self, host, port, nick):
        self.nick = nick
        self.user = '******' % (nick, host, nick)
        self.buffer_line = ''
        self.running = False
        self.connection = Connection(host, port)

    def start(self):
        self.connection.start()
        self.send('NICK %s' % self.nick)
        self.send('USER %s' % self.user)
        self.send('JOIN #idtest')
        self.send('PRIVMSG #idtest LULZ')
        self.running = True
        while self.running:
            line = self.connection.get_line()
            if line:
                self.handle_line(line)

    def stop(self):
        self.running = False
        self.connection.disconnect()

    def handle_line(self, line):
        self.keep_alive(line)
        print line

    def keep_alive(self, line):
        if line[0:4] == 'PING':
            self.connection.send('PONG %s' % line[5:])

    def send(self, x):
        self.connection.send(x)
        print '>>> %s' % x
class ssl_dispatcher(asyncore.dispatcher):
    def create_socket(self, ssl_context):
        self.family_and_type = socket.AF_INET, socket.SOCK_STREAM
        self.ssl_ctx = ssl_context
        self.socket = Connection(self.ssl_ctx)
        # self.socket.setblocking(0)
        self.add_channel()

    def connect(self, addr):
        self.socket.setblocking(1)
        self.socket.connect(addr)
        self.socket.setblocking(0)

    def recv(self, buffer_size=4096):
        """Receive data over SSL."""
        return self.socket.recv(buffer_size)

    def send(self, buffer):
        """Send data over SSL."""
        return self.socket.send(buffer)
def __init__(self, timeout=10, numRetries=3, verify=True,
             serverCertFile=os.path.join(os.path.dirname(__file__), os.pardir, 'certificates', 'devserver.cer'),
             clientPFXFile=os.path.join(os.path.dirname(__file__), os.pardir, 'certificates', 'devclient.pfx'),
             PFXPasswd='portknocking'):
    """
    This function initializes the Port-Knocking client library "knock"
    Set context parameters and load required certificates

    timeout:        Time in seconds to wait between retries. Default: 10
    numRetries:     Number of Retries. Default: 3
    verify:         Verify if the target Port was successfully opened. Only TCP is supported. Default: True
    serverCertFile: Path to the Server Certificate File encoded in DER. Default: certificates/devserver.cer
    clientPFXFile:  Path to the Client Certificate with Private Key in PKCS#7 Format (.pfx). Default: certificates/devclient.pfx
    PFXPasswd:      Password to decrypt @clientPFXFile
    """
    self.connectionHandler = Connection(CertUtil(serverCertFile, clientPFXFile, PFXPasswd).initializeCryptoEngine(),
                                        timeout, numRetries, verify)
def read_connection_from_sql(sql_file, sql_statement, connections):
    conn = sqlite3.connect(sql_file)
    cursor = conn.cursor()
    cursor.execute(sql_statement)
    for entry in cursor.fetchall():
        conn = Connection()
        conn._dst_IP = entry[1]
        conn._DNS = entry[2]
        conn._rDNS = entry[3]
        conn._current_volume = entry[4]
        conn._parentBatchID = entry[6]
        for pattern in cdn_regex:
            if pattern.search(entry[2]) or pattern.search(entry[3]):
                conn._is_CDN_connection = True
                break
        connections.append(conn)
def cdn_analysis_helper(container, type):
    sql = "select * from %sConnections" % (type)
    sql_conn = sqlite3.connect(opts.file)
    cursor = sql_conn.cursor()
    cursor.execute(sql)
    for entry in cursor.fetchall():
        conn = Connection()
        conn._dst_IP = entry[1]
        conn._DNS = entry[2]
        conn._rDNS = entry[3]
        conn._current_volume = entry[4]
        conn._parentBatchID = entry[6]
        for pattern in cdn_regex:
            if pattern.search(entry[2]) or pattern.search(entry[3]):
                conn._is_CDN_connection = True
                break
        container.append(conn)
    sql_conn.close()
def getConnection(self, ip=None, port=None, peer_id=None, create=True):
    if peer_id and peer_id in self.peer_ids:  # Find connection by peer id
        connection = self.peer_ids.get(peer_id)
        if not connection.connected and create:
            succ = connection.event_connected.get()  # Wait for connection
            if not succ:
                raise Exception("Connection event return error")
        return connection

    # Find connection by ip
    if ip in self.ips:
        connection = self.ips[ip]
        if not connection.connected and create:
            succ = connection.event_connected.get()  # Wait for connection
            if not succ:
                raise Exception("Connection event return error")
        return connection

    # Recover from connection pool
    for connection in self.connections:
        if connection.ip == ip:
            if not connection.connected and create:
                succ = connection.event_connected.get()  # Wait for connection
                if not succ:
                    raise Exception("Connection event return error")
            return connection

    # No connection found
    if create:  # Allow to create new connection if not found
        if port == 0:
            raise Exception("This peer is not connectable")
        try:
            connection = Connection(self, ip, port)
            self.ips[ip] = connection
            self.connections.append(connection)
            succ = connection.connect()
            if not succ:
                connection.close()
                raise Exception("Connection event return error")
        except Exception, err:
            self.log.debug("%s Connect error: %s" % (ip, Debug.formatException(err)))
            connection.close()
            raise err
        return connection
class MainApp(QtGui.QDialog):
    def __init__(self, iface, parent=None):
        QtGui.QDialog.__init__(self)
        self.iface = iface
        self.driverTypes = {'PostgreSQL': 'PG', 'MSSQLSpatial': 'MSSQL', 'SQLite': 'sqlite', 'ESRI Shapefile': 'shp', 'GPKG': 'gpkg', 'Nepodporuje': 0}
        self.driverNames = ['SQLite']  #'PostgreSQL','MSSQLSpatial','SQLite','GPKG', 'ESRI Shapefile', 'Nepodporuje'
        self.missDrivers = []
        self.option = {'driver': None, 'datasource': None, 'layers': [], 'layers_name': []}

        # Set up the user interface from Designer.
        self.ui = Ui_MainApp()
        self.ui.setupUi(self)

        # test GDAL version
        version = gdal.__version__.split('.', 2)
        if not (int(version[0]) > 1 or int(version[1]) >= 11):
            self.iface.messageBar().pushMessage(u"GDAL/OGR: požadována verze 1.11 nebo vyšší (nainstalována {}.{})".format(version[0], version[1]),
                                                level=QgsMessageBar.CRITICAL, duration=5)

        # set up widget
        self.ui.driverBox.setToolTip(u'Zvolte typ výstupního souboru/databáze')
        self.ui.driverBox.addItem('--Vybrat--')
        self.set_comboDrivers(self.driverNames)
        self.ui.driverBox.insertSeparator(4)
        self.ui.search.addItems(['Obec', 'ORP', 'Okres', 'Kraj'])
        self.ui.search.setEditable(True)
        self.ui.search.clearEditText()
        self.ui.advanced.hide()
        if not debug:
            self.ui.import_btn.setEnabled(False)
        else:
            self.option['driver'] = 'SQLite'
            self.option['datasource'] = '/tmp/ruian.db'

        # Set up the table view
        path = os.path.join(os.path.dirname(__file__), 'files', 'obce_cr.csv')
        self.model, self.proxy = self.create_model(path)
        self.ui.view.setModel(self.proxy)
        self.ui.view.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
        self.ui.view.setCornerButtonEnabled(False)
        self.ui.view.setSortingEnabled(True)
        self.ui.view.sortByColumn(2, 0)
        self.ui.view.horizontalHeader().setDefaultAlignment(QtCore.Qt.AlignLeft)
        self.ui.view.horizontalHeader().setResizeMode(0, 2)
        self.ui.view.horizontalHeader().resizeSection(0, 28)
        self.ui.view.horizontalHeader().setStretchLastSection(True)
        self.ui.view.verticalHeader().setResizeMode(2)
        self.ui.view.verticalHeader().setDefaultSectionSize(23)
        self.ui.view.verticalHeader().hide()

        # SIGNAL/SLOTS CONNECTION
        self.ui.driverBox.activated['QString'].connect(self.set_datasource)
        self.ui.driverBox.currentIndexChanged['QString'].connect(self.enable_import)
        self.ui.search.activated.connect(self.set_searching)
        self.ui.search.editTextChanged.connect(self.start_searching)
        self.ui.check.clicked.connect(lambda: self.set_checkstate(0))
        self.ui.uncheck.clicked.connect(lambda: self.set_checkstate(1))
        self.ui.advanced_btn.clicked.connect(self.show_advanced)
        self.ui.import_btn.clicked.connect(self.get_options)
        self.ui.buttonBox.rejected.connect(self.close)

    # set combobox drivers
    def set_comboDrivers(self, driverNames):
        model = self.ui.driverBox.model()
        for driverName in driverNames:
            item = QtGui.QStandardItem(str(driverName))
            driver = ogr.GetDriverByName(str(driverName))
            if driver is None:
                self.missDrivers.append(driverName)
                item.setForeground(QtGui.QColor(180, 180, 180, 100))
                model.appendRow(item)
            else:
                model.appendRow(item)

    # create model-view
    def create_model(self, file_path):
        model = QtGui.QStandardItemModel(self)
        firts_line = True
        header = []
        header.append('')
        with open(file_path, 'r') as f:
            for line in f:
                line = line.replace('\n', '')
                if firts_line:
                    for word in line.split(','):
                        word = u'{}'.format(word.decode('utf-8'))
                        header.append(word)
                    firts_line = False
                else:
                    items = []
                    item = QtGui.QStandardItem('')
                    item.setCheckable(True)
                    item.setSelectable(False)
                    items.append(item)
                    for word in line.split(','):
                        word = u'{}'.format(word.decode('utf-8'))
                        item = QtGui.QStandardItem(word)
                        item.setSelectable(False)
                        items.append(item)
                    model.appendRow(items)
        model.setHorizontalHeaderLabels(header)
        proxy = QtGui.QSortFilterProxyModel()
        proxy.setFilterKeyColumn(2)
        proxy.setSourceModel(model)
        return model, proxy

    # set driver and datasource
    def set_datasource(self, driverName):
        if self.ui.import_btn.isEnabled():
            self.ui.import_btn.setEnabled(False)
        if driverName in self.missDrivers:
            self.ui.driverBox.setCurrentIndex(0)
            self.iface.messageBar().pushMessage(u"Nainstalovaná verze GDAL nepodporuje ovladač {}".format(driverName),
                                                level=QgsMessageBar.CRITICAL, duration=5)
            return
        if driverName in ['SQLite', 'GPKG', 'ESRI Shapefile']:  ### only SQLite currently works
            connString = QtGui.QFileDialog.getSaveFileName(self, u'Vybrat/vytvořit soubor',
                                                           'output.{}'.format(self.driverTypes[driverName]),
                                                           '{} (*.{})'.format(driverName, self.driverTypes[driverName]),
                                                           QtGui.QFileDialog.DontConfirmOverwrite)
            if not connString:
                self.ui.driverBox.setCurrentIndex(0)
                return
            driver = ogr.GetDriverByName(str(driverName))
            capability = driver.TestCapability(ogr._ogr.ODrCCreateDataSource)
            if capability:
                self.ui.driverBox.setToolTip(connString)
                self.option['driver'] = driverName
                self.option['datasource'] = connString
                if not self.ui.import_btn.isEnabled():
                    self.ui.import_btn.setEnabled(True)
            else:
                self.iface.messageBar().pushMessage(u"Soubor {} nelze vybrat/vytvořit".format(connString),
                                                    level=QgsMessageBar.CRITICAL, duration=5)
                self.ui.driverBox.setCurrentIndex(0)
        elif driverName in ['PostgreSQL', 'MSSQLSpatial']:
            self.connection = Connection(self.iface, driverName, self)
            self.connection.setModal(True)
            self.connection.show()
            self.connection.setWindowTitle(u'Připojení k databázi {}'.format(driverName))

    def enable_import(self, driverName):
        if driverName == '--Vybrat--':
            self.ui.driverBox.setToolTip(u'Zvolte typ výstupního souboru/databáze')
            self.ui.import_btn.setEnabled(False)
        else:
            self.ui.import_btn.setEnabled(True)

    # enable data select
    def data_select(self, dat_sada_box):
        if self.ui.dat_sada_box.currentText() == u'základní':
            self.ui.vyber_z_box.setEnabled(False)
        else:
            self.ui.vyber_z_box.setEnabled(True)

    # filtering tableview
    def set_searching(self, column):
        self.proxy.setFilterKeyColumn(column + 2)
        self.ui.search.clearEditText()

    def start_searching(self, searchName):
        if searchName not in ['Obec', 'ORP', 'Okres', 'Kraj']:
            self.proxy.setFilterRegExp(QtCore.QRegExp(searchName, QtCore.Qt.CaseInsensitive))

    # check or uncheck items in qtableview
    def set_checkstate(self, state):
        rows = self.proxy.rowCount()
        for row in xrange(0, rows):
            proxyIdx = self.proxy.index(row, 0)
            modelIdx = self.proxy.mapToSource(proxyIdx)
            item = self.model.itemFromIndex(modelIdx)
            if state == 0:
                item.setCheckState(QtCore.Qt.Checked)
            elif state == 1:
                item.setCheckState(QtCore.Qt.Unchecked)

    # show advanced options
    def show_advanced(self):
        if self.ui.advanced_btn.arrowType() == 4:
            self.ui.advanced_btn.setArrowType(QtCore.Qt.DownArrow)
            self.ui.advanced.show()
        elif self.ui.advanced_btn.arrowType() == 2:
            self.ui.advanced_btn.setArrowType(QtCore.Qt.RightArrow)
            self.ui.advanced.hide()

    # start importing data
    def get_options(self):
        self.option['layers'] = []
        self.option['layers_name'] = []
        for row in xrange(0, self.model.rowCount()):
            item = self.model.item(row, 0)
            if item.checkState() == QtCore.Qt.Checked:
                code = self.model.item(row, 1).text()
                name = self.model.item(row, 2).text()
                self.option['layers'].append(code)
                self.option['layers_name'].append(name)

        # generating RUIAN type
        self.UKSH = {'up': 'U', 'zk': 'K', 'sh': 'S', 'zgho': 'H'}
        if self.ui.vyber_z_box.currentText() == u'základní a generalizované hranice':
            self.UKSH['zgho'] = 'G'
        elif self.ui.vyber_z_box.currentText() == u'základní':
            self.UKSH['zgho'] = 'Z'
        elif self.ui.vyber_z_box.currentText() == u'vlajky a znaky':
            self.UKSH['zgho'] = 'O'
        self.option['up'] = 'U'
        if self.ui.cas_rozsah_box.currentText() == u'přírůstky':
            self.UKSH['up'] = 'Z'
        if self.ui.dat_sada_box.currentText() == u'základní':
            self.UKSH['zk'] = 'Z'
            self.UKSH['zgho'] = 'Z'
        if self.ui.platnost_udaju_box.currentText() == u'historické':
            self.UKSH['sh'] = 'H'
        self.option['file_type'] = u'{0}{1}{2}{3}'.format(self.UKSH['up'], self.UKSH['zk'], self.UKSH['sh'], self.UKSH['zgho'])
        # TODO (#3): self.ui.type_time...
        self.option['data_dir'] = None  # TODO (#3): self.ui.data_dir.text()
        print(self.option['file_type'])

        if not self.option['layers']:
            self.iface.messageBar().pushMessage(u"Nejsou vybrána žádná data pro import.",
                                                level=QgsMessageBar.INFO, duration=5)
            return

        # create progress dialog
        self.progress = QtGui.QProgressDialog(u'Probíhá import ...', u'Ukončit', 0, 0, self, QtCore.Qt.SplashScreen)
        self.progress.setParent(self)
        self.progress.setWindowModality(QtCore.Qt.WindowModal)
        self.progress.setWindowTitle(u'Import dat RÚIAN')
        self.progress.canceled.connect(self.import_close)
        self.progress.setAutoClose(False)
        self.progress.resize(400, 50)
        self.progress.show()

        # start import and set signal
        self.importThread = ImportThread(self.option)
        self.importThread.importEnd.connect(self.import_end)
        self.importThread.importStat.connect(self.set_status)
        if not self.importThread.isRunning():
            self.progress.show()
            self.importThread.start()

    # update progress status
    def set_status(self, num, tot, text, operation):
        self.progress.setLabelText(u'{0} {1} z {2} ({3})'.format(operation, num, tot, text))

    # terminate import
    def import_close(self):
        reply = QtGui.QMessageBox.question(self, u'Ukončit', u"Opravdu chcete ukončit import dat?",
                                           QtGui.QMessageBox.Yes | QtGui.QMessageBox.No, QtGui.QMessageBox.Yes)
        if reply == QtGui.QMessageBox.Yes:
            self.importThread.terminate()
        else:
            self.progress.resize(400, 50)
            self.progress.show()

    # import was successful
    def import_end(self):
        self.progress.cancel()
        reply = QtGui.QMessageBox.question(self, u'Import',
                                           u"Import dat proběhl úspěšně. "
                                           u"Přejete si vytvořené vrtsvy do mapového okna?",
                                           QtGui.QMessageBox.Yes | QtGui.QMessageBox.No)
        if reply == QtGui.QMessageBox.Yes:
            self.add_layers()

    # close application
    def close(self):
        self.hide()

    # add created layers to map display
    def add_layers(self):
        driver = ogr.GetDriverByName(self.option['driver'])
        datasource = driver.Open(self.option['datasource'], False)
        if not datasource:
            self.iface.messageBar().pushMessage(u"Soubor {} nelze načíst".format(self.option['datasource']),
                                                level=QgsMessageBar.CRITICAL, duration=5)
            return
        # TODO: use uri instead of hardcoded datasource for SQLite
        # uri = QgsDataSourceURI()
        # uri.setDatabase(self.option['datasource'])
        # schema = ''
        # geom_column = 'GEOMETRY'
        layers = []
        for idx in range(datasource.GetLayerCount()):
            layer = datasource.GetLayerByIndex(idx)
            layer_name = layer.GetName()
            #uri.setDataSource(schema, layer_name, geom_column)
            vlayer = QgsVectorLayer('{0}|layername={1}'.format(self.option['datasource'], layer_name), layer_name, 'ogr')
            QgsMapLayerRegistry.instance().addMapLayer(vlayer)
        del datasource  # close datasource
defNumCaptures = values.num    # 15
defFrameInt = values.interval  # 20
defWarmup = values.warmup      # 3
if values.num is None:
    defNumCaptures = 9
if values.interval is None:
    defFrameInt = 2
if values.warmup is None:
    defWarmup = 4

# Specify (Default) Connection Settings
defHost = '152.117.177.125'
defPort = 8000
defCon = Connection(defHost, defPort)
statusCon = defCon.connect()
if statusCon:
    defCon.close()

# Specify (Default) Phone Settings
phone = values.gvoice
voice = Voice()
if values.gvoice:
    voice.login()

# Specify ident
ident = -1
path = None
try: