def run():
    """Worker main loop: read the bot token, then poll for incoming updates
    and dispatch any slash-commands to the bot's registered handlers until
    cancellation is requested.

    Reads sys.argv[1] (queue directory) and sys.argv[2] (our worker id);
    these are assumed safe because they are passed by start.
    """
    # For ease of configuration, we pull our token from a text file located
    # in the same directory.  A context manager guarantees the handle is
    # closed even if the read raises (the original leaked it on error).
    with open('token.txt', 'r') as f:
        token = f.readline().strip()

    # Process our arguments, which should be safe since they're passed by start.
    # Kept even though unused here — presumably consumed elsewhere; TODO confirm.
    queueDir = sys.argv[1]
    ourID = int(sys.argv[2])

    # Our overall work queue
    workQueue = deque()

    # Initialize our bot
    bot = Bot(token)

    # Load all of our event handlers into our Worker
    load(bot)

    # Continually process incoming updates
    while not canceled():
        getMessages()
        # deque supports direct iteration — no need for index arithmetic
        for update in workQueue:
            text = update.message.text
            # Only process non-empty messages that are commands
            # (guards the IndexError the original hit on an empty text)
            if text and text[0] == '/':
                # Split once: first word is the command, rest are arguments.
                # `parts` may be empty for a bare "/" — guard against that too.
                parts = text[1:].split()
                if parts and parts[0] in bot.handlers:
                    bot.handlers[parts[0]](bot, update, parts[1:])
def run():
    """Worker main loop: read the bot token, then poll for incoming updates
    and dispatch any slash-commands to the bot's registered handlers until
    cancellation is requested.

    Reads sys.argv[1] (queue directory) and sys.argv[2] (our worker id);
    these are assumed safe because they are passed by start.
    """
    # For ease of configuration, we pull our token from a text file located
    # in the same directory.  A context manager guarantees the handle is
    # closed even if the read raises (the original leaked it on error).
    with open('token.txt', 'r') as f:
        token = f.readline().strip()

    # Process our arguments, which should be safe since they're passed by start.
    # Kept even though unused here — presumably consumed elsewhere; TODO confirm.
    queueDir = sys.argv[1]
    ourID = int(sys.argv[2])

    # Our overall work queue
    workQueue = deque()

    # Initialize our bot
    bot = Bot(token)

    # Load all of our event handlers into our Worker
    load(bot)

    # Continually process incoming updates
    while not canceled():
        getMessages()
        # deque supports direct iteration — no need for index arithmetic
        for update in workQueue:
            text = update.message.text
            # Only process non-empty messages that are commands
            # (guards the IndexError the original hit on an empty text)
            if text and text[0] == '/':
                # Split once: first word is the command, rest are arguments.
                # `parts` may be empty for a bare "/" — guard against that too.
                parts = text[1:].split()
                if parts and parts[0] in bot.handlers:
                    bot.handlers[parts[0]](bot, update, parts[1:])
def __init__(self,steps,alive_field): QObject.__init__(self) #self.input = "C:/Users/anita_000/Dropbox/QgisSandbox/grid15.shp" self.input = "C:/Users/anita_000/Dropbox/QgisSandbox/gosper_glidergun.csv" self.input_layer = processing.load(self.input) self.input_layer.loadNamedStyle("C:/Users/anita_000/Dropbox/QgisSandbox/conways_game_of_life.qml") self.alive_field = alive_field self.index = QgsSpatialIndex() self._state = 0 self.anim = QPropertyAnimation(self, "state") self.anim.setStartValue(0) self.anim.setEndValue(steps) self.anim.setDuration(5000) self.anim.valueChanged.connect(self.computeNextStep)
def createNodeLayer(self):
    """Build Voronoi polygon and Voronoi node layers from the base-station layer.

    Returns the loaded node layer on success, False on any failure
    (missing project dir, or files still locked by QGIS).
    """
    project_dir = getProjectDir(self.iface, u"基站")
    if not project_dir:
        QMessageBox.critical(self, u"错误", u"基站图层不存在!")
        return False
    else:
        project = QgsProject.instance()
        VoronoiName = u"泰森多边形"
        NodesName = u"泰森结点"
        # If the Voronoi polygon / Voronoi node layers already exist, remove the
        # layers first and then delete the shapefiles.
        # NOTE(review): as written this is if/else — a loaded layer is removed
        # OR an on-disk shapefile is deleted, never both; the comment above
        # suggests both were intended — confirm against original author intent.
        voronoi_layer = getLayerByName(VoronoiName, self.iface)
        if voronoi_layer:
            QgsMapLayerRegistry.instance().removeMapLayer(voronoi_layer)
        else:
            deleteShapefile(project_dir, VoronoiName)
        nodes_layer = getLayerByName(NodesName, self.iface)
        if nodes_layer:
            QgsMapLayerRegistry.instance().removeMapLayer(nodes_layer)
        else:
            deleteShapefile(project_dir, NodesName)
        # If either layer was loaded, persist the project and abort, asking the
        # user to restart QGIS so the shapefiles are released.
        if (voronoi_layer) or (nodes_layer):
            project.write()
            QMessageBox.critical(self, u"错误", u"相应文件已被占用,请重启QGIS软件!")
            return False
        site_layer = getLayerByName(u'基站', self.iface)
        # Generate the Voronoi polygons
        VoronoiFile = os.path.join(project_dir, VoronoiName + u".shp")
        Vor = processing.runalg("qgis:voronoipolygons", site_layer, 0, VoronoiFile)
        Voronoi = processing.load(Vor['OUTPUT'], VoronoiName)
        # Generate the Voronoi nodes
        NodesFile = os.path.join(project_dir, NodesName + u".shp")
        Nod = processing.runalg("qgis:extractnodes", Voronoi, NodesFile)
        Nodes = processing.load(Nod['OUTPUT'], NodesName)
        return Nodes
from qgis.networkanalysis import * # create the graph layer = processing.getObject('network_pgr') director = QgsLineVectorLayerDirector(layer,-1,'','','',3) director.addProperter(QgsDistanceArcProperter()) builder = QgsGraphBuilder(layer.crs()) from_point = QgsPoint(2.73343,3.00581) to_point = QgsPoint(0.483584,2.01487) tied_points = director.makeGraph(builder,[from_point,to_point]) graph = builder.graph() # compute the route from from_id to to_id from_id = graph.findVertex(tied_points[0]) to_id = graph.findVertex(tied_points[1]) (tree,cost) = QgsGraphAnalyzer.dijkstra(graph,from_id,0) # assemble the route route_points = [] curPos = to_id while (curPos != from_id): in_vertex = graph.arc(tree[curPos]).inVertex() route_points.append(graph.vertex(in_vertex).point()) curPos = graph.arc(tree[curPos]).outVertex() route_points.append(from_point) # write the results to a Shapefile result = 'C:\\temp\\route.shp' writer = VectorWriter(result,None,[],2,layer.crs()) fet = QgsFeature() fet.setGeometry(QgsGeometry.fromPolyline(route_points)) writer.addFeature(fet) del writer processing.load(result)
# Cost-surface script fragment: runs GRASS r.cost.full, then styles the
# output raster by time intervals.
# NOTE(review): this chunk is truncated — the final `if` has no body here.
knightMove = False
keepNull = False
grassRes = 0  # default
ogrSnap = -1  # no snap
ogrMinArea = 0.0001
result = processing.runalg("grass7:r.cost.full", pixelTime, startPts, knightMove, keepNull, extent, grassRes, ogrSnap, ogrMinArea, rcost)
if not result or not os.path.isfile(rcost):  # result dict can be not empty but there is no file !
    raise GeoAlgorithmExecutionException('err grass rcost')
progress.setPercentage(100)
# STYLE (--> alternative approach: load a style file prepared in advance)
progress.setText('Stylizing output raster')
# costLay = processing.getObject(rcost)
costLay = processing.load(rcost)  # Careful: the layer therefore gets loaded twice — once here and once by runalg
sm = costLay.styleManager()
timeIntervals = timeIntervals.split(',')
# NOTE(review): Python-2 era — map() returns a list here; under Python 3 this
# iterator would be consumed by the first loop below. Confirm target version.
timeIntervals = map(int, timeIntervals)
maxTime = int(maxTime)
if Style_unit == 1:  # Hours
    # convert to minutes
    maxTime = maxTime * 60
    timeIntervals = [interval * 60 for interval in timeIntervals]
for timeInterval in timeIntervals:
    if Style_unit == 1:  # Hours
from qgis.networkanalysis import * # create the graph layer = processing.getObject('network_pgr') director = QgsLineVectorLayerDirector(layer, -1, '', '', '', 3) director.addProperter(QgsDistanceArcProperter()) builder = QgsGraphBuilder(layer.crs()) from_point = QgsPoint(2.73343, 3.00581) to_point = QgsPoint(0.483584, 2.01487) tied_points = director.makeGraph(builder, [from_point, to_point]) graph = builder.graph() # compute the route from from_id to to_id from_id = graph.findVertex(tied_points[0]) to_id = graph.findVertex(tied_points[1]) (tree, cost) = QgsGraphAnalyzer.dijkstra(graph, from_id, 0) # assemble the route route_points = [] curPos = to_id while (curPos != from_id): in_vertex = graph.arc(tree[curPos]).inVertex() route_points.append(graph.vertex(in_vertex).point()) curPos = graph.arc(tree[curPos]).outVertex() route_points.append(from_point) # write the results to a Shapefile result = 'C:\\temp\\route.shp' writer = VectorWriter(result, None, [], 2, layer.crs()) fet = QgsFeature() fet.setGeometry(QgsGeometry.fromPolyline(route_points)) writer.addFeature(fet) del writer processing.load(result)
from numpy import *
import Gnuplot, Gnuplot.funcutils
import processing as p

# NOTE(review): `array` shadows numpy's array() pulled in by the star-import
# above — rename if numpy's array is needed later in the file.
array = []
# Passes an empty list to processing.load(); presumably meant to load data
# into `array` for plotting — TODO confirm intent against the processing module.
p.load(array)
g = Gnuplot.Gnuplot(debug=1)
# print (array)
def load(*args):
    """Thin convenience wrapper around processing.load().

    Forwards only the first positional argument; any extras are ignored.
    Raises IndexError when called with no arguments.
    """
    source = args[0]
    processing.load(source)
grassRes = 0 #default ogrSnap = -1 #no snap ogrMinArea = 0.0001 result = processing.runalg("grass7:r.cost.full", pixelTime, startPts, knightMove, keepNull, extent, grassRes, ogrSnap, ogrMinArea, rcost) if not result or not os.path.isfile( rcost): #result dict can be not empty but there is no file ! raise GeoAlgorithmExecutionException('err grass rcost') progress.setPercentage(100) #STYLE (-->méthodo alternative, charger un fichier de style préparé à l'avance) progress.setText('Stylizing output raster') #costLay = processing.getObject(rcost) costLay = processing.load( rcost ) #Attention du coup la couche est chargée 2 fois: une fois ici et une fois par runalg sm = costLay.styleManager() timeIntervals = timeIntervals.split(',') timeIntervals = map(int, timeIntervals) maxTime = int(maxTime) if Style_unit == 1: #Hours #convertir en minutes maxTime = maxTime * 60 timeIntervals = [interval * 60 for interval in timeIntervals] for timeInterval in timeIntervals:
print("[§] Importing modules...")

import sys

from _utilities import *
from processing import load
from vectorize import extract_features
from classification import classifier
import sklearn  # DEBUG  -- was `from sklearn`, which is a SyntaxError
from sklearn.model_selection import KFold

# NOTE: the duplicate `from _utilities import *` the original had here was
# removed — it was an exact repeat of the import above.


def splitDataframe(df, splits=3):
    """Return the fold count KFold reports for *df* (equals `splits`)."""
    return KFold(n_splits=splits).get_n_splits(df)  # DEBUG


if __name__ == '__main__':
    # Load raw mails, vectorize the message bodies, then train a classifier
    # on the "needs_reply" labels.  (Rebinding `classifier` shadows the
    # imported factory of the same name — intentional here, single use.)
    mails = load(sys.argv[1])
    _, features = extract_features(mails["message"])
    classifier = classifier(features, mails["needs_reply"])
    # Do something with Classifier
# 0.357, 0.241, 0.26 , 0.401, 0.185, 0.172, 0.248, 0.4 , # 0.482, 0.159, 0.373, 0.455, 0.083, 0.128]) import processing import numpy as np import pysal np.random.seed(12345) result = processing.runalg( "script:localmorans", pysal.examples.get_path("stl_hom.shp"), "HR8893", 1, # for rook None) result_layer = processing.load(result['morans_output']) p = [] for f in result_layer.getFeatures(): p.append(f['MORANS_P']) observed = p desired = [ 0.176, 0.073, 0.405, 0.267, 0.332, 0.057, 0.296, 0.242, 0.055, 0.062, 0.273, 0.488, 0.44 , 0.354, 0.415, 0.478, 0.473, 0.374, 0.415, 0.21 , 0.161, 0.025, 0.338, 0.375, 0.285, 0.374, 0.208, 0.3 , 0.373, 0.411, 0.478, 0.414, 0.009, 0.429, 0.269, 0.015, 0.005, 0.002, 0.077, 0.001, 0.088, 0.459, 0.435, 0.365, 0.231, 0.017, 0.033, 0.04 , 0.068, 0.101, 0.284, 0.309, 0.113, 0.457, 0.045, 0.269, 0.118, 0.346, 0.328, 0.379, 0.342, 0.39 , 0.376, 0.467, 0.357, 0.241, 0.26 , 0.401, 0.185, 0.172, 0.248, 0.4 ,
from numpy import *
import Gnuplot, Gnuplot.funcutils
import processing as p

# NOTE(review): `array` shadows numpy's array() pulled in by the star-import
# above — rename if numpy's array is needed later in the file.
array = []
# Passes an empty list to processing.load(); presumably meant to load data
# into `array` for plotting — TODO confirm intent against the processing module.
p.load(array)
g = Gnuplot.Gnuplot(debug=1)
#print (array)