Example #1
def report(text, fname):
  common.progress('saving report')
  try:
    with open(fname, 'w') as outfile:
      outfile.write(text.encode('utf8'))
  except (IOError, OSError, UnicodeEncodeError):
    common.warning('Report output failed.')
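This helper targets Python 2, where writing an encoded str to a text-mode file is fine. For contrast, a self-contained Python 3 sketch would hand the encoding to open(); here plain print() stands in for the common.progress and common.warning helpers, which are not shown on this page:

def report(text, fname):
  print('saving report')  # stands in for common.progress
  try:
    with open(fname, 'w', encoding='utf8') as outfile:
      outfile.write(text)
  except (IOError, OSError, UnicodeEncodeError):
    print('Report output failed.')  # stands in for common.warning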
Example #2
def normalize_records(inputfiles, outputdir):
    print("Normalizing", len(inputfiles), "records to", outputdir)
    for index, (filename, record) in enumerate(read_records(inputfiles)):
        progress((index + 1) / len(inputfiles))
        normalized_record = normalize(record)
        out_file = os.path.basename(filename).replace("xml", "json")
        write_json_file(outputdir, out_file, normalized_record)
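The progress() helper that these record-processing examples call with a 0..1 fraction is not shown; a minimal single-line status bar consistent with that call signature might look like this (an assumption, not the project's actual implementation):

import sys

def progress(fraction, width=40):
    # draw a one-line bar for a fraction between 0 and 1, overwriting in place
    filled = int(round(width * fraction))
    sys.stdout.write('\r[%s%s] %3d%%' % ('#' * filled, '-' * (width - filled),
                                         int(fraction * 100)))
    sys.stdout.flush()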
Example #3
def normalize_records(inputfiles, outputdir):
    print("Normalizing", len(inputfiles), "records to", outputdir)
    for index, (filename, record) in enumerate(read_records(inputfiles)):
        progress((index+1)/len(inputfiles))
        normalized_record = normalize(record)
        out_file = os.path.basename(filename).replace("xml", "json")
        write_json_file(outputdir, out_file, normalized_record)
Example #4
def createAreals(zones, thrDens):
  areals = []
  common.progress('creating areals')
  for zone in zones.values():
    if zone.get('dens') >= thrDens:
      areals.append(DensityAreal(zone))
  return areals
Example #5
def createAreals(zones, thrDens):
    areals = []
    common.progress('creating areals')
    for zone in zones.values():
        if zone.get('dens') >= thrDens:
            areals.append(DensityAreal(zone))
    return areals
Example #6
 def __init__(self, table, odIDFields, places, placeIDField, location, **kwargs):
   TableLinker.__init__(self, table, odIDFields, places, placeIDField, location, **kwargs)
   self.placeMapper.setIDTransfer(True)
   self.placeIDQPart = common.query(self.places, '[%s] = ', self.placesIDField)
   self.placeFC = self.places
   if not common.isLayer(self.places):
     common.progress('creating place selection layer')
     self.places = arcpy.MakeFeatureLayer_management(self.placeFC, self.PLACES_LAY).getOutput(0)
Example #7
def normalize_records(inputfiles, outputdir):
    print("Normalizing", len(inputfiles), "records to", outputdir)
    for index, (filename, record) in enumerate(read_records(inputfiles)):
        progress((index + 1) / len(inputfiles))
        normalized_record = normalize(record)
        print('normalized record:', normalized_record)
        #new_normalized_record = [unicode(elem).encode('utf-8') for elem in normalized_record]
        #print 'new normalized record: ', new_normalized_record

        out_file = os.path.basename(filename).replace("xml", "json")
        write_json_file(outputdir, out_file, normalized_record)
Example #8
 def process(self):
   # check if ID is integer - if not, create an integer field
   count = common.count(self.zones)
   idFld = self.zoneMapper.getIntIDField(setID=True, unmapPrevious=False, progressor=common.progressor('creating temporary IDs', count))
   self.zoneMapper.loadData(common.progressor('loading zone data', count))
   # generate SWM file
   common.progress('generating spatial matrix')    
   arcpy.GenerateSpatialWeightsMatrix_stats(self.zones, idFld, self.swmFile, self.method)
   common.progress('converting matrix to table')
   arcpy.ConvertSpatialWeightsMatrixtoTable_stats(self.swmFile, self.tmpTable)
   self.zoneMapper.setJoinIDFields([idFld, self.NEIGH_ID_FLD])
Example #9
def enrich_records(inputfiles, outputdir, force=False):
    print("Enriching", len(inputfiles), "records. Saving to", outputdir)
    for index, (filename, record) in enumerate(read_records(inputfiles)):
        progress((index + 1) / len(inputfiles))
        out_file = os.path.basename(filename).replace(".json",
                                                      "_enriched.json")
        out_path = outputdir + "/" + out_file
        if (os.path.exists(out_path) and not force):
            print(out_file, "already enriched. Skipping...")
        else:
            enriched_record = enrich(record)
            write_json_file(outputdir, out_file, enriched_record)
Example #10
def enrich_records(inputfiles, outputdir, force=False):
    print("Enriching", len(inputfiles), "records. Saving to", outputdir)
    for index, (filename, record) in enumerate(read_records(inputfiles)):
        progress((index+1)/len(inputfiles))
        out_file = os.path.basename(filename).replace(".json",
                                                      "_enriched.json")
        out_path = outputdir + "/" + out_file
        if os.path.exists(out_path) and not force:
            print(out_file, "already enriched. Skipping...")
        else:
            enriched_record = enrich(record)
            write_json_file(outputdir, out_file, enriched_record)
Example #11
def resolveEnclaves(areals, thrDens, minMass=0):
    common.progress('resolving enclaves')
    newAreals = []
    while areals:
        areal = areals.pop()
        if areal:
            if areal.isInEnclave():
                areal.erase()
            else:
                areal.includeEnclaves()  # include all pockets
                newAreals.append(areal)
                areals.extend(areal.densify(thrDens, minMass))
    return newAreals
Example #12
def resolveEnclaves(areals, thrDens, minMass=0):
  common.progress('resolving enclaves')
  newAreals = []
  while areals:
    areal = areals.pop()
    if areal:
      if areal.isInEnclave():
        areal.erase()
      else:
        areal.includeEnclaves() # include all pockets
        newAreals.append(areal)
        areals.extend(areal.densify(thrDens, minMass))
  return newAreals
Example #13
 def loadNetwork(self, network, cost, searchDist, mappings=[]):
   common.progress('calculating places\' network locations')
   network = common.checkFile(network)
   self.calculateLocations(network, self.places, searchDist)
   self.mappings = self.networkMappings(common.NET_FIELDS + mappings)
   if self.speedup:
     self.performSpeedup(network, cost)
   common.progress('preparing routing layer')
   self.naLayer = self.makeRouteLayer(network, self.ROUTE_LAY, cost)
   self.routeSublayer = common.sublayer(self.naLayer, 'Routes')
   # common.debug(common.fieldList(self.routeSublayer))
   self.cost = cost
   self.placeMapper.addSilentField(self.cost, float)
Example #14
def progress_on_fs_and_db(repo, fs_dir, do_checksum):
    total_len = common.db_length(repo.db_file)

    count = 0
    db = common.browse_db(repo.db_file)
    fs = common.browse_filesystem(fs_dir, do_checksum)
    state = FileState.OK

    while True:
        if state in (FileState.OK, FileState.DIFFERENT):
            fs_entry = next(fs)
                
            if fs_entry is not None:
                # not a new directory
                db_entry = next(db)

        elif fs_entry is False or state is FileState.MISSING_IN_FS:
            db_entry = next(db)
            
        elif db_entry is False or state is FileState.MISSING_ON_DB:
            fs_entry = next(fs)
            
        else:
            assert False # should not come here
                
        if fs_entry is None:
            # new directory
            continue 
            
        if fs_entry is False and db_entry is False:
            common.progress(total_len, total_len)
            print("")
            
            return  # end the generator; raising StopIteration here is a RuntimeError under PEP 479 (Python 3.7+)
        
        if fs_entry is False: # no more fs entries
            state = FileState.MISSING_IN_FS # so file cannot be on fs
            
        elif db_entry is False: # no more db entries
            state = FileState.MISSING_ON_DB # so file cannot be on db
            
        else:
            # returns None if could compare,
            #      or db_fullpath > fs_fullpath
            state, diff = compare_entries(fs_dir, fs_entry, db_entry, do_checksum)

        common.progress(count, total_len)
        count += 1
        
        yield state, diff, db_entry, fs_entry
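Because progress_on_fs_and_db is a generator, a caller drives it with a for loop and reacts to each yielded state; a hedged usage sketch (repo, fs_dir and FileState come from the surrounding project):

for state, diff, db_entry, fs_entry in progress_on_fs_and_db(repo, fs_dir, do_checksum=True):
    if state is not FileState.OK:
        print(state, diff)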
Example #15
	def cmd_put(self,*args):
		'''
		Upload a file to the client.
		:return:
		'''
		cmd_split = args[0].split()
		if len(cmd_split)>1:
			file_name = cmd_split[1]
			if os.path.isfile(file_name):
				# build the header and send it to the client for confirmation
				file_size = os.stat(file_name).st_size
				if self.user_cur_dir[-1] == os.sep:
					client_path = self.user_cur_dir + file_name
				else:
					client_path = self.user_cur_dir + os.sep + file_name

				# quota check
				if self.limit_size < self.used_size + file_size:
					common.errorPrompt('upload quota [%s]k exceeded: [%s]k already used, this file is [%s]k'
									   % (self.limit_size, self.used_size, file_size))
					return
				header={
					'file_name':client_path,
					'file_size':file_size,
					'overridden':True,
					'action':'put'
				}
				header_json = json.dumps(header)
				self.client.send(header_json.encode(CODING))
				client_reply = self.client.recv(BUFSIZ).decode()
				# verify the reply code
				if client_reply.__contains__('200'):
					m = hashlib.md5()
					f = open(file_name,'rb')
					send_size = 0
					for line in f:
						self.client.send(line)
						m.update(line)
						send_size += len(line)
						percent = send_size / file_size  # fraction sent so far
						common.progress(percent, width=80)  # progress bar 80 characters wide
					f.close()
					local_md5 = m.hexdigest()
					self.client.send(local_md5.encode(CODING))
					print('send done')
			else:
				print(file_name, 'does not exist')
Example #16
	def cmd_get(self,*args):
		'''
		Download a file from the client.
		:return:
		'''
		cmd_split = args[0].split()
		if len(cmd_split) > 1:
			file_name = cmd_split[1]
			if self.user_cur_dir[-1] == os.sep:
				client_path = self.user_cur_dir + file_name
			else:
				client_path = self.user_cur_dir + os.sep + file_name
			header = {
				'file_name': client_path,
				'action': 'get'
			}
			header_json = json.dumps(header)
			self.client.send(header_json.encode(CODING))
			# get file info
			server_reply = self.client.recv(BUFSIZ).decode(CODING)
			print(server_reply)
			# client acknowledgement
			self.client.send('ok'.encode(CODING))
			file_size = int(server_reply)
			receive_size = 0
			file_name = cmd_split[1]
			m = hashlib.md5()
			# write the local file
			with open(file_name + '.new', 'wb') as f:
				while receive_size < file_size:
					# avoid sticky packets: read only up to the declared file size
					cur_buf_size = file_size - receive_size
					if cur_buf_size > BUFSIZ:
						cur_buf_size = BUFSIZ

					data = self.client.recv(cur_buf_size)
					f.write(data)
					receive_size += len(data)  # note: count the bytes actually received, not cur_buf_size
					m.update(data)
					percent = receive_size / file_size  # fraction received so far
					common.progress(percent, width=80)  # progress bar 80 characters wide
				else:
					local_md5 = m.hexdigest()
					server_md5 = self.client.recv(BUFSIZ)
					server_md5 = server_md5.decode()
					if local_md5 == server_md5:
						print('file rec done.')
					else:
						common.errorPrompt('data is missing or changed..')
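Both transfer commands feed the stream through an md5 chunk by chunk so the two peers can compare digests at the end. Isolated from the socket plumbing, the same streaming-hash pattern over a local file looks like this:

import hashlib

def file_md5(path, chunk_size=8192):
	# feed the file through the hash in chunks so large files never sit in memory
	m = hashlib.md5()
	with open(path, 'rb') as f:
		for chunk in iter(lambda: f.read(chunk_size), b''):
			m.update(chunk)
	return m.hexdigest()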
Example #17
 def loadNetwork(self, network, cost=None, cutoff=None, numToFind=None, searchDist=None, cutoffFld=None, numToFindFld=None, mappings=[]):
   common.progress('creating routing layer')
   if not numToFind:
     numToFind = common.count(self.places)
   self.naLayer = self.makeNALayer(common.checkFile(network), self.NA_LAY, cost, cutoff, numToFind)
   self.cost = cost
   self.placeMapper.addSilentField(cost, float)
   common.progress('calculating places\' network locations')
   self.calculateLocations(network, self.places, searchDist)
   common.progress('loading places to network')
   # create mappings
   toMappingList = common.NET_FIELDS + [(self.NAME_MAP, self.placesIDField, None)]
   for item in mappings:
     toMappingList.append(item + [None])
   fromMappingList = toMappingList[:]
   if cutoffFld:
     fromMappingList.append((self.CUTOFF_PREFIX + cost, cutoffFld, None))
   if numToFindFld:
     fromMappingList.append((self.NUM_TO_FIND_HEADER, numToFindFld, None))
   # load locations
   arcpy.AddLocations_na(self.NA_LAY, self.OD_SUBLAYERS[0], self.places, self.networkMappings(fromMappingList), '', append='clear')
   arcpy.AddLocations_na(self.NA_LAY, self.OD_SUBLAYERS[1], self.places, self.networkMappings(toMappingList), '', append='clear')
   self.routeSublayer = common.sublayer(self.naLayer, self.OUTPUT_SUBLAYER)
   self.linkMapper = conversion.LinkFieldMapper(self.routeSublayer)
   try:
     self.linkMapper.addMapField(self.OUT_COST_PREFIX + cost, cost)
   except conversion.FieldError:
     raise conversion.FieldError('cost attribute %s not found in network dataset' % cost)
Example #18
 def performSpeedup(self, network, cost):
   common.progress('preparing speedup search')
   speeder = BulkConnectionCreator(self.placeFC, self.placesIDField, self.location, excludeSelf=True, messenger=common.Messenger(common.getDebugMode()))
   speeder.loadNetwork(network, cost, cutoff=self.speedupDistance)
   common.progress('performing speedup search')
   speeder.loadPlaces()
   speeder.solve()
   common.progress('reading speedup data')
   self.cache = speeder.getGeometryDict({cost : 'Total_' + cost})
Example #19
def delimitDensityAreals(zones, idFld, popFld, thrDens, minPop, targetFld, neighTable=None, doMergeEnclaves=True):
  common.progress('loading areal data')
  loader = loaders.RegionalLoader()
  # common.progress('calculating zone densities')
  areaFld = common.ensureShapeAreaField(zones)
  inSlots = {'id' : idFld, 'mass' : popFld, 'area' : areaFld}
  loader.sourceOfZones(zones, inSlots, targetClass=DensityZone)
  loader.possibleNeighbourhood(neighTable, exterior=True)
  loader.load()
  zones = loader.getZoneDict()
  common.progress('delimiting areals')
  regionalize(zones, thrDens, minPop, doMergeEnclaves)
  common.progress('saving data')
  loader.addZoneOutputSlot('assign', targetFld, require=True)
  loader.outputZones(zones)
Example #20
 def loadNetwork(self, network, cost=None, searchDist=None, breaks=[1], mappings=[]):
   common.progress('creating routing layer')
   # if not numToFind:
     # numToFind = common.count(self.places)
   self.naLayer = self.makeNALayer(common.checkFile(network), self.NA_LAY, cost, breaks)
   common.progress('calculating places\' network locations')
   self.calculateLocations(network, self.places, searchDist)
   common.progress('loading places to network')
   # create mappings
   toMappingList = common.NET_FIELDS + [(self.NAME_MAP, self.placesIDField, None)]
   for item in mappings:
     toMappingList.append(item + [None])
   # fromMappingList = toMappingList[:]
   # if cutoffFld:
     # fromMappingList.append((self.CUTOFF_PREFIX + cost, cutoffFld, None))
   # if numToFindFld:
     # fromMappingList.append((self.NUM_TO_FIND_HEADER, numToFindFld, None))
   # load locations
   arcpy.AddLocations_na(self.NA_LAY, self.OD_SUBLAYER, self.places, self.networkMappings(toMappingList), '', append='clear')
   self.outputSublayer = common.sublayer(self.naLayer, self.OUTPUT_SUBLAYER)
Example #21
def delimitDensityAreals(zones,
                         idFld,
                         popFld,
                         thrDens,
                         minPop,
                         targetFld,
                         neighTable=None,
                         doMergeEnclaves=True):
    common.progress('loading areal data')
    loader = loaders.RegionalLoader()
    # common.progress('calculating zone densities')
    areaFld = common.ensureShapeAreaField(zones)
    inSlots = {'id': idFld, 'mass': popFld, 'area': areaFld}
    loader.sourceOfZones(zones, inSlots, targetClass=DensityZone)
    loader.possibleNeighbourhood(neighTable, exterior=True)
    loader.load()
    zones = loader.getZoneDict()
    common.progress('delimiting areals')
    regionalize(zones, thrDens, minPop, doMergeEnclaves)
    common.progress('saving data')
    loader.addZoneOutputSlot('assign', targetFld, require=True)
    loader.outputZones(zones)
Example #22
class Halver(loaders.FunctionUpdater):
  requiredInputSlots = ['doubled']
  requiredOutputSlots = ['halved']
  
  @staticmethod
  def translate(inprow={'doubled' : 0.0}):
    return {'halved' : inprow['doubled'] / 2.0}

with common.runtool(4) as parameters:
  conns, trafFld, tolerance, outPath = parameters
  location = os.path.dirname(outPath)
  duplPath = common.addExt(os.path.join(location, DUPL_FILE))
  mergePath = common.addExt(os.path.join(location, MERGE_FILE))
  interPath = common.addExt(os.path.join(location, INTER_FILE))
  common.progress('preparing connections')
  arcpy.CopyFeatures_management(conns, duplPath)
  arcpy.Merge_management([conns, duplPath], mergePath)
  common.progress('intersecting connections')
  arcpy.Intersect_analysis([mergePath], interPath, 'ALL', tolerance, 'INPUT')
  # create a shape length field
  common.progress('marking traffic')
  arcpy.AddField_management(interPath, SHPLEN_FLD, 'Double')
  arcpy.CalculateField_management(interPath, SHPLEN_FLD, '!shape.length!', 'PYTHON_9.3')
  common.progress('summarizing traffic')
  arcpy.Dissolve_management(interPath, outPath, [SHPLEN_FLD], [[trafFld, 'SUM']], 'SINGLE_PART')
  common.progress('initializing traffic fields')
  arcpy.AddField_management(outPath, trafFld, common.typeOfField(conns, trafFld))
  sumFld = 'SUM_' + trafFld # TODO: shapefile field name length adjustments...
  Halver(outPath, {'doubled' : sumFld}, {'halved' : trafFld}).decompose('computing traffic fields')
  # prog = common.progressor('adjusting total flow counts', common.count(outPath))
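Halver.translate is a pure function of a single input row, which makes the slot mapping easy to sanity-check in isolation:

assert Halver.translate({'doubled' : 5.0}) == {'halved' : 2.5}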
Example #23
import common, objects, loaders, regionalization, collections, operator

INTRAFLOW_MODES = {
  'NONE' : [],
  'ALL' : [(True, True, True, True)],
  'CORE-HINTERLAND' : [(True, False, False, True), (False, True, True, False)],
  'CORE-CORE' : [(True, False, True, False)],
  'CORE-(CORE+HINTERLAND)' : [(True, False, True, True), (False, True, True, False)],
}

with common.runtool(11) as parameters:
  common.progress('loading settings')
  zoneLayer, zoneIDFld, zoneRegFld, zoneCoreFld, aggregMode, intraflowMode, \
    interLayer, interFromIDFld, interToIDFld, aggregFldsStr, outPath = parameters
  # set up data loading
  regload = loaders.RegionalLoader(regionalization.Regionalizer(objects.FunctionalRegion))
  regload.sourceOfZones(zoneLayer, {'id' : zoneIDFld}, targetClass=objects.MonoZone)
  regload.sourceOfPresets({'assign' : zoneRegFld, 'coop' : zoneCoreFld})
  interDict = {'from' : interFromIDFld, 'to' : interToIDFld}
  aggregFlds = common.parseFields(aggregFldsStr)
  interDict.update({fld : fld for fld in aggregFlds})
  regload.sourceOfMultiInteractions(interLayer, interDict, ordering=aggregFlds)
  # flow aggregation mode
  regSrc, regTgt = aggregMode.split('-')
  hinterSrc = bool(regSrc != 'CORE')
  coreTgt = bool(regTgt == 'CORE')
  try:
    intraFlowSetting = INTRAFLOW_MODES[intraflowMode]
  except KeyError:
    raise ValueError('unknown intraflow setting: ' + intraflowMode)
  # load the data
  regload.load()
  # aggregate
  regs = regload.getRegionalizer().getRegions()
  common.progress('aggregating flows')
  flows = {}
  for region in regs:
Example #24
import common, colors, loaders

with common.runtool(7) as parameters:
  zoneLayer, idFld, coreQuery, colorFld, colorFile, randomizeColors, neighTable = parameters
  shuffle = common.toBool(randomizeColors, 'color randomization switch')
  if not neighTable:
    common.progress('computing feature neighbourhood')
    import conversion
    neighTable = conversion.generateNeighbourTableFor(zoneLayer, idFld)
  common.progress('loading neighbourhood')
  neighbourhood = loaders.NeighbourTableReader(neighTable).read()
  common.debug(neighbourhood)
  common.progress('loading color setup')
  chooser = colors.ColorChooser(neighbourhood, colorFile)
  common.progress('assigning colors')
  colored = chooser.colorHeuristically(shuffle=shuffle)
  common.progress('writing output')
  loaders.ColorMarker(zoneLayer, inSlots={'id' : idFld}, outSlots={'color' : colorFld}, outCallers={'color' : 'getColor'}, where=coreQuery).mark(colored)
  # # TODO
  # common.progress('creating color field')
  # if colorFld not in common.fieldList(zoneLayer):
    # arcpy.AddField_management(zoneLayer, colorFld, 'TEXT')
  # zoneRows = arcpy.UpdateCursor(zoneLayer, coreQuery)
  # zoneColors = defaultdict(str)
  # for row in zoneRows:
    # id = row.getValue(idFld)
    # if id in colored:
      # row.setValue(colorFld, colored[id].getColor())
      # zoneRows.updateRow(row)
  # del zoneRows
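colorHeuristically is not shown on this page; a common approach to the same task is greedy coloring over the neighbourhood dict, sketched below purely for illustration (not necessarily what colors.ColorChooser does):

def greedyColors(neighbourhood, palette):
  # neighbourhood: {zone id : iterable of adjacent zone ids}
  # palette: list of colors, assumed long enough for this sketch
  assigned = {}
  for zone in neighbourhood:
    taken = set(assigned[n] for n in neighbourhood[zone] if n in assigned)
    assigned[zone] = next(c for c in palette if c not in taken)
  return assigned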
Example #25
import arcpy, common

strLayer = 'tmp_i095'
relLayer = 'tmp_i043'

with common.runtool(7) as parameters:
  interLayer, strengthFld, lengthFld, minStrengthStr, minRelStrengthStr, maxLengthStr, output = parameters
  if minStrengthStr or maxLengthStr:
    queries = []
    if minStrengthStr:
      common.progress('assembling absolute strength exclusion')
      minStrength = common.toFloat(minStrengthStr, 'minimum absolute interaction strength')
      queries.append(common.query(interLayer, '[%s] >= %g', strengthFld, minStrength))
    if maxLengthStr:
      common.progress('assembling absolute length exclusion')
      maxLength = common.toFloat(maxLengthStr, 'maximum absolute interaction length')
      queries.append(common.query(interLayer, '[%s] <= %g', lengthFld, maxLength))
    common.selection(interLayer, strLayer, ' OR '.join(queries))
  else:
    strLayer = interLayer
  if minRelStrengthStr:
    common.progress('performing relative strength exclusion')
    minRelStrength = common.toFloat(minRelStrengthStr, 'minimum relative interaction strength')
    relQuery = common.query(interLayer, '[%s] > 0 AND ([%s] / [%s] * 1000) >= %g', lengthFld, strengthFld,
      lengthFld, minRelStrength)
    common.select(strLayer, relLayer, relQuery)
  else:
    relLayer = strLayer
  common.progress('counting selected interactions')
  common.message('%i interactions selected.' % common.count(relLayer))
  common.progress('writing output')
Example #26
import common, objects, loaders, regionalization, collections

with common.runtool(6) as parameters:
  zoneLayer, idFld, massFld, reg1Fld, reg2Fld, outPath = parameters
  common.progress('loading zones')
  zones = []
  for regFld in (reg1Fld, reg2Fld):
    loader = loaders.RegionalLoader(regionalization.Regionalizer(objects.PlainRegion))
    loader.sourceOfZones(zoneLayer, {'id' : idFld, 'mass' : massFld}, targetClass=objects.NoFlowZone)
    loader.sourceOfPresets({'assign' : regFld})
    loader.load()
    zones.append(loader.getZoneDict())
  common.progress('computing mismatch')
  mismatch = collections.defaultdict(lambda: collections.defaultdict(float))
  for id in zones[0]:
    reg1 = zones[0][id].getRegion()
    if reg1 is not None: reg1 = reg1.getID()
    reg2 = zones[1][id].getRegion()
    if reg2 is not None: reg2 = reg2.getID()
    mass = zones[0][id].getMass()
    mismatch[reg1][reg2] += mass
  common.progress('saving')
  loaders.InteractionWriter(outPath, {'from' : 'REG_1', 'to' : 'REG_2', 'value' : 'MASS'}).saveRelations(mismatch)
Example #27
def table(zones, idFld, output, exterior=True, selfrel=True):
    common.debug('running neighbour table', zones, idFld, output, exterior,
                 selfrel)
    with common.PathManager(output) as pathman:
        if exterior:
            common.progress('mapping zone surroundings')
            buffer = pathman.tmpFC()
            arcpy.Buffer_analysis(zones,
                                  buffer,
                                  '50 Meters',
                                  dissolve_option='ALL')
            common.progress('creating exterior zone')
            erased = pathman.tmpFC()
            arcpy.Erase_analysis(buffer, zones, erased, TOLERANCE)
            common.progress('identifying exterior zone')
            common.calcField(erased, idFld, EXTERIOR_ID,
                             common.pyTypeOfField(zones, idFld))
            # common.progress('eliminating sliver polygons')
            common.progress('merging exterior zone')
            jointo = pathman.tmpFC()
            arcpy.Merge_management([zones, erased], jointo)
        else:
            jointo = zones
        common.progress('finding neighbours')
        joined = pathman.tmpFC()
        fm = arcpy.FieldMappings()
        fm.addFieldMap(
            common.fieldMap(zones, idFld, common.NEIGH_FROM_FLD, 'FIRST'))
        fm.addFieldMap(
            common.fieldMap(jointo, idFld, common.NEIGH_TO_FLD, 'FIRST'))
        arcpy.SpatialJoin_analysis(zones, jointo, joined, 'JOIN_ONE_TO_MANY',
                                   'KEEP_COMMON', fm, 'INTERSECT', TOLERANCE)
        common.progress('converting to neighbour table')
        fm2 = arcpy.FieldMappings()
        fm2.addFieldMap(
            common.fieldMap(joined, common.NEIGH_FROM_FLD,
                            common.NEIGH_FROM_FLD, 'FIRST'))
        fm2.addFieldMap(
            common.fieldMap(joined, common.NEIGH_TO_FLD, common.NEIGH_TO_FLD,
                            'FIRST'))
        if selfrel:
            query = common.safeQuery(
                "[{}] <> '{}'".format(common.NEIGH_FROM_FLD, EXTERIOR_ID),
                joined)
        else:
            query = common.safeQuery(
                "[{0}] <> [{1}] AND [{0}] <> '{2}'".format(
                    common.NEIGH_FROM_FLD, common.NEIGH_TO_FLD, EXTERIOR_ID),
                joined)
        arcpy.TableToTable_conversion(joined, pathman.getLocation(),
                                      pathman.getOutputName(), query, fm2)
        common.clearFields(output,
                           [common.NEIGH_FROM_FLD, common.NEIGH_TO_FLD])
    return output
Example #28
## IMPORT MODULES
import sys
sys.path.append('.')
import common # useful shortcut functions for arcpy and datamodel
import delimit_functional_regions

common.debugMode = True
common.progress('loading tool parameters')
# whoa, have fun with the parameters
zoneLayer, \
zoneIDFld, zoneMassFld, zoneCoopFld, zoneRegFld, zoneColFld, coreQuery, \
outRegFld, doOutCoreStr, outColFld, measureFlds, \
interTable, interFromIDFld, interToIDFld, interStrFld, \
neighTable, exclaveReassignStr, oscillationStr, doSubBindStr, doCoreFirstStr, \
aggregSorterStr, threshold1Str, threshold2Str = common.parameters(23)

delimit_functional_regions.main(
  zoneLayer, zoneIDFld, zoneMassFld, zoneCoopFld, zoneRegFld, zoneColFld, coreQuery,
  outRegFld, doOutCoreStr, outColFld, measureFlds,
  interTable, interFromIDFld, interToIDFld, interStrFld, neighTable,
  exclavePenalStr=('100' if common.toBool(exclaveReassignStr, 'exclave reassignment switch') else '0'),
  aggregSorterStr=aggregSorterStr,
  verifier1Str='MASS', threshold1Str=threshold1Str,
  verifier2Str='HINTERLAND_MASS', threshold2Str=threshold2Str,
  oscillationStr=oscillationStr, doSubBindStr=doSubBindStr, doCoreFirstStr=doCoreFirstStr)

Example #29
def table(zones, idFld, output, exterior=True, selfrel=True):
  common.debug('running neighbour table', zones, idFld, output, exterior, selfrel)
  with common.PathManager(output) as pathman:
    if exterior:
      common.progress('mapping zone surroundings')
      buffer = pathman.tmpFC()
      arcpy.Buffer_analysis(zones, buffer, '50 Meters', dissolve_option='ALL')
      common.progress('creating exterior zone')
      erased = pathman.tmpFC()
      arcpy.Erase_analysis(buffer, zones, erased, TOLERANCE)
      common.progress('identifying exterior zone')
      common.calcField(erased, idFld, EXTERIOR_ID,
        common.pyTypeOfField(zones, idFld))
      # common.progress('eliminating sliver polygons')
      common.progress('merging exterior zone')
      jointo = pathman.tmpFC()
      arcpy.Merge_management([zones, erased], jointo)
    else:
      jointo = zones
    common.progress('finding neighbours')
    joined = pathman.tmpFC()
    fm = arcpy.FieldMappings()
    fm.addFieldMap(common.fieldMap(zones, idFld, common.NEIGH_FROM_FLD, 'FIRST'))
    fm.addFieldMap(common.fieldMap(jointo, idFld, common.NEIGH_TO_FLD, 'FIRST'))
    arcpy.SpatialJoin_analysis(zones, jointo, joined, 'JOIN_ONE_TO_MANY', 'KEEP_COMMON', fm, 'INTERSECT', TOLERANCE)
    common.progress('converting to neighbour table')
    fm2 = arcpy.FieldMappings()
    fm2.addFieldMap(common.fieldMap(joined, common.NEIGH_FROM_FLD, common.NEIGH_FROM_FLD, 'FIRST'))
    fm2.addFieldMap(common.fieldMap(joined, common.NEIGH_TO_FLD, common.NEIGH_TO_FLD, 'FIRST'))
    if selfrel:
      query = common.safeQuery("[{}] <> '{}'".format(common.NEIGH_FROM_FLD, EXTERIOR_ID), joined)
    else:
      query = common.safeQuery("[{0}] <> [{1}] AND [{0}] <> '{2}'".format(
        common.NEIGH_FROM_FLD, common.NEIGH_TO_FLD, EXTERIOR_ID), joined)
    arcpy.TableToTable_conversion(joined, pathman.getLocation(), pathman.getOutputName(), query, fm2)
    common.clearFields(output, [common.NEIGH_FROM_FLD, common.NEIGH_TO_FLD])
  return output
Example #30
def table(zones, idFld, output, exterior=True, selfrel=True):
  common.debug('running neighbour table', zones, idFld, output, exterior, selfrel)
  with common.PathManager(output) as pathman:
    idKeeper = common.IDFieldKeeper(zones, idFld)
    intIDFld = idKeeper.intIDField()
    if exterior:
      common.progress('mapping zone surroundings')
      buffer = pathman.tmpFC()
      arcpy.Buffer_analysis(zones, buffer, '50 Meters', dissolve_option='ALL')
      common.progress('creating exterior zone')
      erased = pathman.tmpFC()
      arcpy.Erase_analysis(buffer, zones, erased, TOLERANCE)
      common.progress('identifying exterior zone')
      common.calcField(erased, intIDFld, EXTERIOR_ID, int)
      # common.progress('eliminating sliver polygons')
      common.progress('merging exterior zone')
      jointo = pathman.tmpFC()
      arcpy.Merge_management([zones, erased], jointo)
    else:
      jointo = zones
    common.progress('finding neighbours')
    swm = pathman.tmpFile(ext='swm')
    arcpy.GenerateSpatialWeightsMatrix_stats(jointo, intIDFld, swm, 'CONTIGUITY_EDGES_CORNERS')
    common.progress('converting to neighbour table')
    tmpTable = pathman.tmpTable()
    arcpy.ConvertSpatialWeightsMatrixtoTable_stats(swm, tmpTable)
    fromFld, toFld = idKeeper.transform(tmpTable, [intIDFld, 'NID'])
    fm = arcpy.FieldMappings()
    fm.addFieldMap(common.fieldMap(tmpTable, fromFld, common.NEIGH_FROM_FLD, 'FIRST'))
    fm.addFieldMap(common.fieldMap(tmpTable, toFld, common.NEIGH_TO_FLD, 'FIRST'))
    if selfrel:
      query = common.safeQuery("[{}] <> '{}'".format(fromFld, EXTERIOR_ID), tmpTable)
    else:
      query = common.safeQuery("[{0}] <> [{1}] AND [{0}] <> '{2}'".format(
        fromFld, toFld, EXTERIOR_ID), tmpTable)
    arcpy.TableToTable_conversion(tmpTable, pathman.getLocation(), pathman.getOutputName(), query, fm)
    common.clearFields(output, [common.NEIGH_FROM_FLD, common.NEIGH_TO_FLD])
  return output
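The three table() variants above share the same call contract, so a call looks the same regardless of implementation; a hedged usage example with made-up paths:

# build a neighbour table for zones keyed by ZONE_ID (paths are hypothetical)
neighTable = table('C:/data/zones.shp', 'ZONE_ID', 'C:/data/neigh.dbf', exterior=True, selfrel=False)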
Example #31
 def open(self, output, mappers):
   common.progress('preparing output')
   for mapper in mappers:
     mapper.open(output)
Example #32
    def train(self, data, labels, no_epochs=1000):
        batch_size = self.batch_size
        no_data = data.shape[0]
        bat_num = no_data // batch_size
        index_data = np.arange(no_data)
        print("Initiate...")

        if not self.is_restored:
            self.sess.run(tf.global_variables_initializer())

        self.writer_train = tf.summary.FileWriter(self.logs_path)
        if not os.path.isdir(self.save_path_models):
            os.makedirs(self.save_path_models)

        if not os.path.isdir(self.save_path_imgs):
            os.makedirs(self.save_path_imgs)

        if not os.path.isdir(self.save_path_generator):
            os.makedirs(self.save_path_generator)

        self.writer_train.add_graph(self.sess.graph)

        if self.no_classes > 10:
            labels_test = np.ones((10, 100), dtype=np.int16)
            for i in range(10):
                labels_test[i] = labels_test[i] * i
        else:
            labels_test = np.ones((self.no_classes, 100), dtype=np.int16)
            for i in range(self.no_classes):
                labels_test[i] = labels_test[i] * i

        z_test = np.random.normal(0, 1, (100, 100))
        generate_imgs_time = 0
        print("Start training ACGAN...")
        for epoch in range(no_epochs):
            print("")
            print('epoch {}:'.format(epoch + 1))
            np.random.shuffle(index_data)
            start = time.time()
            x_ = []
            y_ = []
            z_ = []
            for ite in range(bat_num):
                x_ = data[index_data[ite * batch_size:(ite + 1) * batch_size]]
                y_ = labels[index_data[ite * batch_size:(ite + 1) *
                                       batch_size]]
                y_onehot = one_hot(y_, self.no_classes)
                z_ = np.random.normal(0, 1, (batch_size, 100))
                z_ = np.concatenate((z_, y_onehot), axis=1)
                z_ = z_.reshape((batch_size, 1, 1, -1))

                if epoch == 0:
                    self.sess.run(self.d_clip)
                    _ = self.sess.run(self.D_optim, {
                        self.X: x_,
                        self.y: y_,
                        self.z: z_,
                        self.istraining: True
                    })
                    continue

                if (ite + 1) % 5 == 0:
                    # print('train g')
                    _ = self.sess.run(self.G_optim, {
                        self.X: x_,
                        self.y: y_,
                        self.z: z_,
                        self.istraining: True
                    })
                else:
                    # print('train D')
                    self.sess.run(self.d_clip)
                    _ = self.sess.run(self.D_optim, {
                        self.X: x_,
                        self.y: y_,
                        self.z: z_,
                        self.istraining: True
                    })

                if ite + 1 == bat_num:  # on the final batch of each epoch, show some generated images
                    for i in range(labels_test.shape[0]):
                        # c means class
                        labels_test_c = one_hot(labels_test[i],
                                                self.no_classes)
                        no_test_sample = len(labels_test_c)
                        z_test_c = np.concatenate((z_test, labels_test_c),
                                                  axis=1)
                        z_test_c = z_test_c.reshape(
                            (no_test_sample, 1, 1, self.z_dim))

                        G_Images_c = self.sess.run(self.g_net, {
                            self.z: z_test_c,
                            self.istraining: False
                        })
                        G_Images_c = (G_Images_c + 1.0) / 2.0
                        G_Images_c = make_image_from_batch(G_Images_c)
                        G_Images_c = (G_Images_c * 255).astype(np.uint8)
                        if self.no_channels == 3:
                            G_Images_c = G_Images_c[:, :, ::-1]
                        cv2.imwrite(
                            '{}/epoch_{}_class_{}.png'.format(
                                self.save_path_imgs, epoch, i), G_Images_c)
                    generate_imgs_time = generate_imgs_time + 1
                    labels_test_c = []
                progress(ite + 1, bat_num)
            # we will show the loss of only final batch in each epoch
            # self.list_loss = [
            #     self.loss_Class_fake, self.loss_Class_real, self.D_loss_w,
            #     self.g_loss_w, self.D_loss_all,  self.G_loss_all
            # ]
            loss_C_fake, loss_C_real, D_loss_w, g_loss_w, D_loss_all, G_loss_all = self.sess.run(
                self.list_loss,
                feed_dict={
                    self.X: x_,
                    self.y: y_,
                    self.z: z_,
                    self.istraining: False
                })
            D_loss_all = D_loss_all / 2.0
            summary_for_loss = self.sess.run(self.summary_loss,
                                             feed_dict={
                                                 self.loss_c_fake_ph:
                                                 loss_C_fake,
                                                 self.loss_c_real_ph:
                                                 loss_C_real,
                                                 self.loss_D_w_ph: D_loss_w,
                                                 self.loss_D_total_ph:
                                                 D_loss_all,
                                                 self.loss_G_w_ph: g_loss_w,
                                                 self.loss_G_total_ph:
                                                 G_loss_all,
                                                 self.istraining: False
                                             })
            self.writer_train.add_summary(summary_for_loss, epoch)
            save_path_models = self.saver_models.save(
                self.sess,
                "{}/model_{}/model.ckpt".format(self.save_path_models, epoch))

            save_path_G = self.saver_G.save(
                self.sess, "{}/model.ckpt".format(self.save_path_generator))

            stop = time.time()
            print("")
            print('time: {}'.format(stop - start))
            print('loss D: {}, loss G: {}'.format(D_loss_all, G_loss_all))
            print('saved model in: {}'.format(save_path_models))
            print('saved G in: {}'.format(save_path_G))
            print("")
            print("=======================================")
        self.writer_train.close()
        self.sess.close()
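Unlike the fraction-based examples earlier, this training loop calls progress(ite + 1, bat_num) with a count and a total; a compatible sketch of that signature (an assumption, not this repository's code):

import sys

def progress(count, total, width=30):
    # count/total variant: same one-line bar, driven by absolute batch counts
    done = int(width * count / total)
    sys.stdout.write('\r[%s%s] %d/%d' % ('=' * done, ' ' * (width - done), count, total))
    sys.stdout.flush()
    if count == total:
        sys.stdout.write('\n')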
Example #33
## IMPORT MODULES
import sys # basic
import traceback
sys.path.append('.')
import common, objects, loaders

if __name__ == '__main__':
  try:
    common.progress('loading tool parameters')
    # common.message(common.parameters(14))
    interLayer, query, interFromIDFld, interToIDFld, interStrengthFld = common.parameters(5)
    inSlots = {'from' : interFromIDFld, 'to' : interToIDFld, 'value' : interStrengthFld}
    outSlots = {'in' : 'SIG_IN', 'out' : 'SIG_OUT'}
    common.progress('loading interactions')
    inter = loaders.InteractionReader(interLayer, inSlots, where=query).read()
    common.progress('selecting significant interactions')
    signif = objects.Interactions.selectSignificant(inter)
    common.progress('writing output')
    loaders.InteractionPresenceMarker(interLayer, inSlots, outSlots, where=query).mark(signif)
    common.done()
  except:
    common.message(traceback.format_exc())
    raise
Example #34
startTime = datetime.datetime.now()
print "%s: connected to source and destination database" % (startTime)

print "Starting to import flows ..."
count = 0

while True:
	flow = importer.get_next_flow()
	if flow is None:
		break

	queue_length = r.rpush(common.REDIS_QUEUE_KEY, json.dumps(flow))
	while queue_length > args.max_queue:
		print "Max queue length reached, importing paused..."
		time.sleep(10)
		queue_length = r.llen(common.REDIS_QUEUE_KEY)
	count += 1
	

common.progress(100, 100)

# Append termination flag to queue
# The preprocessing daemon will terminate with this flag.
r.rpush(common.REDIS_QUEUE_KEY, "END")

endTime = datetime.datetime.now()
print "%s: imported %i flows in %s" % (endTime, count, endTime - startTime)


Example #35
import arcpy, common, randomize

with common.runtool(3) as parameters:
  conns, sdStr, target = parameters
  sd = common.toFloat(sdStr, 'standard deviation of position change')
  common.progress('copying connections')
  arcpy.CopyFeatures_management(conns, target)
  shpFld = arcpy.Describe(target).ShapeFieldName

  prog = common.progressor('randomizing', common.count(target))
  rows = arcpy.UpdateCursor(target)
  for row in rows:
    newPt = randomize.randomizePoint(row.getValue(shpFld), sd)
    row.setValue(shpFld, newPt)
    rows.updateRow(row)
    prog.move()

  prog.end()
  del row, rows
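common.progressor shows up across these tools as a stateful counterpart to common.progress: it is constructed with a label and a total, ticked with move(), and closed with end(). A minimal class with that surface, inferred from usage alone:

import sys

class Progressor(object):
  def __init__(self, label, total):
    self.label, self.total, self.done = label, total, 0
  def move(self):
    self.done += 1
    sys.stdout.write('\r%s: %d/%d' % (self.label, self.done, self.total))
    sys.stdout.flush()
  def end(self):
    sys.stdout.write('\r%s: done (%d items)\n' % (self.label, self.total))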
Example #36
  sumAllFlds = common.parseFields(sumAllFldsStr)
  coreTrans = {fld : 'CORE_' + fld for fld in sumCoreFlds}
  allTrans = {fld : 'ALL_' + fld for fld in sumAllFlds}
  outSumFlds = coreTrans.values() + allTrans.values()
  coreTransItems = coreTrans.items()
  allTransItems = allTrans.items()
  countCores = common.toBool(countCoresStr, 'core count switch')
  countAll = common.toBool(countAllStr, 'zone count switch')

  zoneSlots = {'id' : idFld, 'assign' : regionFld}
  if coopFld:
    zoneSlots['core'] = coopFld
  for fld in set(nameFlds + regTransFlds + sumCoreFlds + sumAllFlds):
    zoneSlots[fld] = fld
    
  common.progress('reading zone data')
  zoneData = loaders.DictReader(zones, zoneSlots).read()
  print zoneData
  
  common.progress('computing region statistics')
  regionData = {} # TODO: start from a copied pattern dict instead, would be faster
  for zoneDict in zoneData.itervalues():
    id = zoneDict['id']
    regID = zoneDict['assign']
    if regID is None: # no assignment -> will fall out of the result
      continue
    if regID not in regionData: # new region detected, create initial values
      regionData[regID] = {'id' : regID, 'coreids' : []}
      for outFld in outSumFlds:
        regionData[regID][outFld] = 0
      for fld in regTransFlds:
Example #37
def table(zones, idFld, output, exterior=True, selfrel=True):
    common.debug('running neighbour table', zones, idFld, output, exterior,
                 selfrel)
    with common.PathManager(output) as pathman:
        idKeeper = common.IDFieldKeeper(zones, idFld)
        intIDFld = idKeeper.intIDField()
        if exterior:
            common.progress('mapping zone surroundings')
            buffer = pathman.tmpFC()
            arcpy.Buffer_analysis(zones,
                                  buffer,
                                  '50 Meters',
                                  dissolve_option='ALL')
            common.progress('creating exterior zone')
            erased = pathman.tmpFC()
            arcpy.Erase_analysis(buffer, zones, erased, TOLERANCE)
            common.progress('identifying exterior zone')
            common.calcField(erased, intIDFld, EXTERIOR_ID, int)
            # common.progress('eliminating sliver polygons')
            common.progress('merging exterior zone')
            jointo = pathman.tmpFC()
            arcpy.Merge_management([zones, erased], jointo)
        else:
            jointo = zones
        common.progress('finding neighbours')
        swm = pathman.tmpFile(ext='swm')
        arcpy.GenerateSpatialWeightsMatrix_stats(jointo, intIDFld, swm,
                                                 'CONTIGUITY_EDGES_CORNERS')
        common.progress('converting to neighbour table')
        tmpTable = pathman.tmpTable()
        arcpy.ConvertSpatialWeightsMatrixtoTable_stats(swm, tmpTable)
        fromFld, toFld = idKeeper.transform(tmpTable, [intIDFld, 'NID'])
        fm = arcpy.FieldMappings()
        fm.addFieldMap(
            common.fieldMap(tmpTable, fromFld, common.NEIGH_FROM_FLD, 'FIRST'))
        fm.addFieldMap(
            common.fieldMap(tmpTable, toFld, common.NEIGH_TO_FLD, 'FIRST'))
        if selfrel:
            query = common.safeQuery(
                "[{}] <> '{}'".format(fromFld, EXTERIOR_ID), tmpTable)
        else:
            query = common.safeQuery(
                "[{0}] <> [{1}] AND [{0}] <> '{2}'".format(
                    fromFld, toFld, EXTERIOR_ID), tmpTable)
        arcpy.TableToTable_conversion(tmpTable, pathman.getLocation(),
                                      pathman.getOutputName(), query, fm)
        common.clearFields(output,
                           [common.NEIGH_FROM_FLD, common.NEIGH_TO_FLD])
    return output
Example #38
import os, arcpy, numpy, math, common

with common.runtool(7) as parameters:
  points, valueField, extent, cellSizeStr, decay, distLimitStr, output = parameters
  decayCoef = common.toFloat(decay, 'distance decay exponent')
  cellSize = common.toFloat(cellSizeStr, 'cell size')
  
  ## LOAD POINTS and their values
  common.progress('loading points')
  pointList = []
  shapeFld = arcpy.Describe(points).ShapeFieldName
  pointCur = arcpy.SearchCursor(points)
  for row in pointCur:
    geom = row.getValue(shapeFld).getPart()
    val = row.getValue(valueField)
    if val is not None:
      pointList.append((geom.Y, geom.X, float(val)))
  del row, pointCur, geom
  
  ## CREATE RASTER
  # calculate row and column counts
  extVals = [common.toFloat(val, 'extent value') for val in extent.split()]
  extR = abs(extVals[3] - extVals[1])
  extC = abs(extVals[2] - extVals[0])
  rowCount = int(round(extR / float(cellSize)))
  colCount = int(round(extC / float(cellSize)))
  # bottom left pixel coordinates minus one pixel
  curX = extVals[1] - cellSize / 2.0
  curY = extVals[0] - cellSize / 2.0
  defCurY = curY
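The snippet stops short of the interpolation loop, but a distance decay exponent plus an optional distance limit point to inverse distance weighting, where each cell receives a distance-weighted mean of the point values; a generic per-cell sketch (illustrative only, not the tool's actual loop):

import math

def idwValue(cellY, cellX, pointList, decayCoef):
  # pointList holds (y, x, value) tuples as loaded above
  num = den = 0.0
  for y, x, value in pointList:
    dist = math.hypot(y - cellY, x - cellX)
    if dist == 0.0:
      return value  # cell centre coincides with a sample point
    weight = dist ** -decayCoef
    num += weight * value
    den += weight
  return num / den if den else None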
  
Example #39
 def __init__(self, location=None, **kwargs):
   common.progress('initializing paths')
   if location:
     self.location = common.checkFile(location)
Example #40
import sys, os, arcpy, math
sys.path.append('.')
import common

TMP_CIRCPTS = 'tmp_circ'
TMP_ALLPOLY = 'tmp_voroall'

with common.runtool(7) as parameters:
  common.progress('parsing attributes')
  ## GET AND PREPARE THE ATTRIBUTES
  # obtained from the tool input
  points, ptsIDFld, weightFld, normStr, transferFldsStr, tolerStr, outPath = parameters
  location, outName = os.path.split(outPath)
  normalization = math.sqrt(common.toFloat(normStr, 'normalization value') / math.pi)
  tolerance = common.toFloat(tolerStr, 'positional tolerance value')
  transferFlds = common.parseFields(transferFldsStr)
  
  common.progress('creating weighting layer')
  common.overwrite(True)
  circLayer = common.createFeatureClass(os.path.join(location, TMP_CIRCPTS), crs=points)
  inShpFld = arcpy.Describe(points).ShapeFieldName
  circShapeFld = arcpy.Describe(circLayer).ShapeFieldName
  arcpy.AddField_management(circLayer, ptsIDFld, common.outTypeOfField(points, ptsIDFld))
  
  inCount = common.count(points)
  common.progress('opening weighting layer')
  inCur = arcpy.SearchCursor(points)
  outCur = arcpy.InsertCursor(circLayer)
  prog = common.progressor('weighting points', inCount)
  pi2 = 2 * math.pi
  for inRow in inCur:
Example #41
  objects.MultiInteractions.setDefaultLength(len(interFlds))
  objects.MonoZone.interactionClass = objects.MultiInteractions
  interSlots = {'from' : interFromIDFld, 'to' : interToIDFld}
  interSlots.update({fld : fld for fld in interFlds})
  measuresToSlots = {}
  outSlots = {}
  for measure in OUT_MEASURES:
    measuresToSlots[measure] = []
    for fld in interFlds:
      slotName = fld + '_' + OUT_MEASURES[measure]
      measuresToSlots[measure].append(slotName)
      outSlots[slotName] = slotName
  regload = loaders.RegionalLoader(regionalization.Regionalizer(objects.FunctionalRegion))
  regload.sourceOfZones(zoneLayer, zoneInSlots, targetClass=objects.MonoZone, coreQuery=zoneQuery)
  regload.sourceOfMultiInteractions(interLayer, interSlots, where=interQuery, ordering=interFlds)
  regload.load()
  common.progress('summarizing')
  measurer = objects.ZoneMeasurer()
  zoneData = {}
  indexes = range(len(interFlds))
  for zone in regload.getZoneList():
    zoneID = zone.getID()
    zoneData[zoneID] = {}
    for measure in OUT_MEASURES:
      vec = measurer.getMeasure(zone, measure)
      for i in indexes:
        zoneData[zoneID][measuresToSlots[measure][i]] = float(vec[i])
  loaders.ObjectMarker(zoneLayer, zoneInSlots, outSlots).mark(zoneData)

  # zoneLoader.outputMultiMeasures(zones, interactionLoader.getStrengthFieldNames(), measures=['TOT_IN_CORE', 'TOT_OUT_CORE'], aliases=['INSUM', 'OUTSUM'])
Example #42
def eligibleEnds(rels, endIDs, invert=False):
  sources = set()
  targets = set()
  for source in rels:
    if (source in endIDs) ^ invert:
      sources.add(source)
      for target in rels[source]:
        if (target in endIDs) ^ invert:
          targets.add(target)
  return sources, targets

with common.runtool(8) as parameters:
  table, fromIDField, toIDField, places, placesIDField, placeQuery, invertStr, output = parameters
  invert = common.toBool(invertStr, 'place search inversion')
  common.progress('loading places')
  placeIDs = set(loaders.OneFieldReader(places, placesIDField, where=placeQuery).read())
  if placeIDs:
    # hack to retrieve random element from set
    for id in placeIDs: break
    quote = isinstance(id, str) or isinstance(id, unicode)
    common.progress('loading interactions')
    interRels = loaders.NeighbourTableReader(table, {'from' : fromIDField, 'to' : toIDField}).read()
    common.progress('preparing selection query')
    sources, targets = eligibleEnds(interRels, placeIDs, invert)
    qry = '(' + createSelectQuery(table, fromIDField, sources, quote=quote) + ') AND (' + createSelectQuery(table, toIDField, targets, quote=quote) + ')'
    common.progress('selecting interactions')
    print table, output, qry
    common.select(table, common.addTableExt(output), qry)
  else:
    common.warning('No places found')
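createSelectQuery is called above but not defined in the snippet; given the quote flag and the sets of IDs it receives, a plausible reading is an IN-clause builder along these lines (hypothetical):

def createSelectQuery(table, field, ids, quote=False):
  # table is accepted to mirror the call site; unused in this sketch
  # e.g. createSelectQuery(t, 'FROM_ID', {'a', 'b'}, quote=True) -> "[FROM_ID] IN ('a', 'b')"
  vals = ', '.join(("'%s'" % i) if quote else str(i) for i in ids)
  return '[%s] IN (%s)' % (field, vals)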