def reportNaipStatus(args): conn, cur = getDatabase() # report for year year = args.get('year', CONFIG['year']) # number of possible doqqs by state sql = 'select st, count(*) from naipbbox{} group by st order by st'.format(year) cur.execute( sql ) print print 'Number of potential DOQQs by state for {}'.format(year) print '-------------------------------------------' for row in cur: print ' {0:>4s} | {1:8,d}'.format(row[0], row[1]) print # number of doqqs downloaded by state sql = '''select st, count(*) from naipbbox{0} a left outer join naipfetched{0} b on a.gid=b.gid where b.gid is not null group by st order by st'''.format(year) cur.execute( sql ) print 'Number of downloaded DOQQs by state for {}'.format(year) print '-------------------------------------------' for row in cur: print ' {0:>4s} | {1:8,d}'.format(row[0], row[1]) print # TODO number of doqqs converted by state conn.close()
def processCommand(self):
    """Create a 'scale_up_cloud' service request against the subscription of
    the cloud instance named in self.json_data, with an audit-trail record.

    Reads from self.json_data: 'ci_id', 'effective_date' (YYYY-MM-DD), 'items'.
    Returns the new request id (str) on success, or message(...) on failure.
    The database connection is closed on every path, including exceptions.
    """
    db = utils.getDatabase()
    try:
        c_cloud_instances = db['cloud_instances']
        c_subscription_audit_trail = db['subscription_audit_trail']
        instance_id = self.json_data['ci_id'].upper()

        cloud_instance = c_cloud_instances.find_one({"_id": instance_id})
        if cloud_instance is None:
            msg = "The cloud instance can not be found by your provided cloud id"
            return message(msg)

        c_subscription = db['subscription']
        subscription = c_subscription.find_one({"_id": instance_id})
        if subscription is None:
            msg = "The subscription record is not existed"
            return message(msg)

        # check the service request status, then decide whether to add new record
        service_requests = subscription['service_requests']
        request_id = str(uuid.uuid4())
        effective_date = datetime.strptime(self.json_data['effective_date'], '%Y-%m-%d')
        new_request = {
            "_id": request_id,
            "service_request_type": "scale_up_cloud",
            "service_request_items": self.json_data['items'],
            "service_request_status": "request",
            "process_history": [{
                "service_request_status": "request",
                "effective_ts": str(effective_date),
                "process_ts": str(datetime.now()),
                "process_by": "CBO",
            }],
        }
        # audit record holds the subscription as it is BEFORE modification;
        # it is inserted before any mutation below so the stored copy is old
        d_audit_trail = {
            "_id": str(uuid.uuid4()),
            "subscription_id": instance_id,
            "process_ts": str(datetime.now()),
            "process_by": "CBO",
            "process_type": "scale_up_cloud",
            "subscription_old_copy": subscription,
        }

        if service_requests is None:
            c_subscription_audit_trail.insert(d_audit_trail)
            # BUGFIX: store a one-element list, not a bare dict -- every other
            # path (the loop and .append below) treats this field as a list
            subscription['service_requests'] = [new_request]
            c_subscription.save(subscription)
            return request_id

        # refuse to stack a new request on top of one still pending
        if any(req["service_request_status"] == "request" for req in service_requests):
            msg = "There are pending request list.Please request after they are complete"
            return message(msg)

        c_subscription_audit_trail.insert(d_audit_trail)
        service_requests.append(new_request)
        subscription['service_requests'] = service_requests
        c_subscription.save(subscription)
        return request_id
    finally:
        # single close point replaces the per-return close calls and also
        # covers unexpected exceptions, which previously leaked the connection
        db.connection.close()
def processNaipFromList(files, procn, year):
    """Process the DOQQ files listed in *files* and flag each success as
    processed in naipfetched<year>.

    Each path is reduced to its bare filename stem, matched against the
    configured NAIP shapefile table to recover the gid, then handed to
    processDOQQ().
    """
    conn, cur = getDatabase()
    table = CONFIG['naip.shptable'].format(year)
    for path in files:
        # strip directory and extension to get the DOQQ name stem
        stem = os.path.splitext(os.path.basename(path))[0]
        # look up the file in the database to get its gid
        cur.execute("""select filename, gid from {0} where filename like '{1}%'""".format(table, stem))
        for rec in cur.fetchall():
            if processDOQQ(rec, procn, year):
                # commit per DOQQ so progress survives an interruption
                cur.execute('update naipfetched{0} set processed=true where gid={1}'.format(year, rec[1]))
                conn.commit()
    conn.close()
def loadsegments(fsegshp, year, job): verbose = CONFIG.get('verbose', False) epsg = CONFIG.get('naip.projection', 'EPSG:4326') dsn = 'PG:' + CONFIG['dsn'] table = CONFIG.get('seg.table', 'segments.y{0}_{1}').format(year, job) conn, cur = getDatabase() sql = 'drop table if exists {} cascade'.format(table) if verbose: print sql cur.execute(sql) conn.commit() conn.close() cmd = [ 'ogr2ogr', '-t_srs', epsg, '-nln', table, '-overwrite', '-lco', 'OVERWRITE=YES', '-lco', 'PRECISION=NO', '-lco', 'GEOMETRY_NAME=geom', '-lco', 'FID=gid', '-f', 'PostgreSQL', dsn, fsegshp ] runCommand(cmd, verbose)
def processNaipFromQuery(nproc, procn, limit, year): conn, cur = getDatabase() verbose = CONFIG.get('verbose', False) clause = '' if nproc > 1: clause = " and a.gid % {0} = {1} ".format(nproc, procn) clause2 = '' if limit > 0: clause2 = " limit {0} ".format(limit) sql = """select filename, a.gid from naipbbox{0} a left outer join naipfetched{0} b on a.gid=b.gid where b.gid is not null and b.processed is null {1} order by a.gid {2} """.format(year, clause, clause2) if verbose: print 'proc: {}, sql: {}'.format(procn, sql) cur.execute( sql ) rows = cur.fetchall() if verbose: print 'proc: {}, count: {}'.format(procn, len(rows)) for row in rows: if processDOQQ( row, procn, year ): sql = 'update naipfetched{0} set processed=true where gid={1}'.format(year, row[1]) cur.execute( sql ) conn.commit() conn.close()
def getDoqqsForArea(year, areaOfInterest):
    """Return local .tif paths of already-downloaded NAIP DOQQs intersecting
    *areaOfInterest* for *year*.

    areaOfInterest may be:
      * '' -- fall back to CONFIG['areaOfInterest']
      * a numeric code: 2 digits (state FIPS), 5 digits (county GEOID),
        or 10 digits (cousub GEOID)
      * a 'minx,miny,maxx,maxy' bounding-box string

    NOTE(review): error signaling is inconsistent -- an unrecognized numeric
    code (or an empty result) calls sys.exit(1), while a completely
    unrecognized string returns True instead of a list.  Confirm how callers
    handle the True case before changing either path.
    """
    verbose = CONFIG.get('verbose', False)
    home = CONFIG['projectHomeDir']
    doqqs = CONFIG['naip.doqq_dir']
    # downloaded DOQQs live under <projectHomeDir>/<naip.doqq_dir>/<year>
    doqqDir = os.path.join(home, doqqs, year)
    conn, cur = getDatabase()
    # use default areaOfInterest in CONFIG
    if len(areaOfInterest) == 0:
        areaOfInterest = CONFIG['areaOfInterest']
    # analyze areaOfInterest to see if its a fips code or bbox
    if re.match(r'^[0-9]+$', areaOfInterest):
        if len(areaOfInterest) == 2:
            # we have a useful state code
            st = FIPS2ST[areaOfInterest].upper()
            join = ''
            where = " a.st='{}'".format(st)
        elif len(areaOfInterest) == 5:
            # we have a useful county code
            join = ' join county c on st_intersects(a.geom, c.geom) '
            where = " c.geoid = '{}' ".format(areaOfInterest)
        elif len(areaOfInterest) == 10:
            # we have a cousub code
            join = ' join cousub c on st_intersects(a.geom, c.geom) '
            where = " c.geoid = '{}' ".format(areaOfInterest)
        else:
            # not sure what we have
            print "ERROR: Area of interest is not understood ({})!".format( areaOfInterest)
            conn.close()
            sys.exit(1)
        # only DOQQs recorded as fetched (b.gid is not null) are candidates
        sql = '''select a.gid, filename from naipbbox{0} a left outer join naipfetched{0} b on a.gid=b.gid {1} where b.gid is not null and {2} '''.format(year, join, where)
    elif re.match( r'^(-?[0-9]+\.?[0-9]*),(-?[0-9]+\.?[0-9]*),(-?[0-9]+\.?[0-9]*),(-?[0-9]+\.?[0-9]*)$', areaOfInterest):
        # bbox given as 4 comma-separated numbers; && is the PostGIS
        # bounding-box overlap operator
        bbox = areaOfInterest.split(',')
        sql = '''select a.gid, filename from naipbbox{0} a left outer join naipfetched{0} b on a.gid=b.gid where b.gid is not null and 'LINESTRING({1} {2},{3} {4})'::geometry && a.geom'''.format( year, bbox[0], bbox[1], bbox[2], bbox[3])
    else:
        print "ERROR: Area of interest is not understood ({})!".format( areaOfInterest)
        conn.close()
        return True
    if verbose:
        print sql
    cur.execute(sql)
    files = []
    for row in cur:
        filename = row[1]
        # assumes NAIP filenames encode the storage subdir in chars [2:7]
        # and that the first 26 chars plus '.tif' name the file on disk
        # -- TODO confirm against the download code
        sdir = filename[2:7]
        name = filename[:26] + '.tif'
        files.append(os.path.join(doqqDir, sdir, name))
    conn.close()
    if len(files) == 0:
        print "No files were found to process your request!"
        sys.exit(1)
    return files
def CensusFetch(): baseurl = CONFIG.get('census.url', '') verbose = CONFIG.get('verbose', False) year = CONFIG.get('census.year', '') if baseurl == '' or year == '': print "ERROR in config.py, census.url or census.year are not defined!" sys.exit(2) # make dir the outdir path exists outdir = CONFIG['projectHomeDir'] + '/data/census/' if not os.path.exists( outdir ): os.makedirs( outdir ) # -------------- fetch the county data ----------------------------- if True: url = baseurl + '/' + '/'.join(CONFIG['census.layers']['county']) url = url % ( year, year ) cmd = ['wget', '-o', outdir + 'county.log', '-N', '-nv', '-nd', '-nH', '-P', outdir, url] if verbose: print ' '.join(cmd) subprocess.call(cmd) zipfile = r'.*_county\.zip$' loadZippedShape( 'census.county', outdir, zipfile, 'PROMOTE_TO_MULTI' ) # -------------- deal with area of interest ------------------------ # # this needs to be after county data is loaded # because we search that against the BBOX # # this might be a FIPS code or a BBOX list aoi = CONFIG.get('areaOfInterest', '') if type(aoi) == str and len(aoi) == 0: print "areaOfInterest is disabled in config.py" sys.exit(2) elif type(aoi) == list: conn, cur = getDatabase() states, counties = getStatesCountiesFromBbox( cur, CONFIG['areaOfInterest'] ) conn.close() else: states = [ aoi[:2] ] if len(aoi) >= 5: counties = [ aoi[:5] ] elif len(aoi) == 2: counties = [ aoi[:2] + '*' ] else: counties = [ '*' ] # make sure we have at least 1 county if len(states) == 0: print "ERROR: no counties were selected!" 
sys.exit(2) # -------------- fetch cousub data ---------------------------------- if True: url = baseurl + '/' + CONFIG['census.layers']['cousub'][0] + '/' url = url % ( year ) for ss in states: fzip = CONFIG['census.layers']['cousub'][1] % ( year, ss ) cmd = ['wget', '-o', outdir + 'cousub.log', '-N', '-nv', '-nd', '-nH', '-P', outdir, url + fzip] if verbose: print ' '.join(cmd) subprocess.call(cmd) zipfile = r'.*_cousub\.zip$' loadZippedShape( 'census.cousub', outdir, zipfile, 'PROMOTE_TO_MULTI' ) # -------------- fetch roads data ----------------------------------- if True: url = baseurl + '/' + CONFIG['census.layers']['roads'][0] + '/' url = url % ( year ) for cc in counties: fzip = CONFIG['census.layers']['roads'][1] % ( year, cc ) cmd = ['wget', '-o', outdir + 'roads.log', '-N', '-nv', '-nd', '-nH', '-P', outdir, url + fzip] if verbose: print ' '.join(cmd) subprocess.call(cmd) zipfile = r'.*_roads\.zip$' loadZippedShape( 'census.roads', outdir, zipfile, 'MULTILINESTRING' )
def getNaipFiles(year, areaOfInterest, donaip): verbose = CONFIG.get('verbose', False) home = CONFIG['projectHomeDir'] doqqs = CONFIG['naip.download'] doqqDir = os.path.join( home, doqqs, year ) if not os.path.exists(doqqDir): os.makedirs(doqqDir) conn, cur = getDatabase() # template url for fething files try: template = CONFIG['naip.url']['doqq.urls'][year] except: template = '' if template == '': print 'ERROR: CONFIG[naip.url][doqq.urls][{}] is not configured!'.format(year) conn.close() return True # create table if not exists to log downloaded DOQQs sql = '''create table if not exists naip.naipfetched{} ( gid integer not null primary key, processed boolean)'''.format(year) cur.execute( sql ) # use default areaOfInterest in CONFIG if len(areaOfInterest) == 0: areaOfInterest = CONFIG['areaOfInterest'] # analyze areaOfInterest to see if its a fips code or bbox if re.match(r'^[0-9]+$', areaOfInterest): if len(areaOfInterest) == 2: # we have a useful state code st = FIPS2ST[areaOfInterest].upper() join = '' where = " a.st='%s'" % (st) elif len(areaOfInterest) == 5: # we have a useful county code join = ' join county c on st_intersects(a.geom, c.geom) ' where = " c.geoid = '%s' " % (areaOfInterest) elif len(areaOfInterest) == 10: # we have a cousub code join = ' join cousub c on st_intersects(a.geom, c.geom) ' where = " c.geoid = '%s' " % (areaOfInterest) else: # not sure what we have print "ERROR: Area of interest is not understood (%s)!" 
% areaOfInterest conn.close() return True sql = '''select count(*) from naipbbox{0} a left outer join naipfetched{0} b on a.gid=b.gid {1} where b.gid is null and {2} '''.format(year, join, where) cur.execute(sql) count = cur.fetchone()[0] sql = '''select a.gid, filename from naipbbox{0} a left outer join naipfetched{0} b on a.gid=b.gid {1} where b.gid is null and {2} '''.format(year, join, where) elif re.match(r'^(-?[0-9]+\.?[0-9]*),(-?[0-9]+\.?[0-9]*),(-?[0-9]+\.?[0-9]*),(-?[0-9]+\.?[0-9]*)$', areaOfInterest): bbox = areaOfInterest.split(',') # query to get list of doqqs to fetch sql = '''select count(*) from naipbbox{0} a left outer join naipfetched{0} b on a.gid=b.gid where b.gid is null and 'LINESTRING({1} {2},{3} {4})'::geometry && a.geom'''.format(year, bbox[0], bbox[1], bbox[2], bbox[3]) cur.execute(sql) count = cur.fetchone()[0] sql = '''select a.gid, filename from naipbbox{0} a left outer join naipfetched{0} b on a.gid=b.gid where b.gid is null and 'LINESTRING({1} {2},{3} {4})'::geometry && a.geom'''.format(year, bbox[0], bbox[1], bbox[2], bbox[3]) else: print "ERROR: Area of interest is not understood (%s)!" % areaOfInterest conn.close() return True print 'Plan is to download {} DOQQs'.format(count) if donaip: if verbose: print sql cur.execute( sql ) # get a 2nd cur for updating table cur2 = conn.cursor() sql = "insert into naipfetched{0} values ({1}) on conflict do nothing" # loop through list and download them for row in cur: # fetch the file filename = row[1] sdir = filename[2:7] name = filename[:26] + '.tif' url = template.format(sdir, name) outdir = os.path.join( doqqDir, sdir ) if not os.path.exists( outdir ): os.makedirs( outdir ) log = os.path.join( home, doqqs, 'doqqs-{}.log'.format(year)) cmd = ['wget', '-a', log, '-N', '-nv', '-nd', '-nH', '-P', outdir, url] if verbose: print ' '.join( cmd ) subprocess.call( cmd ) # mark it as downloaded cur2.execute( sql.format(year, row[0]) ) conn.commit() conn.close() return False