def index(): form = DotCartoForm() fi = None first = None old_map_name = None if form.validate_on_submit(): # import ipdb; ipdb.set_trace() # cred = json.load(open('credentials.json')) # modify credentials.json.sample # if username == '': username = form.carto_api_endpoint.data # if apikey == '': apikey = form.carto_api_key.data # if cartojson == '': cartojson = form.cartojsontemplate.data.replace( ' ', '_').lower() + '.carto.json' # if first == '': print 'cartjson assigned' # if cartojson == 'dma_heatmap.carto.json': # first = 'dma_master_polygons_merge' if cartojson == 'dmas_oct.carto.json': first = 'dma_visit_index_template_data_may2017v1' elif cartojson == 'visits_final.carto.json': first = 'visitindexheatmapexample' print first # if second == '': second = form.new_dataset_names.data # if cartojson == 'dma_heatmap.carto.json': # old_map_name = "TEMPLATE (USE THIS) - DMA Heatmap" if cartojson == 'dmas_oct.carto.json': old_map_name = "NinthDecimal DMAs - 0914" elif cartojson == 'visits_final.carto.json': old_map_name = "NinthDecimal Visits Index - 0914" print first new_map_name = form.map_title_name.data # cartojson = 'template.carto.json' curTime = datetime.now().strftime('%Y_%m_%d_%H_%M_%S') inFile = 'data/' + cartojson ouFile = 'data/_temp/' + cartojson.replace( '.carto.json', '') + '_' + curTime + '.carto.json' fiFile = 'data/_temp/' + new_map_name.replace( ' ', '_').lower() + '_' + cartojson.replace( '.carto.json', '') + '_' + curTime + '.carto.json' print inFile, ouFile, first, second openFileReplaceDatasetSave(inFile, ouFile, first, second) print inFile, ouFile, old_map_name, new_map_name openFileReplaceDatasetSave(ouFile, fiFile, old_map_name, new_map_name) cl = CartoDBAPIKey(apikey, username) # Import csv file, set privacy as 'link' and create a default viz fi = FileImport(fiFile, cl, create_vis='true', privacy='link') fi.run() return render_template("index.html", form=form, result=[str(fi)])
def push_to_cartodb(f): """ send dataset f to cartodb, return success of import """ print "attempting to import into cartodb" config = loadConfig() cl = CartoDBAPIKey(config["API_KEY"],config["user"]) fi = FileImport(f,cl,table_name='python_table_test') fi.run() return fi.success
def push_to_cartodb(f): """ send dataset f to cartodb, return success of import """ print "attempting to import into cartodb" config = loadConfig() cl = CartoDBAPIKey(config["API_KEY"], config["user"]) fi = FileImport(f, cl, table_name='python_table_test') fi.run() return fi.success
def insertar_mapa(file_name, grupo, group_folder_id):
    """Export selected columns of `grupo`, upload them, and map them in CARTO.

    Writes the group's survey columns to xlsx and csv under grupos/, uploads
    the csv into the given Drive folder via insert_file, imports the xlsx
    into CARTO as a public table named `file_name`, then polls the import
    endpoint once.

    SECURITY NOTE(review): the CARTO API key is hard-coded here; it should be
    loaded from configuration / secret storage instead.
    """
    carto_username = '******'
    carto_key = '17179e6a8fc54fe03857e65f1d562caf98a8d4bb'
    cl = CartoDBAPIKey(carto_key, carto_username)
    grupo = grupo.reset_index()
    # Exported columns: survey question labels plus emissions and coordinates.
    columns = [u'p8 2. N\xfamero de c\xe9dula',
               u'p79 8. G\xe9nero',
               u'p12 Edad',
               u'p54 12. \xbfTiene alguna discapacidad?',
               u'p27 18. Sedes',
               u'p81 24. \xbfTiene un horario fijo para ir al trabajo?',
               'Emisiones',
               'Latitude',
               'Longitude']
    grupo[columns].to_excel('grupos/' + file_name + '.xlsx', index=False)
    grupo[columns].to_csv('grupos/' + file_name + '.csv', index=False)
    insert_file(file_name + '.csv', ' ', group_folder_id,
                'grupos/' + file_name + '.csv', mimetype='text/csv')
    fi = FileImport('grupos/' + file_name + '.xlsx', cl, privacy='public',
                    content_guessing='true', create_vis='true',
                    table_name=file_name)
    fi.run()
    # Poll the CARTO Import API once for this job's status.
    call = ('https://' + carto_username + '.carto.com/api/v1/imports/' +
            str(fi.item_queue_id) + '?api_key=' + carto_key)
    request = requests.get(call)
def tocarto(table, url, geom_field=None):
    """Reproject `table` to WGS84, write it to csv and import it into CARTO."""
    from cartodb import CartoDBAPIKey, CartoDBException, FileImport
    # parse url
    # get name of
    # FIXME(review): `name` is never defined in this function — presumably it
    # should be parsed out of `url` before this line.
    filename = name + '.csv'
    # write to csv
    # TODO can FileImport take a stream so we don't have to write to csv first?
    (etl.reproject(table, 4326, geom_field=geom_field)
        .rename('shape', 'the_geom')
        .tocsv(filename)
     )
    # SECURITY NOTE(review): placeholder credentials; load these from
    # configuration / secret storage before use.
    API_KEY = '<api key>'
    DOMAIN = '<domain>'
    cl = CartoDBAPIKey(API_KEY, DOMAIN)
    fi = FileImport(filename, cl, privacy='public')
    fi.run()
def cleanNinthDecimal(inFile, inFileName, username, apikey):
    """Add a 'the_geom' column to an uploaded csv and import it into CARTO.

    Reads the csv byte stream `inFile`, appends a 'the_geom' value derived
    from each row's 'geofence' column via reorderLatLng, writes the result to
    data/_send/<name>.csv, then imports that file. Returns the FileImport job
    so the caller can inspect its status.
    """
    ouFileName = inFileName.replace('.csv', '')
    ouFile = 'data/_send/' + ouFileName + '.csv'
    with open(ouFile, 'w') as csvoutput:
        writer = csv.writer(csvoutput, lineterminator='\n')
        # Wrap the incoming byte stream with universal-newline handling;
        # the source files use '\r' line endings.
        newline_wrapper = TextIOWrapper(inFile, newline=None)
        reader = csv.reader(newline_wrapper)
        rows = []
        header = next(reader)
        header.append('the_geom')
        rows.append(header)
        geoFenceColLoc = header.index('geofence')
        for row in reader:
            row.append(reorderLatLng(row[geoFenceColLoc]))
            rows.append(row)
        writer.writerows(rows)
    cl = CartoDBAPIKey(apikey, username)
    # Import csv, set privacy as 'link', no viz; guessing disabled so CARTO
    # does not reinterpret column contents/types.
    fi = FileImport(ouFile, cl, create_vis='false', privacy='link',
                    content_guessing='false', type_guessing='false')
    fi.run()
    return fi
["@raphaelvarane"], ["@nachofi1990"], ["Kiko Casilla", "@KikoCasilla13"], ["Rubén Yáñez", "@RYanez93"], ["Mateo Kovacic", "@MateoKova16"], ["@JeseRodriguez10"], ["Arbeloa", "@aarbeloa17"], ] table_name = TABLE_NAME for i, category in enumerate(categories): print category new_job = p.jobs.create(START_TIMESTAMP, END_TIMESTAMP, TABLE_NAME, category, geo_enrichment=False) if i != 14: # 14 is Bale duplicated in the original player list new_job.export_tweets(category=i + 1, append=False if i == 0 else True) import_job = FileImport(table_name + ".csv", cdb) import_job.run() print "SUCCESS", import_job.success state = "uploading" while state != "complete" and state != "failure": time.sleep(5) import_job.update() print import_job.state
from _settings import *
from cartodb import CartoDBAPIKey, CartoDBException, FileImport
# gitignored secret info
from _secret_info import cartodb_domain, API_KEY

# https://github.com/CartoDB/cartodb-python
cl = CartoDBAPIKey(API_KEY, cartodb_domain)

# One csv of NYC street-tree data per census year.
tree_files = [
    wp + '/to_cartodb/nyctrees1995.csv',
    wp + '/to_cartodb/nyctrees2005.csv',
    wp + '/to_cartodb/nyctrees2015.csv',
]

# Import each csv file, set privacy as 'public' and create a default viz.
for tree_file in tree_files:
    fi = FileImport(tree_file, cl, create_vis='true', privacy='public')
    fi.run()
Code: import csv files (timeline.csv and/or tweets.csv) from previous scripts to CartoDB Note: see below to know how to create an animated or torque map with the imported data ''' #Import modules from cartodb import CartoDBAPIKey, CartoDBException, FileImport #api configuration API_KEY ='API_KEY' cartodb_domain = 'USER_DOMAIN' cl = CartoDBAPIKey(API_KEY, cartodb_domain) #import csvs fi = FileImport("C://timeline.csv", cl) #fi = FileImport("C://tweets.csv", cl) comment last line, and uncomment this to use the code fi.run() ''' How to create animated map with point data using Torque in CartoDB*: Torque is a visualization that allows you to visualize geographic data over time. You can access it just like you would other Visualization Wizards, from the pull-out tray on the right of your screen, under the Visualization Wizard. Once you select Torque, you’ll notice that the column that CartoDB picked to visualize is the cartodbid column. This column is just an arbitrarily assigned ID number that CartoDB uses and assigns based on the order of the data in your spreadsheet. In terms of mapping, it’s usually fairly meaningless, so you will want to change it to something more meaningful, as it’s just the order that the data is in in the table. We should go ahead and select the column labeled date [note: change your column Time data type from string to date in your dataset view] since that makes the most sense here.
from _settings import *
from cartodb import CartoDBAPIKey, CartoDBException, FileImport
# gitignored secret info
from _secret_info import cartodb_domain, API_KEY

# https://github.com/CartoDB/cartodb-python
cl = CartoDBAPIKey(API_KEY, cartodb_domain)

inFile = wo + '/cb2010tree.csv'
# Import csv file, set privacy as 'public' and create a default viz
fi = FileImport(inFile, cl, create_vis='true', privacy='public')
fi.run()
def test_file_import(self):
    """A basic file import completes with an id assigned to the job."""
    job = FileImport(IMPORT_FILE, self.client)
    job.run()
    self.assertIsNotNone(job.id)
def get_tweets(event, context): try: latest = cdb.sql( "select * from {table_name} order by postedtime desc limit 1". format(table_name=TABLE_NAME))["rows"][0] except CartoDBException as e: print("some error ocurred", e) except (IndexError, AttributeError): start_timestamp = datetime.strptime( config.get('interval', 'start_timestamp'), "%Y%m%d%H%M") else: start_timestamp = dateutil.parser.parse(latest["postedtime"]) end_timestamp = datetime.utcnow() tmp_table_name = TABLE_NAME + "_" + ''.join( random.choice(string.ascii_uppercase) for _ in range(25)) tmp_table_filename = tmp_table_name for i, category in enumerate(categories): new_job = p.jobs.create(start_timestamp, end_timestamp, tmp_table_name, category) new_job.export_tweets(category=i + 1, append=False if i == 0 else True) # Now, because we can't use ogr2ogr, here comes the HACK! # 1) Import file into cartodb.com import_job = FileImport(tmp_table_name + ".csv", cdb) import_job.run() if import_job.success is True: import_job.update() else: return while import_job.state != "complete" and import_job.state != "failure": time.sleep(5) import_job.update() if import_job.state == "failure": return tmp_table_name = import_job.table_name # Just in case it changed during import print "TMP_TABLE_NAME", tmp_table_name # 3) Append new data from temp table to master table (!!! seq name is hardcoded !!!) 
try: print cdb.sql( "INSERT INTO {account_name}.{table_name} (actor_displayname,actor_followerscount,actor_friendscount," "actor_id,actor_image,actor_listedcount,actor_location," "actor_postedtime,actor_preferredusername,actor_statusescount,actor_summary,actor_utcoffset," "actor_verified,body,category_name,category_terms,favoritescount,geo," "inreplyto_link,link,location_geo,location_name," "object_type," "postedtime,retweetcount,the_geom,twitter_entities," "twitter_lang,cartodb_id) SELECT actor_displayname,actor_followerscount,actor_friendscount," "actor_id,actor_image,actor_listedcount,actor_location," "actor_postedtime,actor_preferredusername,actor_statusescount,actor_summary,actor_utcoffset," "actor_verified,body,category_name,category_terms,favoritescount,geo," "inreplyto_link,link,location_geo,location_name," "object_type," "postedtime,retweetcount,the_geom,twitter_entities," "twitter_lang,nextval('rm_cartodb_id_seq_0') as cartodb_id " "FROM {account_name}.{tmp_table_name}".format( table_name=TABLE_NAME, tmp_table_name=tmp_table_name, account_name=ACCOUNT_NAME)) except CartoDBException as e: print("Data couldn't be appended to master table", e) # 4) Delete temporary table try: print cdb.sql("DROP TABLE %s CASCADE" % tmp_table_name) except CartoDBException as e: print("some error ocurred", e) try: os.remove(tmp_table_filename + '.csv') except OSError: pass