import os

# Connection settings.  Each value can be supplied through an environment
# variable; otherwise the inline fallback is used.
BASEURL = os.environ.get('CARTO_API_URL', 'https://jsanz.carto.com')  # <-- replace with your username or set the envvar
APIKEY = os.environ.get('CARTO_API_KEY', False)  # <-- replace False with your CARTO API key or set the envvar
ORG = os.environ.get('CARTO_ORG')

# Confirm that every setting is present (truthy) before going on.
if all((BASEURL, APIKEY, ORG)):
    print('All good!')
All good!
from carto.auth import APIKeyAuthClient
# Authenticated API client; reused by every manager/client created below.
carto_key = APIKeyAuthClient(BASEURL, APIKEY, ORG)
from carto.sql import SQLClient
from carto.exceptions import CartoException
import pandas as pd
from IPython.core.display import display, HTML
# SQL client
sql = SQLClient(carto_key)
# Helper function to render a nice table from a query
def print_query(query):
    """Run *query* through the CARTO SQL API and render the rows as an HTML table.

    Parameters
    ----------
    query : str
        SQL statement to send to the SQL API.

    Any CartoException raised by the API is caught and reported on stdout
    instead of propagating, so a bad query does not abort the notebook.
    """
    try:
        dic = sql.send(query)
        df = pd.DataFrame(dic['rows'])
        display(HTML(df.to_html()))
    except CartoException as e:
        # Fixed typo in the original message: "ocurred" -> "occurred".
        print("some error occurred", e)
Let's run a geocoding function
# Geocode the five most populated Spanish places by name with the CARTO
# Data Services function, showing the resulting points as WKT.
print_query('''
SELECT name,
pop_max,
ST_AsText(cdb_geocode_namedplace_point(name,'Spain')) as wkt
FROM populated_places
WHERE adm0_a3 = 'ESP'
ORDER BY pop_max DESC LIMIT 5
''')
name | pop_max | wkt | |
---|---|---|---|
0 | Madrid | 5567000 | POINT(-3.669245 40.429913) |
1 | Barcelona | 4920000 | POINT(2.159592 41.400347) |
2 | Seville | 1212045 | POINT(-5.97997 37.383606) |
3 | Bilbao | 875552 | POINT(-2.937123 43.258534) |
4 | Valencia | 808000 | POINT(-0.363258 39.464773) |
from carto.file_import import FileImportJob
import time
Importing from a local file
# Upload the local GeoPackage and create a dataset from it.
fi = FileImportJob("brooklyn_poverty.gpkg", carto_key)
fi.run()
# Poll until the job reaches a terminal state.  The original loop only
# tested for 'complete', so a failed import would poll forever.
while fi.state not in ('complete', 'failure'):
    print('Importing...')
    fi.refresh()
    time.sleep(5)
if fi.state == 'failure':
    # NOTE(review): inspect the job object for error details if this fires.
    print('Import failed')
else:
    print_query('SELECT * FROM brooklyn_poverty LIMIT 5')
Importing... Importing... Importing...
cartodb_id | commuters_16_over_2011_2015 | geoid | pop_determined_poverty_status_2011_2015 | poverty_count | poverty_per_pop | the_geom | the_geom_webmercator | total_pop_2011_2015 | total_population | walked_to_work_2011_2015 | |
---|---|---|---|---|---|---|---|---|---|---|---|
0 | 59 | 7933.785122 | 360470760003 | 16593.199780 | 289.389328 | 0.200408 | 0106000020E61000000100000001030000000100000008... | 0106000020110F00000100000001030000000100000008... | 25611.885816 | 1444 | 0.011213 |
1 | 96 | 12422.379729 | 360470381001 | 23171.502272 | 277.517123 | 0.234192 | 0106000020E61000000100000001030000000100000007... | 0106000020110F00000100000001030000000100000007... | 31835.053276 | 1185 | 0.006302 |
2 | 97 | 7998.999141 | 360470387003 | 21176.235374 | 104.167832 | 0.135459 | 0106000020E61000000100000001030000000100000007... | 0106000020110F00000100000001030000000100000007... | 24021.310491 | 769 | 0.007314 |
3 | 116 | NaN | 360470702030 | NaN | NaN | NaN | None | None | NaN | 0 | NaN |
4 | 127 | 5464.206106 | 360470039002 | 15644.554430 | 181.884956 | 0.360883 | 0106000020E61000000100000001030000000100000009... | 0106000020110F00000100000001030000000100000009... | 10386.507474 | 504 | 0.016299 |
Importing from a URL
# Import a dataset straight from a URL (a SQL API export of another account).
url = 'https://builder-demo.carto.com/api/v2/sql?q=select%20*%20from%20%22builder-demo%22.railroad_data&format=gpkg&filename=railroad_data'
fi = FileImportJob(url,carto_key)
fi.run()
# As with the local import: stop on a failure state instead of polling forever.
while fi.state not in ('complete', 'failure'):
    print('Importing...')
    fi.refresh()
    time.sleep(5)
if fi.state == 'failure':
    # NOTE(review): inspect the job object for error details if this fires.
    print('Import failed')
else:
    print_query('SELECT accident_type, cause_subcategory FROM railroad_data LIMIT 5')
Importing... Importing... Importing... Importing...
accident_type | cause_subcategory | |
---|---|---|
0 | Derailment | Wheels |
1 | Derailment | Track Geometry |
2 | Derailment | Body |
3 | Derailment | Brake |
4 | Rearend collision | Miscellaneous |
from carto.datasets import DatasetManager
import warnings
# Silence warnings emitted by the carto library.  NOTE(review): this hides
# *all* warnings process-wide — confirm that is intended.
warnings.filterwarnings('ignore')
# Manager for dataset (table) metadata: list, get, edit, delete.
dm = DatasetManager(carto_key)
Get metadata of all datasets
# Fetch metadata for every dataset in the account.
datasets = dm.all()
# Bare expression: the notebook displays the dataset count.
len(datasets)
95
Find datasets by their name
# Locate the datasets imported earlier in this notebook by name prefix.
# str.startswith accepts a tuple of prefixes — one call instead of an `or`.
for dataset in datasets:
    if dataset.name.startswith(('railroad', 'brooklyn_poverty')):
        print(dataset.name)
railroad_data brooklyn_poverty
Remove datasets
# Delete the datasets imported earlier in this notebook, matched by prefix.
for dataset in datasets:
    if dataset.name.startswith(('railroad', 'brooklyn_poverty')):
        print("Deleting {}...".format(dataset.name))
        dataset.delete()
Deleting railroad_data... Deleting brooklyn_poverty...
Edit dataset metadata, save, restore
places_table = dm.get('populated_places')
# Show current description and privacy before modifying them.
print(places_table.description)
print(places_table.privacy)
LINK
import carto.permissions
# Update the metadata locally, then persist the changes with save().
places_table.description = 'Populated places dataset (desc set from the SDK)'
places_table.privacy = carto.permissions.PRIVATE
places_table.save()
<Response [200]>
# Re-fetch to confirm the changes were persisted server-side.
places_table = dm.get('populated_places')
print(places_table.description)
print(places_table.privacy)
Populated places dataset (desc set from the SDK) PRIVATE
# Restore the original metadata: empty description, LINK privacy.
places_table.description = ''
places_table.privacy = carto.permissions.LINK
places_table.save()
<Response [200]>
# Re-fetch once more to verify the restore.
places_table = dm.get('populated_places')
print(places_table.description)
print(places_table.privacy)
LINK
from carto.sql import BatchSQLClient
# Client for the asynchronous Batch SQL API (long-running queries).
batch_client = BatchSQLClient(carto_key)
# Baseline row count of the source table before sampling it.
print_query('SELECT COUNT(*) from flights')
count | |
---|---|
0 | 395633 |
# Submit a two-step Batch SQL job: sample ~25% of `flights` into a new
# table, then "cartodbfy" it so it is registered as a CARTO dataset.
job = batch_client.create([
'''CREATE TABLE flights_batch AS SELECT * FROM flights TABLESAMPLE SYSTEM(25)''',
# NOTE(review): the username 'jsanz' is hard-coded here — it should match
# the account in BASEURL; confirm before running against another account.
'''SELECT CDB_CartoDBfytable('jsanz','flights_batch')'''
])
job_id = job['job_id']
print("Job {} created at {}".format(job_id,job['created_at']))
# Poll the Batch SQL API until the job reaches a terminal state.  The API
# reports 'pending' and 'running' as transient statuses; the original loop
# only tested for 'pending' and could fall through while the job was still
# 'running'.
while job['status'] in ('pending', 'running'):
    print(job['status'])
    time.sleep(5)
    job = batch_client.read(job_id)
# Terminal status: 'done', 'failed' or 'canceled'.
print(job['status'])
print_query('SELECT COUNT(*) FROM flights_batch')
Job a22f74cf-b58f-43ef-bf1a-d18e82924b4a created at 2017-09-14T14:30:00.125Z pending done
count | |
---|---|
0 | 97371 |
# Clean up the sampled table.  Report the exception detail instead of
# silently discarding it as the original did.
try:
    dm.get('flights_batch').delete()
except Exception as e:
    print('Something happened!', e)
from carto.visualizations import VisualizationManager
# Manager for map (visualization) metadata.
vm = VisualizationManager(carto_key)
Get all the account maps metadata
vizs = vm.all()
Show the names of the first 10
# List the names of the first ten maps.
for viz in vizs[:10]:
    print(viz.name)
Geoinquietos Test boundaries aggregation torque frown Mapping Sax Test named map NYC Tree Map groups Cow Map Foursquare Checkins CARTO team map
Show the description from one of them
flickr = vm.get('Flickr Great Shots 2016')
# Bare expression: the notebook renders the map's description string.
flickr.description
'This map is updated every day with last Flickr [most interesting photos](https://www.flickr.com/explore).'
Get the map download link
flickr.export()
'http://s3.amazonaws.com/com.cartodb.imports.production/e7be05bc424f730853dd/Flickr%20Great%20Shots%202016%20%28on%202017-09-14%20at%2014.30.34%29.carto?AWSAccessKeyId=AKIAJUI5EFFJIRZMEEMA&Expires=1505406635&Signature=vHTPqMdoFKYt2GUGX4JrVNCy78Y%3D&response-content-disposition=attachment%3Bfilename%3D%22Flickr%2520Great%2520Shots%25202016%2520%28on%25202017-09-14%2520at%252014.30.34%29.carto%22%3Bfilename%2A%3Dutf-8%27%27Flickr%2520Great%2520Shots%25202016%2520%28on%25202017-09-14%2520at%252014.30.34%29.carto'
from carto.maps import NamedMapManager
# Manager for named map templates.
nm = NamedMapManager(carto_key)
named_maps = nm.all()
Print the custom templates in the account (those whose id doesn't start with `tpl`)
# Show only the user-created templates; auto-generated ids begin with 'tpl'.
for named_map in named_maps:
    if not named_map.template_id.startswith('tpl'):
        print(named_map.template_id)
named_map_test_in cartoframes_ver20170406_layers1_time0_baseid1_labels0_zoom1 cartoframes_ver20170406_layers2_time0_baseid1_labels0_zoom0 test_named_map populated_places_1491426149_69 test_literal cartoframes_ver20170406_layers2_time0_baseid1_labels0_zoom1 populated_places_1491425943_37 cartoframes_ver20170406_layers1_time0_baseid1_labels0_zoom0 populated_places_1491425172_65
Print some details from one of the templates
# Walk the layers of one template and print a short summary of each.
test_named = nm.get('test_named_map')
for layer in test_named.layergroup['layers']:
    # Blank line separates consecutive layers in the output.
    print()
    kind = layer['type']
    opts = layer['options']
    if kind == 'http':
        print('HTTP Layer')
        print(opts['urlTemplate'])
    elif kind == 'cartodb':
        print('CARTO Layer')
        print(opts['sql'])
HTTP Layer https://cartodb-basemaps-{s}.global.ssl.fastly.net/light_all/{z}/{x}/{y}.png CARTO Layer SELECT * FROM (SELECT * \ FROM nycpluto_all \ WHERE ST_Intersects(\ ST_Buffer(\ ST_SetSRID(\ ST_GeomFromText('POINT(-73.988371 40.736)'),\ 4326\ )::geography,\ 1000)::geometry,\ the_geom) ) AS wrapped_query WHERE <%= layer0 %>=1