Commit 01bfd61

Merge pull request #2 from conveyal/update-batch-download
Update batch regional download example
2 parents: e44c340 + edcdd83 · commit 01bfd61

File tree

3 files changed (+40, -100 lines)

README.md

Lines changed: 3 additions & 3 deletions
@@ -1,3 +1,3 @@
-Examples of how to download travel time surfaces (`.tiff` format) from [Conveyal Analysis](http://conveyal.com/analysis), extract travel time contours (isochrones), and save them as `.geojson`, or create a `.csv` of accessibility results, for multiple origins
-
-Details are in the jupyter lab notebook `batch-isos.ipynb` (viewable directly on GitHub)
+Examples of batch downloads from [Conveyal](http://conveyal.com/analysis):
+- Download travel time surfaces (in the `.tif` GeoTiff format) for multiple origins, extract travel time contours (isochrones), and save them as `.geojson` or create a `.csv` of accessibility results. See the `batch-isos.ipynb` jupyter lab notebook (viewable directly on GitHub).
+- Download a .zip file of regional analysis results including all combinations of time cutoffs, percentiles, and destination layers (in the `.tif` GeoTiff format). See the `batch-result-download.ipynb` jupyter lab notebook (viewable directly on GitHub).
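
For the first bullet, one possible way to go from a downloaded travel time surface to isochrone contours in GeoJSON is sketched below. This is not necessarily how `batch-isos.ipynb` does it; it assumes `rasterio` and `scikit-image` are installed, and the file path, cutoff, and output name are placeholders.

```python
import json

import rasterio
from rasterio.transform import xy
from skimage import measure

# Placeholder input: one travel time surface downloaded from Conveyal
surface_path = "results/example-surface.tif"
cutoff_minutes = 30

with rasterio.open(surface_path) as src:
    travel_times = src.read(1)   # single-band raster of travel times in minutes
    transform = src.transform

features = []
for contour in measure.find_contours(travel_times, cutoff_minutes):
    rows, cols = contour[:, 0], contour[:, 1]
    xs, ys = xy(transform, rows, cols)   # pixel coordinates -> raster CRS coordinates
    coords = [[float(x), float(y)] for x, y in zip(xs, ys)]
    features.append({
        "type": "Feature",
        "properties": {"cutoff": cutoff_minutes},
        "geometry": {"type": "LineString", "coordinates": coords},
    })

# Note: coordinates stay in the raster's CRS; reproject to WGS84 if needed for GeoJSON
with open("results/example-isochrone.geojson", "w") as f:
    json.dump({"type": "FeatureCollection", "features": features}, f)
```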

batch-result-download.ipynb

Lines changed: 35 additions & 80 deletions
@@ -5,17 +5,15 @@
 "metadata": {},
 "source": [
 "# Batch Result Download\n",
-"Download a batch of Conveyal regional analysis results\n",
+"Download a batch of Conveyal regional analysis results. This script was streamlined in April 2024 with the v7.2 Conveyal / R5 release.\n",
 "\n",
 "Setup requires:\n",
 " - Completed regional analysis results, viewable in the Conveyal user interface\n",
-" - JSON with array values for the keys `analysisIds`, `cutoffs`, `pointSetId`, and `percentile`, plus the `regionId` saved to `config/regionalAnalysisParameters.json`. The values in the arrays can be derived from the URL of a regional analysis results page in the Conveyal user interface: `https://analysis.conveyal.com/regions/[regionId]/regional/[analysisId]?cutoff=[cutoff]&pointSetId=[pointSetId]&percentile=[percentile]`\n",
+" - JSON with array values for the key `analysisIds` saved to `config/regionalAnalysisParameters.json`. The values in the array can be derived from the URL of regional analysis results pages in the Conveyal user interface: `https://analysis.conveyal.com/regions/[regionId]/regional/[analysisId]`\n",
 " - A current Conveyal token (e.g. 'bearer 1234abcd...') saved at `config/.auth`, based on the `idToken` shown at https://analysis.conveyal.com/session (for logged in users).\n",
-" - Optionally, a session cookie copied from browser DevTools saved at `config/.cookie`\n",
 "\n",
-"After the setup cell in this notebook, there are cells to: \n",
-" - Optionally, fetch the names of regional analyses and destination pointset layers\n",
-" - Download all combinations of analyses, cutoffs, destination pointsets, and percentiles"
+"After the setup cell in this notebook, the remaining cell: \n",
+" - Downloads a .zip file containing geotiff files for all combinations of analyses, cutoffs, destination pointsets, and percentiles"
 ]
 },
 {
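
Side note on the setup description above: the `analysisIds` values can be read off the end of a results-page URL. A minimal sketch with a made-up URL:

```python
from urllib.parse import urlparse

# Made-up example URL in the format noted above:
# https://analysis.conveyal.com/regions/[regionId]/regional/[analysisId]
url = "https://analysis.conveyal.com/regions/abc123/regional/def456"

parts = urlparse(url).path.strip("/").split("/")
region_id, analysis_id = parts[1], parts[3]
print(region_id, analysis_id)  # abc123 def456
```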
@@ -27,60 +25,24 @@
 },
 {
 "cell_type": "code",
-"execution_count": 1,
+"execution_count": null,
 "metadata": {},
 "outputs": [],
 "source": [
 "import requests\n",
 "import json\n",
 "import urllib\n",
 "import os\n",
+"import time\n",
 "\n",
 "config = json.load(open('config/regionalAnalysisParameters.json'))\n",
 "\n",
-"# Authorization header copied from DevTools Network request or https://analysis.conveyal.com/session\n",
+"# Authorization header with idToken copied from https://analysis.conveyal.com/session\n",
 "token = open('config/.auth').readline().strip()\n",
 "headers = {\n",
 " 'Authorization': token\n",
 "}\n",
-"resultUrl = 'https://api.conveyal.com/api/regional/'\n",
-"\n",
-"namesFetched = False;\n",
-"\n",
-"# Cookie copied from browser Devtools Network request header\n",
-"cookie = open('config/.cookie').readline().strip()\n",
-"dbHeaders = {\n",
-" 'Cookie': cookie\n",
-"}\n",
-"dbUrl = 'https://analysis.conveyal.com/api/db/'"
-]
-},
-{
-"cell_type": "markdown",
-"metadata": {},
-"source": [
-"## Fetch names\n",
-"Optional, requires saved cookie"
-]
-},
-{
-"cell_type": "code",
-"execution_count": 2,
-"metadata": {},
-"outputs": [],
-"source": [
-"analysesRequest = requests.get(dbUrl + 'regional-analyses?options=' + urllib.parse.urlencode({\"projection\":{\"request.scenario.modifications\":0}}), headers = dbHeaders)\n",
-"analyses = analysesRequest.json()\n",
-"\n",
-"opportunitiesRequest = requests.get(dbUrl + 'opportunityDatasets?', headers = dbHeaders)\n",
-"opportunities = opportunitiesRequest.json()\n",
-"destinations = {}\n",
-"\n",
-"for destinationPointSetId in config['pointSetIds']:\n",
-" destinationPointSetName = list(filter(lambda x:x[\"_id\"] == destinationPointSetId, opportunities))[0]['name']\n",
-" destinations[destinationPointSetId] = destinationPointSetName\n",
-"\n",
-"namesFetched = True;"
+"baseUrl = 'https://api.conveyal.com/api/regional/'"
 ]
 },
 {
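
A quick sanity check of the setup files before running the download loop can avoid a wasted wait. A small sketch using only the paths the setup cell reads; the 'bearer' prefix check is an assumption based on the example token format in the notebook text:

```python
import json
import os

# Files read by the notebook's setup cell
assert os.path.exists('config/regionalAnalysisParameters.json'), 'missing config JSON'
assert os.path.exists('config/.auth'), 'missing token file'

config = json.load(open('config/regionalAnalysisParameters.json'))
analysis_ids = config.get('analysisIds')
assert isinstance(analysis_ids, list) and analysis_ids, 'analysisIds should be a non-empty array'

token = open('config/.auth').readline().strip()
# Assumption: the token looks like 'bearer 1234abcd...' as in the notebook's setup notes
assert token.lower().startswith('bearer'), 'token should start with "bearer"'
```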
@@ -100,43 +62,36 @@
 "# Loop over analysis ids\n",
 "for analysisId in config['analysisIds']:\n",
 " \n",
-" analysisName = analysisId\n",
-" if namesFetched:\n",
-" analysisProperties = list(filter(lambda x:x['_id']==analysisId, analyses))[0]\n",
-" with open('results/' + analysisId + '.json', 'w') as f:\n",
-" json.dump(analysisProperties, f)\n",
-" analysisName = analysisProperties['name']\n",
-" print('Processing ' + analysisName)\n",
-" \n",
-" # Loop over cutoffs, percentiles, and destination opportunity pointsets\n",
-" for cutoff in config['cutoffs']:\n",
-" for percentile in config['percentiles']:\n",
-" for destinationPointSetId in config['pointSetIds']:\n",
-" \n",
-" destinationPointSetName = destinationPointSetId\n",
-" if namesFetched:\n",
-" destinationPointSetName = destinations[destinationPointSetId]\n",
-" \n",
-" gridQuery = urllib.parse.urlencode({'cutoff': cutoff, 'percentile': percentile, 'destinationPointSetId': destinationPointSetId})\n",
-" gridUrl = resultUrl + analysisId + '/grid/geotiff?' + gridQuery\n",
-" \n",
-" # Request a signed S3 url from the Conveyal API\n",
-" r = requests.get(gridUrl, headers = headers, verify = False)\n",
+" print('Processing ' + analysisId)\n",
+" status = 202;\n",
 " \n",
-" if r.status_code == 403:\n",
-" print('Unauthorized access. Your authorization token may be invalid or expired.')\n",
+" while (status == 202):\n",
+" resultUrl = baseUrl + analysisId + '/all'\n",
+" # Request a signed S3 url from the Conveyal API\n",
+" r = requests.get(resultUrl, headers = headers, verify = False)\n",
+" if r.status_code == 403:\n",
+" print('Unauthorized access. Your authorization token may be invalid or expired.')\n",
+" \n",
+" elif r.status_code == 404:\n",
+" print('Results not found. Check your analysisId values')\n",
 " \n",
-" elif r.status_code != 200:\n",
-" print('Error: ' + r.text)\n",
+" elif r.status_code == 202:\n",
+" print(r.text + ' Retrying in 15 seconds.')\n",
+" time.sleep(15)\n",
 " \n",
-" else:\n",
-" # From the signed S3 url, fetch the grid as a geotiff\n",
-" grid = requests.get(r.json()['url'], verify = False)\n",
-" # Save response from Conveyal Analysis to a local .geotiff file\n",
-" with open('results/' + ('-').join((analysisId, destinationPointSetName, str(cutoff) + 'min', str(percentile) + 'pct')) + '.geotiff', 'wb') as f:\n",
-" for chunk in grid.iter_content(chunk_size=128):\n",
-" f.write(chunk)\n",
-" f.close()"
+" elif (r.status_code != 200):\n",
+" print('Error: ' + r.text)\n",
+" \n",
+" status = r.status_code\n",
+" \n",
+" if (status == 200):\n",
+" zipRequest = requests.get(r.json()['url'], verify = False)\n",
+"\n",
+" # Save result to a local .zip file\n",
+" with open('results/' + analysisId + '.zip', 'wb') as f:\n",
+" for chunk in zipRequest.iter_content(chunk_size=128):\n",
+" f.write(chunk)\n",
+" f.close()"
 ]
 }
 ],
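
After the updated loop finishes, each analysis id has a `results/[analysisId].zip`. A short follow-on sketch, using only the standard library, that lists and extracts the GeoTIFFs (the analysis id here is a placeholder):

```python
import zipfile

analysis_id = 'abc123'  # placeholder; use a value from config['analysisIds']
zip_path = 'results/' + analysis_id + '.zip'

with zipfile.ZipFile(zip_path) as zf:
    # One GeoTIFF per combination of cutoff, percentile, and destination layer
    tif_names = [n for n in zf.namelist() if n.lower().endswith(('.tif', '.tiff'))]
    print('\n'.join(tif_names))
    zf.extractall('results/' + analysis_id)
```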
config/regionalAnalysisParameters.json

Lines changed: 2 additions & 17 deletions

@@ -1,21 +1,6 @@
 {
-"regionId": "abc123",
 "analysisIds": [
-"def456",
-"ghi789"
-],
-"cutoffs": [
-30,
-45,
-60
-],
-"pointSetIds": [
-"uvw123",
-"xyz456"
-],
-"percentiles": [
-25,
-50,
-75
+"abc123",
+"def456"
 ]
 }
