Skip to content

Commit

Permalink
v1.9.0
Browse files Browse the repository at this point in the history
  • Loading branch information
ajnisbet committed Feb 20, 2024
2 parents 1312343 + 0d111ac commit e93ff94
Show file tree
Hide file tree
Showing 15 changed files with 227 additions and 211 deletions.
8 changes: 4 additions & 4 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,10 @@ daemon:
docker run --rm -itd --volume "$(shell pwd)/data:/app/data:ro" -p 5000:5000 opentopodata:$(VERSION)

test: build black-check
docker run --rm -e DISABLE_MEMCACHE=1 --volume "$(shell pwd)/htmlcov:/app/htmlcov" opentopodata:$(VERSION) pytest --ignore=data --ignore=scripts --cov=opentopodata --cov-report html
docker run --rm -e DISABLE_MEMCACHE=1 --volume "$(shell pwd)/htmlcov:/app/htmlcov" opentopodata:$(VERSION) python -m pytest --ignore=data --ignore=scripts --cov=opentopodata --cov-report html --timeout=10

test-m1: build-m1 black-check
docker run --rm -e DISABLE_MEMCACHE=1 --volume "$(shell pwd)/htmlcov:/app/htmlcov" opentopodata:$(VERSION) pytest --ignore=data --ignore=scripts --cov=opentopodata --cov-report html
docker run --rm -e DISABLE_MEMCACHE=1 --volume "$(shell pwd)/htmlcov:/app/htmlcov" opentopodata:$(VERSION) python -m pytest --ignore=data --ignore=scripts --cov=opentopodata --cov-report html --timeout=10

run-local:
FLASK_APP=opentopodata/api.py FLASK_DEBUG=1 flask run --port 5000
Expand All @@ -26,11 +26,11 @@ black:
black --target-version py39 tests opentopodata

black-check:
docker run --rm opentopodata:$(VERSION) black --check --target-version py39 tests opentopodata
docker run --rm opentopodata:$(VERSION) python -m black --check --target-version py39 tests opentopodata

update-requirements: build
# pip-compile gets confused if there's already a requirements.txt file, and
# it can't be deleted without breaking the docker mount. So instead do the
# compiling in /tmp. Should run test suite afterwards.
docker run --rm -v $(shell pwd)/requirements.txt:/app/requirements.txt -w /tmp opentopodata:$(VERSION) /bin/bash -c "cp /app/requirements.in .; pip-compile requirements.in; cp requirements.txt /app/requirements.txt"
docker run --rm -v $(shell pwd)/requirements.txt:/app/requirements.txt -w /tmp opentopodata:$(VERSION) /bin/bash -c "cp /app/requirements.in .; pip-compile requirements.in --resolver backtracking; cp requirements.txt /app/requirements.txt"

2 changes: 1 addition & 1 deletion VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
1.8.3
1.9.0
10 changes: 5 additions & 5 deletions docker/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,17 +1,17 @@
# Container for packages that need to be built from source but have massive dev dependencies.
FROM python:3.9.16-slim-bullseye as builder
FROM python:3.11.8-slim-bookworm as builder
RUN set -e && \
apt-get update && \
apt-get install -y --no-install-recommends \
gcc \
python3.9-dev
python3.11-dev

RUN pip config set global.disable-pip-version-check true && \
pip wheel --wheel-dir=/root/wheels uwsgi==2.0.21 && \
pip wheel --wheel-dir=/root/wheels regex==2022.10.31
pip wheel --wheel-dir=/root/wheels uwsgi==2.0.24 && \
pip wheel --wheel-dir=/root/wheels regex==2023.12.25

# The actual container.
FROM python:3.9.16-slim-bullseye
FROM python:3.11.8-slim-bookworm
RUN set -e && \
apt-get update && \
apt-get install -y --no-install-recommends \
Expand Down
5 changes: 3 additions & 2 deletions docker/apple-silicon.Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,8 @@
# It works just the same as the main image, but is much larger and slower to
# build.

FROM osgeo/gdal:ubuntu-full-3.5.2
FROM ghcr.io/osgeo/gdal:ubuntu-full-3.6.4
RUN python --version
RUN set -e && \
apt-get update && \
apt-get install -y --no-install-recommends \
Expand All @@ -16,7 +17,7 @@ RUN set -e && \
g++ \
supervisor \
libmemcached-dev \
python3.8-dev && \
python3.10-dev && \
rm -rf /var/lib/apt/lists/*

COPY requirements.txt /app/requirements.txt
Expand Down
56 changes: 53 additions & 3 deletions docs/api.md
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ Latitudes and longitudes should be in `EPSG:4326` (also known as WGS-84 format),
* The default option `null` makes NODATA indistinguishable from a location outside the dataset bounds.
* `NaN` (not a number) values aren't valid in json and will break some clients. The `nan` option was default before version 1.4 and is provided only for backwards compatibility.
* When querying multiple datasets, this NODATA replacement only applies to the last dataset in the stack.
* `format`: Either `json` or `geojson`. Default: `json`.



Expand All @@ -47,14 +48,11 @@ Some notes about the elevation value:
* Unless the `nodata_value` parameter is set, a `null` elevation could either mean the location is outside the dataset bounds, or a NODATA within the raster bounds.



### Example

`GET` <a href="https://api.opentopodata.org/v1/srtm90m?locations=-43.5,172.5|27.6,1.98&interpolation=cubic">api.opentopodata.org/v1/srtm90m?locations=-43.5,172.5|27.6,1.98&interpolation=cubic</a>




```json
{
"results": [
Expand All @@ -79,6 +77,58 @@ Some notes about the elevation value:
}
```


### GeoJSON response

If `format=geojson` is passed, you get a `FeatureCollection` of `Point` geometries instead. Each feature has its elevation as the `z` coordinate, and a `dataset` property specifying the source (corresponding to `results[].dataset` in the regular json response):


### GeoJSON example


`GET` <a href="https://api.opentopodata.org/v1/srtm90m?locations=-43.5,172.5|27.6,1.98&interpolation=cubic&format=geojson">api.opentopodata.org/v1/srtm90m?locations=-43.5,172.5|27.6,1.98&interpolation=cubic&format=geojson</a>




```json
{
"features": [
{
"geometry": {
"coordinates": [
172.5,
-43.5,
45
],
"type": "Point"
},
"properties": {
"dataset": "srtm90m"
},
"type": "Feature"
},
{
"geometry": {
"coordinates": [
1.98,
27.6,
402
],
"type": "Point"
},
"properties": {
"dataset": "srtm90m"
},
"type": "Feature"
}
],
"type": "FeatureCollection"
}
```



---


Expand Down
8 changes: 8 additions & 0 deletions docs/changelog.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,14 @@
This is a list of changes to Open Topo Data between each release.


## Version 1.9.0 (19 Feb 2024)
* Dependency upgrades, including python to 3.11 and rasterio to 1.3.9
* Add support for geojson responses ([#86](https://github.com/ajnisbet/opentopodata/pull/86), thanks [@arnesetzer](https://github.com/arnesetzer)!)
* Fix handling of preflight requests ([#93](https://github.com/ajnisbet/opentopodata/issues/93), thanks [@MaRaSu](https://github.com/MaRaSu)!)
* Fix error message bug ([#70](https://github.com/ajnisbet/opentopodata/pull/70), thanks [@khendrickx](https://github.com/khendrickx)!)



## Version 1.8.3 (7 Feb 2023)

* Fix memory leak ([#68](https://github.com/ajnisbet/opentopodata/issues/68))
Expand Down
122 changes: 8 additions & 114 deletions docs/datasets/eudem.md
Original file line number Diff line number Diff line change
Expand Up @@ -30,45 +30,25 @@ The advantage of the `NODATA` oceans is that you can use EU-DEM without clippin

## Adding EU-DEM to Open Topo Data

As of Jan 2024, EU-DEM is no longer available to download via copernicus.eu.

Make a new folder for the dataset:
I have uploaded my version of the dataset at [https://files.gpxz.io/eudem_buffered.zip](https://files.gpxz.io/eudem_buffered.zip), see [EUDEM download](https://www.gpxz.io/blog/eudem) for more details.

```bash
mkdir ./data/eudem
```

Download the dataset from [Copernicus](https://land.copernicus.eu/imagery-in-situ/eu-dem/eu-dem-v1.1?tab=download). There are 27 files. Unzip them and move all the `.TIF` files into the data folder (you don't need the `.aux.xml`, `.ovr`, or `.TFw` files).

Your data folder should now contain only 27 TIF files:
Download and unzip the folder into:

```bash
ls ./data/eudem

# eu_dem_v11_E00N20.TIF
# eu_dem_v11_E10N00.TIF
# eu_dem_v11_E10N10.TIF
# ...
```


If you have [gdal](https://gdal.org) installed, the easiest thing to do here is build a [VRT](https://gdal.org/drivers/raster/vrt.html) - a single raster file that links to the 27 tiles and which Open Topo Data can treat as a single-file dataset.

```bash
mkdir ./data/eudem-vrt
cd ./data/eudem-vrt
gdalbuildvrt -tr 25 25 -tap -te 0 0 8000000 6000000 eudem.vrt ../eudem/*.TIF
cd ../../
mkdir ./data/eudem
```

The `tr`, `tap`, and `te` options in the above command ensure that slices from the VRT will use the exact values and grid of the source rasters.

There are 27 files.

Then create a `config.yaml` file:

```yaml
datasets:
- name: eudem25m
path: data/eudem-vrt/
path: data/eudem
filename_epsg: 3035
filename_tile_size: 1000000
```
Finally, rebuild to enable the new dataset at [localhost:5000/v1/eudem25m?locations=51.575,-3.220](http://localhost:5000/v1/eudem25m?locations=51.575,-3.220).
Expand All @@ -82,92 +62,6 @@ make build && make run
If you don't have gdal installed, you can use the tiles directly. There are instructions for this [here](https://github.com/ajnisbet/opentopodata/blob/f012ec136bebcd97e1dc05645e91a6d2487127dc/docs/datasets/eudem.md#adding-eu-dem-to-open-topo-data), but because the EU-DEM tiles don't come with an overlap you will get a `null` elevation at locations within 0.5 pixels of tile edges.


### Buffering tiles (optional)

The tiles provided by EU-DEM don't overlap and cover slightly less than a 1000km square. This means you'll get a `null` result for coordinates along the tile edges.

The `.vrt` approach above solves the overlap issue, but for improved performance you can leave the tiles separate and add a buffer to each one. This is the code I used on the public API to do this:


```python
import os
from glob import glob
import subprocess

import rasterio


# Prepare paths.
input_pattern = 'data/eudem/*.TIF'
input_paths = sorted(glob(input_pattern))
assert input_paths
vrt_path = 'data/eudem-vrt/eudem.vrt'
output_dir = 'data/eudem-buffered/'
os.makedirs(output_dir, exist_ok=True)



# EU-DEM specific options.
tile_size = 1_000_000
buffer_size = 50

for input_path in input_paths:

# Get tile bounds.
with rasterio.open(input_path) as f:
bottom = int(f.bounds.bottom)
left = int(f.bounds.left)

# For EU-DEM only: round this partial tile down to the nearest tile_size.
if left == 943750:
left = 0

# New tile name in SRTM format.
output_name = 'N' + str(bottom).zfill(7) + 'E' + str(left).zfill(7) + '.TIF'
output_path = os.path.join(output_dir, output_name)

# New bounds.
xmin = left - buffer_size
xmax = left + tile_size + buffer_size
ymin = bottom - buffer_size
ymax = bottom + tile_size + buffer_size

# EU-DEM tiles don't cover negative locations.
xmin = max(0, xmin)
ymin = max(0, ymin)

# Do the transformation.
cmd = [
'gdal_translate',
'-a_srs', 'EPSG:3035', # EU-DEM crs.
'-co', 'NUM_THREADS=ALL_CPUS',
'-co', 'COMPRESS=DEFLATE',
'-co', 'BIGTIFF=YES',
'--config', 'GDAL_CACHEMAX','512',
'-projwin', str(xmin), str(ymax), str(xmax), str(ymin),
vrt_path, output_path,
]
r = subprocess.run(cmd)
r.check_returncode()
```

These new files can be used in Open Topo Data with the following `config.yaml` file


```yaml
datasets:
- name: eudem25m
path: data/eudem-buffered/
filename_epsg: 3035
filename_tile_size: 1000000
```
and rebuilding:
```bash
make build && make run
```



## Public API
Expand Down
7 changes: 6 additions & 1 deletion docs/notes/kubernetes.md
Original file line number Diff line number Diff line change
Expand Up @@ -82,4 +82,9 @@ spec:
- containerPort: 5000

restartPolicy: Always
```
```
---
Thanks to [@khintz](https://github.com/khintz) for contributing this documentation in [#57](https://github.com/ajnisbet/opentopodata/pull/57)!
4 changes: 4 additions & 0 deletions docs/notes/performance-optimisation.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,10 @@ Batch request are faster (per point queried) than single-point requests, and lar

Batch queries are fastest if the points are located next to each other. Sorting the locations you are querying before batching will improve performance. Ideally sort by some block-level attribute like postal code or state/county/region, or by something like `round(lat, 1), round(lon, 1)` depending on your tile size.

If the requests are very large and the server has several CPU cores, try splitting the request and sending it simultaneously. The optimum for the number of requests is slightly higher than the number of CPU cores used by Open Topo Data. The number of CPU cores used is displayed when Open Topo Data is started. If you missed the log message, you can find it with the following command:
```bash
docker logs {NAME_OF_CONTAINER} 2>&1 | grep "CPU cores"
```


## Dataset format
Expand Down
13 changes: 9 additions & 4 deletions docs/notes/running-without-docker.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,13 +17,13 @@ git clone https://github.com/ajnisbet/opentopodata.git
cd opentopodata
```

Install system dependencies
Install system dependencies (if you're not using Debian 10, install whatever python3.X-dev matches your installed python)

```bash
apt install gcc python3.7-dev python3-pip
```

Debian 10 comes with an old version of pip, it needs to be updated:
Debian 10 comes with an old version of pip, it needs to be updated so we can install wheels:

```bash
pip3 install --upgrade pip
Expand All @@ -38,7 +38,7 @@ cat requirements.txt | grep pyproj
and install that pinned version

```bash
pip3 install pyproj==3.0.0.post1
pip3 install pyproj==3.4.1
```

then the remaining python packages can be installed:
Expand Down Expand Up @@ -133,4 +133,9 @@ Then manage Open Topo Data with
systemctl daemon-reload
systemctl enable opentopodata.service
systemctl start opentopodata.service
```
```

!!! warning "Warning"
Opentopodata caches `config.yaml` in two places: memcache and uwsgi.

If you update the config file (to eg add a new dataset) you'll need to restart memcached **first**, then opentopodata.
2 changes: 2 additions & 0 deletions example-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,12 @@
# 400 error will be thrown above this limit.
max_locations_per_request: 100


# CORS header. Should be null for no CORS, '*' for all domains, or a url with
# protocol, domain, and port ('https://api.example.com/'). Default is null.
access_control_allow_origin: "*"


datasets:

# A small testing dataset is included in the repo.
Expand Down
Loading

0 comments on commit e93ff94

Please sign in to comment.