Compare commits


No commits in common. "2025-refresh" and "master" have entirely different histories.

483 changed files with 2397 additions and 25489 deletions

.env

@@ -3,8 +3,8 @@
 # Layers definition and meta data
 TILESET_FILE=openmaptiles.yaml
-# Use 3-part patch version to ignore patch updates, e.g. 7.0.0
-TOOLS_VERSION=7.1
+# Use 3-part patch version to ignore patch updates, e.g. 5.0.0
+TOOLS_VERSION=5.3
 # Make sure these values are in sync with the ones in .env-postgres file
 PGDATABASE=openmaptiles
@@ -16,52 +16,40 @@ PGPORT=5432
 # BBOX may get overwritten by the computed bbox of the specific area:
 # make generate-bbox-file
 # By default, the Makefile will use the content of data/$(area).bbox file if it exists.
-#BBOX=4.964926,50.882471,5.411252,51.071236
-# Which zooms to generate with make generate-tiles-pg
+BBOX=-180.0,-85.0511,180.0,85.0511
+#BBOX=4.964926,50.882471,5.411252,51.071236
+# Which zooms to generate in make generate-tiles
 MIN_ZOOM=0
-MAX_ZOOM=16
-# `MID_ZOOM` setting only works with `make generate-tiles-pg` command. Make sure MID_ZOOM < MAX_ZOOM.
-# See https://github.com/openmaptiles/openmaptiles-tools/pull/383
-# MID_ZOOM=11
+MAX_ZOOM=7
 # Use true (case sensitive) to allow data updates
 DIFF_MODE=false
-# Hide some output from Mapnik tile generation for clarity
-FILTER_MAPNIK_OUTPUT=1
-# Some area data like openstreetmap.fr can contain invalid references
-# that must be cleaned up before using it for borders -- set it to true.
-BORDERS_CLEANUP=false
 # The current setup assumes this file is placed inside the data/ dir
 MBTILES_FILE=tiles.mbtiles
 # This is the current repl_config.json location, pre-configured in the tools Dockerfile
 # Makefile and quickstart replace it with the dynamically generated one, but we keep it here in case some other method is used to run.
 IMPOSM_CONFIG_FILE=/usr/src/app/config/repl_config.json
-# import-borders temp files - set them here to defaults, and override in the Makefile based on the area
-BORDERS_CLEANUP_FILE=data/borders/cleanup.pbf
-BORDERS_PBF_FILE=data/borders/filtered.pbf
-BORDERS_CSV_FILE=data/borders/lines.csv
 # Number of parallel processes to use when importing sql files
 MAX_PARALLEL_PSQL=5
-#MAX_PARALLEL_PSQL=16
-# Number of concurrent IO ops to use when generating vector map tiles
-# Set to ridiculous high values for SSD/NVME
-# Test results: (These were without setting UV_THREADPOOL_SIZE)
-# 64: ~100/s peak
-# 256: ~200/s peak, stabilizes to a 50% improvement over 64 concurrency
-# 1024: ~400/s peak, still at 400/s speed after a few minutes (unlike before, oddly enough)
-# 4096: ~250/s peak, stabilizes to 200. But it appears to be choking on something, only updates progress periodically
-# 1024 & 24 Threads: ~700/s peak - drops to 214s after 10mins, 178/s after 15mins. EST after 20min: 23hours
-# Postgres is mainly idle during all of this (coarse dataset), using only half a core
-# To be tested if 1 core is dedicated to postgres and the others to tilelive(UV_THREADPOOL_SIZE)
-# will prove to be more efficient on average. Or just UV_THREADPOOL_SIZE==CPU_THREADS.
-#COPY_CONCURRENCY=10
-COPY_CONCURRENCY=64
-#COPY_CONCURRENCY=256
-#COPY_CONCURRENCY=512
-#COPY_CONCURRENCY=1024
-#COPY_CONCURRENCY=4096
-#UV_THREADPOOL_SIZE=16
-#UV_THREADPOOL_SIZE=24
-# Variables for generate tiles using tilelive-pgquery
+# Number of parallel threads to use when generating vector map tiles
+COPY_CONCURRENCY=10
+# Variables for generate tiles using PGquery
 PGHOSTS_LIST=
-NO_GZIP=1
-USE_KEY_COLUMN=1

.env-postgres

@@ -0,0 +1,8 @@
+# This file defines environment variables for the PostgreSQL image.
+# The main docker PostgreSQL image requires these vars rather than
+# the standard PG* ones that all PostgreSQL tools use.
+# Make sure these values are in sync with the ones in .env file
+POSTGRES_DB=openmaptiles
+POSTGRES_USER=openmaptiles
+POSTGRES_PASSWORD=openmaptiles

@@ -1,71 +0,0 @@
-# Workflow to run basic integrity checks on OMT`s new Pull Requests and commits pushed into OMT repo
-name: OpenMapTiles Integrity CI
-on:
-  push:
-    branches: [ master, master-tools ]
-  pull_request:
-jobs:
-  integrity_test:
-    name: Run integrity test
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout the changes
-        uses: actions/checkout@v4
-      - name: Run quickstart for a small area
-        env:
-          area: monaco
-          QUIET: 1
-        run: |
-          echo MIN_ZOOM=0 >> .env
-          echo MAX_ZOOM=14 >> .env
-          ./quickstart.sh $area
-      - name: Save quickstart.log
-        uses: actions/upload-artifact@v4
-        with:
-          name: quickstart.log
-          path: quickstart.log
-      - name: Test etldoc images
-        run: |
-          export TEST_MODE=yes
-          make generate-devdoc
-      - name: Run quickstart and update in DIFF mode
-        env:
-          area: europe/monaco
-          QUIET: 1
-        run: |
-          echo MIN_ZOOM=0 >> .env
-          echo MAX_ZOOM=14 >> .env
-          echo DIFF_MODE=true >> .env
-          # Cleanup
-          rm -fr data build cache
-          # Create data/$area.repl.json
-          make download-geofabrik area=$area
-          # Download 2+ month old data
-          export old_date=$(date --date="$(date +%Y-%m-15) -2 month" +'%y%m01')
-          echo Downloading $old_date extract of $area
-          docker compose run --rm --user=$(id -u):$(id -g) openmaptiles-tools sh -c "wget -O data/$area.osm.pbf http://download.geofabrik.de/$area-$old_date.osm.pbf"
-          # Initial import and tile generation
-          ./quickstart.sh $area
-          sleep 2
-          echo Downloading updates
-          # Loop to recover from potential "ERROR 429: Too Many Requests"
-          docker compose run --rm --user=$(id -u):$(id -g) openmaptiles-tools sh -c "
-            while ! osmupdate --keep-tempfiles --base-url=$(sed -n 's/ *\"replication_url\": //p' data/$area.repl.json) data/$area.osm.pbf data/changes.osc.gz ; do
-              sleep 2;
-              echo Sleeping...;
-              sleep 630;
-            done"
-          echo Downloading updates completed
-          echo Importing updates
-          make import-diff
-          echo Generating new tiles
-          make generate-tiles-pg

@@ -1,20 +1,21 @@
 name: Update PR comments
 on:
-  workflow_run:
-    workflows: ["OpenMapTiles Performance CI"]
-    types: [completed]
+  # This number should correspond to the IGNORE_RUNS_OLDER_THAN value below.
+  # When setting up for the first time, use "on: push" instead of "on: schedule"
+  # and set IGNORE_RUNS_OLDER_THAN to a very high number until it runs once.
+  schedule:
+    - cron: '*/6 * * * *'
 jobs:
   update_PRs:
     runs-on: ubuntu-latest
-    if: ${{ github.event.workflow_run.conclusion == 'success' }}
     steps:
       - name: main
         env:
           GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
-          WORKFLOW_NAME: "OpenMapTiles Performance CI"
+          WORKFLOW_NAME: "OpenMapTiles CI"
           # the name of the artifact whose content comment published by PR. Must have a single markdown file inside.
           MSG_ARTIFACT_NAME: "pr_message"
           # How far back to look for finished runs, in minutes.

@@ -1,22 +0,0 @@
-# Workflow to run unit tests on OMT`s new Pull Requests and commits pushed into OMT repo
-name: OpenMapTiles SQL Test CI
-on:
-  push:
-    branches: [ master, master-tools ]
-  pull_request:
-jobs:
-  unit_tests:
-    name: Run unit test
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout the changes
-        uses: actions/checkout@v4
-      - name: Run unit tests
-        run: |
-          make clean && make test-sql

@@ -1,6 +1,6 @@
-# Workflow to run performance tests OMT`s new Pull Requests and commits pushed into OMT repo
-name: OpenMapTiles Performance CI
+# Workflow to validate OMT`s new Pull Requests and commits pushed into OMT repo
+name: OpenMapTiles CI
 on:
   push:
@@ -8,6 +8,35 @@ on:
   pull_request:
 jobs:
+  integrity_test:
+    name: Run integrity test
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout the changes
+        uses: actions/checkout@v2
+      - name: Run quickstart for a small area
+        env:
+          area: monaco
+          MIN_ZOOM: 0
+          MAX_ZOOM: 14
+          QUIET: 1
+        run: |
+          ./quickstart.sh $area
+      - name: Save quickstart.log
+        uses: actions/upload-artifact@v1
+        with:
+          name: quickstart.log
+          path: quickstart.log
+      - name: Test etldoc images
+        run: |
+          export TEST_MODE=yes
+          make generate-devdoc
   performance:
     name: Evaluate performance
     runs-on: self-hosted
@@ -34,18 +63,9 @@ jobs:
       # TEST_DATA_URL: "https://drive.google.com/uc?export=download&id=1kw7XPDPd1Rc-Zi2XxGLTXdinUSq-S4pT"
       # TEST_PERF_PARAMS: "--minzoom 0 --maxzoom 14 --test hungary --test isle-of-man"
     steps:
-      - name: Cleanup workdir
-        id: cleanup
-        run: |
-          set -euo pipefail
-          pwd
-          ls -al .
-          shopt -s dotglob
-          rm -rf *
       - name: Cache test data download
        id: cache-testdata
-        uses: actions/cache@v4
+        uses: actions/cache@v1
        with:
          path: ci_cache
          key: "v2-${{ env.TEST_DATA_URL }}"
@@ -58,7 +78,7 @@
          curl --silent --show-error --location --output ci_cache/perf-test-areas-latest.osm.pbf "$TEST_DATA_URL"
      - name: Get code
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
        with:
          # Fetch the last two commits in case this is a PR,
          # and we need to profile the base branch first
@@ -76,14 +96,14 @@
          # Take the first parent of the grafted commit (cannot use HEAD^1 with shallow clones)
          REV_HASH=$(git cat-file -p $REV_HASH | awk 'NR > 1 {if(/^parent/){print $2; exit}}')
          fi
-          echo "hash=$REV_HASH" >> $GITHUB_OUTPUT
+          echo "::set-output name=hash::$REV_HASH"
      - name: Set up caching for the performance results
-        uses: actions/cache@v4
+        uses: actions/cache@v1
        with:
          path: perf_cache
          # If profiling result cache has incompatible format, increase this "v" number
-          key: "v13-${{ steps.calc.outputs.hash }}-${{ env.TEST_DATA_URL }}"
+          key: "v12-${{ steps.calc.outputs.hash }}-${{ env.TEST_DATA_URL }}"
      - name: Load test data into DB and run performance test
        id: main
@@ -153,23 +173,27 @@
            make start-db
            profile 1_data make import-data
            profile 2_osm make import-osm
+            profile 3_borders make import-borders
            if [ -f ../ci_cache/wikidata-cache.json ]; then
              cp ../ci_cache/wikidata-cache.json cache/wikidata-cache.json
            fi
-            profile 3_wikidata make import-wikidata
-            profile 4_sql make import-sql
+            profile 4_wikidata make import-wikidata
+            profile 5_sql make import-sql
            # Get database total size, in MB
            # Once Makefile has a few more improvements, we can use this approach instead:
            # echo $'\\set QUIET on \\a \\x off \\t \\\\ select pg_database_size(current_database())/1024/1024;' | make -s psql
-            DB_SIZE_MB=$(docker compose run --rm -u $(id -u):$(id -g) openmaptiles-tools psql.sh -qtAc 'select pg_database_size(current_database())/1024/1024;')
-            docker compose run --rm -u $(id -u):$(id -g) openmaptiles-tools pg_dump --schema-only > "${PROFILE_DIR}/schema.sql"
+            if grep -qE '^ import-osm:$' docker-compose.yml; then
+              # old version using dedicated import-osm docker image
+              DB_SIZE_MB=$(docker-compose run --rm -u $(id -u):$(id -g) import-osm ./psql.sh -qtAc 'select pg_database_size(current_database())/1024/1024;')
+            else
+              DB_SIZE_MB=$(docker-compose run --rm -u $(id -u):$(id -g) openmaptiles-tools psql.sh -qtAc 'select pg_database_size(current_database())/1024/1024;')
+            fi
+            docker-compose run --rm -u $(id -u):$(id -g) openmaptiles-tools pg_dump --schema-only > "${PROFILE_DIR}/schema.sql"
            echo "$DB_SIZE_MB" > "${PROFILE_DIR}/db_size.tsv"
          }
-          echo "Ensuring we have the needed dirs"
-          pwd
          mkdir -p perf_cache
          mkdir -p artifacts
          mkdir -p pr_message
@@ -183,18 +207,15 @@
            git reset --hard ${CURRENT_SHA}^1
          fi
-          docker compose pull
          PROFILE_DIR=../perf_cache
          create_db
          if [ ! -f ../ci_cache/wikidata-cache.json ]; then
            cp cache/wikidata-cache.json ../ci_cache/wikidata-cache.json
          fi
-          (set -x; profile test-perf docker compose run --rm -T openmaptiles-tools \
+          (set -x; profile test-perf docker-compose run --rm -T openmaptiles-tools \
            test-perf openmaptiles.yaml $TEST_PERF_PARAMS \
            --record /tileset/results.json)
-          echo "Done generating base perf results, moving them to ../perf_cache"
-          pwd
          mv results.json ../perf_cache
          if [ "$GITHUB_EVENT_NAME" = "pull_request" ]; then
@@ -205,10 +226,7 @@
            echo "Found cached performance results"
          fi
-          docker compose pull
          pushd ../perf_cache
-          echo "Should be in perf_cache"
-          pwd
          if [ "$GITHUB_EVENT_NAME" = "pull_request" ]; then
            cp results.json ../artifacts/base-results.json
            # Copy all tsv files, not just the ones with "profile-" prefix.
@@ -228,10 +246,8 @@
          PROFILE_DIR=../artifacts
          create_db
-          echo "Copying existing perf_cache results to current dir"
-          pwd
          cp ../perf_cache/results.json .
-          OUTPUT="$(set -x; profile test-perf docker compose run --rm -T openmaptiles-tools \
+          OUTPUT="$(set -x; profile test-perf docker-compose run --rm -T openmaptiles-tools \
            test-perf openmaptiles.yaml $TEST_PERF_PARAMS \
            --compare /tileset/results.json --record /tileset/pr-results.json)"
          rm results.json
@@ -287,14 +303,14 @@
          fi
      - name: Save performance artifacts
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v1
        with:
          name: performance_results
          path: artifacts
      - name: Save PR message artifact
        if: github.event_name == 'pull_request'
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v1
        with:
          name: pr_message
          path: pr_message

.gitignore

@@ -10,7 +10,6 @@ quickstart.log
 # imput / output data
 data/*
-data.backup/*
 # generated source files
 build/*

@@ -1,6 +1,6 @@
 # Introduction
-Thank you for considering contributing to OpenMapTiles. It's people like you that make OpenMapTiles such a great project. Talk to us at the OSM Slack **#openmaptiles** channel ([join](https://slack.openstreetmap.us/)).
+Thank you for considering contributing to OpenMapTiles. It's people like you that make OpenMapTiles such a great project. Talk to us at the OSM Slack **#openmaptiles** channel ([join](https://osmus-slack.herokuapp.com/)).
 Following these guidelines helps to communicate that you respect the time of the developers managing and developing this open source project. In return, they should reciprocate that respect in addressing your issue, assessing changes, and helping you finalize your pull requests.
@@ -41,48 +41,3 @@ When you modify import data rules in `mapping.yaml` or `*.sql`, please update:
 5. check if OMT styles are affected by the PR and if there is a need for style updates
 When you are making PR that adds new spatial features to OpenMapTiles schema, please make also PR for at least one of our GL styles to show it on the map. Visual check is crucial.
-# SQL unit testing
-It is recommended that you create a [unit test](TESTING.md) when modifying the behavior of the SQL layer. This will ensure that your changes are working as expected when importing or updating OSM data into an OpenMapTiles database.
-# Verifying that updates still work
-When testing a PR, you should also verify that the update process completes without an error. Please modify, if necessary, and run the script below.
-**Note:**
-The verification requires the script to append temporary changes to the `.env` file. Please restore the original version from git using `git checkout .env` or remove these changes before submitting a PR.
-```
-(
-set -e
-cat >> .env << EOM
-# temporary changes for verifying that updates still work
-# Ensure DIFF_MODE is active
-DIFF_MODE=true
-# Ensure all zoom levels are tested
-MAX_ZOOM=14
-EOM
-# Set the test area to the appropriate geofabrik extract
-export area=north-america/us/indiana
-# Build 1-month-old tiles
-rm -fr data build cache
-make destroy-db
-make download-geofabrik area=$area
-docker-compose run --rm --user=$(id -u):$(id -g) openmaptiles-tools sh -c "wget -nv -O data/$area.osm.pbf http://download.geofabrik.de/$area-$(date --date="$(date +%Y-%m-15) -1 month" +'%y%m01').osm.pbf"
-./quickstart.sh $area
-cat << EOM
-# Update with the changes since a month+ ago
-EOM
-docker-compose run --rm --user=$(id -u):$(id -g) openmaptiles-tools sh -c "osmupdate --base-url=$(sed -n 's/ *\"replication_url\": //p' data/$area.repl.json) data/$area.osm.pbf data/changes.osc.gz"
-make import-diff
-make generate-tiles-pg
-) < /dev/null
-```

@@ -1,4 +1,4 @@
-Copyright (c) 2024, MapTiler.com & OpenMapTiles contributors.
+Copyright (c) 2016, KlokanTech.com & OpenMapTiles contributors.
 All rights reserved.
 The vector tile schema has been developed by Klokan Technologies GmbH and
@@ -55,6 +55,6 @@ For printed and static maps a similar attribution should be made in a textual
 description near the image, in the same fashion as if you cite a photograph.
 Exceptions to OpenMapTiles attribution requirement can be in a written form granted
-by MapTiler (info@maptiler.com).
-The project contributors grant MapTiler AG the license to give such
+by Klokan Technologies GmbH (info@klokantech.com).
+The project contributors grant Klokan Technologies GmbH the license to give such
 exceptions on a commercial basis.

@@ -1,15 +0,0 @@
-#### amenity: parking_entrance
-is not yet covered:
-https://www.openstreetmap.org/node/3192139608
-http://localhost:8080/styles/light/?vector#18.53/50.8813949/4.707086
-### While we're at it, add trees
-As a near-max zoom level thing only.
-https://www.openstreetmap.org/node/3192142615
-Would need to be a new layer to style correctly,
-and other such 'decorations' could be considered.
-(currently, bins, park-benches, etc are considered regular POI while these
-would fit in their own 'low-rank' category)
-![miel_screenshot.png](miel_screenshot.png)

Makefile

@@ -6,8 +6,11 @@
 SHELL = /bin/bash
 .SHELLFLAGS = -o pipefail -c
+# Make all .env variables available for make targets
+include .env
 # Layers definition and meta data
-TILESET_FILE := $(or $(TILESET_FILE),$(shell (. .env; echo $${TILESET_FILE})),openmaptiles.yaml)
+TILESET_FILE ?= openmaptiles.yaml
 # Options to run with docker and docker-compose - ensure the container is destroyed on exit
 # Containers run as the current user rather than root (so that created files are not root-owned)
@@ -25,30 +28,17 @@ export PPORT
 # Local port to use with tileserver
 TPORT ?= 8081
 export TPORT
-STYLE_FILE := build/style/style.json
-STYLE_HEADER_FILE := style/style-header.json
-# Support newer `docker compose` syntax in addition to `docker-compose`
-ifeq (, $(shell which docker-compose))
-  DOCKER_COMPOSE_COMMAND := docker compose
-  $(info Using docker compose V2 (docker compose))
-else
-  DOCKER_COMPOSE_COMMAND := docker-compose
-  $(info Using docker compose V1 (docker-compose))
-endif
 # Allow a custom docker-compose project name
-DC_PROJECT := $(or $(DC_PROJECT),$(shell (. .env; echo $${DC_PROJECT})))
-ifeq ($(DC_PROJECT),)
+ifeq ($(strip $(DC_PROJECT)),)
   DC_PROJECT := $(notdir $(shell pwd))
-  DOCKER_COMPOSE := $(DOCKER_COMPOSE_COMMAND)
+  DOCKER_COMPOSE := docker-compose
 else
-  DOCKER_COMPOSE := $(DOCKER_COMPOSE_COMMAND) --project-name $(DC_PROJECT)
+  DOCKER_COMPOSE := docker-compose --project-name $(DC_PROJECT)
 endif
 # Make some operations quieter (e.g. inside the test script)
-ifeq ($(or $(QUIET),$(shell (. .env; echo $${QUIET}))),)
+ifeq ($(strip $(QUIET)),)
   QUIET_FLAG :=
 else
   QUIET_FLAG := --quiet
@@ -68,7 +58,8 @@ else
 endif
 # Set OpenMapTiles host
-export OMT_HOST := http://$(firstword $(subst :, ,$(subst tcp://,,$(DOCKER_HOST))) localhost)
+OMT_HOST := http://$(firstword $(subst :, ,$(subst tcp://,,$(DOCKER_HOST))) localhost)
+export OMT_HOST
 # This defines an easy $(newline) value to act as a "\n". Make sure to keep exactly two empty lines after newline.
 define newline
@@ -76,12 +67,6 @@
 endef
-# Use the old Postgres connection values as a fallback
-PGHOST := $(or $(PGHOST),$(shell (. .env; echo $${PGHOST})),$(POSTGRES_HOST),$(shell (. .env; echo $${POSTGRES_HOST})),postgres)
-PGPORT := $(or $(PGPORT),$(shell (. .env; echo $${PGPORT})),$(POSTGRES_PORT),$(shell (. .env; echo $${POSTGRES_PORT})),postgres)
-PGDATABASE := $(or $(PGDATABASE),$(shell (. .env; echo $${PGDATABASE})),$(POSTGRES_DB),$(shell (. .env; echo $${POSTGRES_DB})),postgres)
-PGUSER := $(or $(PGUSER),$(shell (. .env; echo $${PGUSER})),$(POSTGRES_USER),$(shell (. .env; echo $${POSTGRES_USER})),postgres)
-PGPASSWORD := $(or $(PGPASSWORD),$(shell (. .env; echo $${PGPASSWORD})),$(POSTGRES_PASSWORD),$(shell (. .env; echo $${POSTGRES_PASSWORD})),postgres)
 #
 # Determine area to work on
@@ -94,7 +79,7 @@
 # historically we have been using $(area) rather than $(AREA), so make both work
 area ?= $(AREA)
 # Ensure the $(area) param is set, or try to automatically determine it based on available data files
-ifeq ($(area),)
+ifeq ($(strip $(area)),)
 # An $(area) parameter is not set. If only one *.osm.pbf file is found in ./data, use it as $(area).
 data_files := $(shell find data -name '*.osm.pbf' 2>/dev/null)
 ifneq ($(word 2,$(data_files)),)
@@ -139,7 +124,7 @@ ifeq ($(area),)
 endif
 endif
-ifneq ($(AREA_INFO),)
+ifneq ($(strip $(AREA_INFO)),)
 define assert_area_is_given
 @echo "$(AREA_INFO)"
 endef
@@ -149,17 +134,25 @@
 PBF_FILE ?= data/$(area).osm.pbf
 # For download-osm, allow URL parameter to download file from a given URL. Area param must still be provided.
-DOWNLOAD_AREA := $(or $(url), $(area))
-# The mbtiles file is placed into the $EXPORT_DIR=/export (mapped to ./data)
-MBTILES_FILE := $(or $(MBTILES_FILE),$(shell (. .env; echo $${MBTILES_FILE})),$(area).mbtiles)
+ifneq ($(strip $(url)),)
+  DOWNLOAD_AREA := $(url)
+else
+  DOWNLOAD_AREA := $(area)
+endif
+# import-borders uses these temp files during border parsing/import
+export BORDERS_CLEANUP_FILE ?= data/borders/$(area).cleanup.pbf
+export BORDERS_PBF_FILE ?= data/borders/$(area).filtered.pbf
+export BORDERS_CSV_FILE ?= data/borders/$(area).lines.csv
+# The file is placed into the $EXPORT_DIR=/export (mapped to ./data)
+export MBTILES_FILE ?= $(area).mbtiles
 MBTILES_LOCAL_FILE = data/$(MBTILES_FILE)
-DIFF_MODE := $(or $(DIFF_MODE),$(shell (. .env; echo $${DIFF_MODE})))
-ifeq ($(DIFF_MODE),true)
+ifeq ($(strip $(DIFF_MODE)),true)
 # import-osm implementation requires IMPOSM_CONFIG_FILE to be set to a valid file
-# For one-time only imports, the default value is fine.
-# For diff mode updates, use the dynamically-generated area-based config file
+# For static (no-updates) import, we don't need to override the default value
+# For the update mode, set location of the dynamically-generated area-based config file
 export IMPOSM_CONFIG_FILE = data/$(area).repl.json
 endif
@@ -171,119 +164,90 @@ ifneq (,$(wildcard $(AREA_BBOX_FILE)))
 export BBOX
 endif
-# Consult .env if needed
-MIN_ZOOM := $(or $(MIN_ZOOM),$(shell (. .env; echo $${MIN_ZOOM})),0)
-MAX_ZOOM := $(or $(MAX_ZOOM),$(shell (. .env; echo $${MAX_ZOOM})),7)
-PPORT := $(or $(PPORT),$(shell (. .env; echo $${PPORT})),7)
-TPORT := $(or $(TPORT),$(shell (. .env; echo $${TPORT})),7)
-define HELP_MESSAGE
-==============================================================================
-OpenMapTiles https://github.com/openmaptiles/openmaptiles
-Hints for testing areas
-make list-geofabrik # list actual geofabrik OSM extracts for download -> <<your-area>>
-./quickstart.sh <<your-area>> # example: ./quickstart.sh madagascar
-Hints for designers:
-make start-maputnik # start Maputnik Editor + dynamic tile server [ see $(OMT_HOST):8088 ]
-make stop-maputnik # stop Maputnik Editor + dynamic tile server
-make start-postserve # start dynamic tile server [ see $(OMT_HOST):$(PPORT) ]
-make stop-postserve # stop dynamic tile server
-make start-tileserver # start maptiler/tileserver-gl [ see $(OMT_HOST):$(TPORT) ]
-make stop-tileserver # stop maptiler/tileserver-gl
-Hints for developers:
-make # build source code
-make bash # start openmaptiles-tools /bin/bash terminal
-make generate-bbox-file # compute bounding box of a data file and store it in a file
-make generate-devdoc # generate devdoc including graphs for all layers [./layers/...]
-make generate-qa # statistics for a given layer's field
-make generate-tiles-pg # generate vector tiles based on .env settings using PostGIS ST_MVT()
-make generate-tiles # generate vector tiles based on .env settings using Mapnik (obsolete)
-make generate-changed-tiles # Generate tiles changed by import-diff
-make test-sql # run unit tests on the OpenMapTiles SQL schema
-cat .env # list PG database and MIN_ZOOM and MAX_ZOOM information
-cat quickstart.log # transcript of the last ./quickstart.sh run
-make help # help about available commands
-Hints for downloading & importing data:
-make list-geofabrik # list actual geofabrik OSM extracts for download
-make list-bbbike # list actual BBBike OSM extracts for download
-make download area=albania # download OSM data from any source and create config file
-make download-geofabrik area=albania # download OSM data from geofabrik.de and create config file
-make download-osmfr area=asia/qatar # download OSM data from openstreetmap.fr and create config file
-make download-bbbike area=Amsterdam # download OSM data from bbbike.org and create config file
-make import-data # Import data from OpenStreetMapData, Natural Earth and OSM Lake Labels.
-make import-osm # Import OSM data with the mapping rules from build/mapping.yaml
-make import-diff # Import OSM updates from data/changes.osc.gz
-make import-wikidata # Import labels from Wikidata
-make import-sql # Import layers (run this after modifying layer SQL)
-Hints for database management:
-make psql # start PostgreSQL console
-make psql-list-tables # list all PostgreSQL tables
-make list-views # list PostgreSQL public schema views
-make list-tables # list PostgreSQL public schema tables
-make vacuum-db # PostgreSQL: VACUUM ANALYZE
-make analyze-db # PostgreSQL: ANALYZE
-make destroy-db # remove docker containers and PostgreSQL data volume
-make start-db # start PostgreSQL, creating it if it doesn't exist
-make start-db-preloaded # start PostgreSQL, creating data-prepopulated one if it doesn't exist
-make stop-db # stop PostgreSQL database without destroying the data
-Hints for Docker management:
-make clean-unnecessary-docker # clean unnecessary docker image(s) and container(s)
-make refresh-docker-images # refresh openmaptiles docker images from Docker HUB
-make remove-docker-images # remove openmaptiles docker images
-make list-docker-images # show a list of available docker images
-==============================================================================
-endef
-export HELP_MESSAGE
+ifeq ($(strip $(area)),)
+define assert_area_is_given
+@echo ""
+@echo "ERROR: $(AREA_ERROR)"
+@echo ""
+@echo " make $@ area=<area-id>"
+@echo ""
+@echo "To download an area, use make download <area-id>"
+@echo "To list downloadable areas, use make list-geofabrik and/or make list-bbbike"
+@exit 1
+endef
+else
+ifneq ($(strip $(AREA_INFO)),)
+define assert_area_is_given
+@echo "$(AREA_INFO)"
+endef
+endif
+endif
 #
 # TARGETS
 #
 .PHONY: all
-all: init-dirs build/openmaptiles.tm2source/data.yml build/mapping.yaml build-sql build-style
+all: init-dirs build/openmaptiles.tm2source/data.yml build/mapping.yaml build-sql
 .PHONY: help
 help:
-@echo "$$HELP_MESSAGE" | less
-define win_fs_error
-( \
-echo "" ;\
-echo "ERROR: Windows native filesystem" ;\
-echo "" ;\
-echo "Please avoid running OpenMapTiles in a Windows filesystem." ;\
-echo "See https://github.com/openmaptiles/openmaptiles/issues/1095#issuecomment-817095465" ;\
-echo "" ;\
-exit 1 ;\
-)
-endef
+@echo "=============================================================================="
+@echo " OpenMapTiles https://github.com/openmaptiles/openmaptiles "
+@echo "Hints for testing areas "
+@echo " make list-geofabrik # list actual geofabrik OSM extracts for download -> <<your-area>> "
+@echo " ./quickstart.sh <<your-area>> # example: ./quickstart.sh madagascar "
+@echo " "
+@echo "Hints for designers:"
+@echo " make start-maputnik # start Maputnik Editor + dynamic tile server [ see $(OMT_HOST):8088 ]"
+@echo " make start-postserve # start dynamic tile server [ see $(OMT_HOST):$(PPORT) ]"
+@echo " make start-tileserver # start maptiler/tileserver-gl [ see $(OMT_HOST):$(TPORT) ]"
+@echo " "
+@echo "Hints for developers:"
+@echo " make # build source code"
+@echo " make list-geofabrik # list actual geofabrik OSM extracts for download"
+@echo " make list-bbbike # list actual BBBike OSM extracts for download"
+@echo " make download area=albania # download OSM data from any source and create config file"
+@echo " make download-geofabrik area=albania # download OSM data from geofabrik.de and create config file"
+@echo " make download-osmfr area=asia/qatar # download OSM data from openstreetmap.fr and create config file"
+@echo " make download-bbbike area=Amsterdam # download OSM data from bbbike.org and create config file"
+@echo " make generate-bbox-file # compute bounding box of a data file and store it in a file"
+@echo " make psql # start PostgreSQL console"
+@echo " make psql-list-tables # list all PostgreSQL tables"
+@echo " make vacuum-db # PostgreSQL: VACUUM ANALYZE"
+@echo " make analyze-db # PostgreSQL: ANALYZE"
+@echo " make generate-qa # statistics for a given layer's field"
+@echo " make generate-devdoc # generate devdoc including graphs for all layers [./layers/...]"
+@echo " make bash # start openmaptiles-tools /bin/bash terminal"
+@echo " make destroy-db # remove docker containers and PostgreSQL data volume"
+@echo " make start-db # start PostgreSQL, creating it if it doesn't exist"
+@echo " make start-db-preloaded # start PostgreSQL, creating data-prepopulated one if it doesn't exist"
+@echo " make stop-db # stop PostgreSQL database without destroying the data"
+@echo " make clean-unnecessary-docker # clean unnecessary docker image(s) and container(s)"
+@echo " make refresh-docker-images # refresh openmaptiles docker images from Docker HUB"
+@echo " make remove-docker-images # remove openmaptiles docker images"
+@echo " make list-views # list PostgreSQL public schema views"
+@echo " make list-tables # list PostgreSQL public schema tables"
+@echo " cat .env # list PG database and MIN_ZOOM and MAX_ZOOM information"
+@echo " cat quickstart.log # transcript of the last ./quickstart.sh run"
+@echo " make help # help about available commands"
+@echo "=============================================================================="
 .PHONY: init-dirs
 init-dirs:
 @mkdir -p build/sql/parallel
 @mkdir -p build/openmaptiles.tm2source
-@mkdir -p build/style
-@mkdir -p data
+@mkdir -p data/borders
 @mkdir -p cache
-@ ! ($(DOCKER_COMPOSE) 2>/dev/null run $(DC_OPTS) openmaptiles-tools df --output=fstype /tileset| grep -q 9p) < /dev/null || ($(win_fs_error))
 build/openmaptiles.tm2source/data.yml: init-dirs
 ifeq (,$(wildcard build/openmaptiles.tm2source/data.yml))
-$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools bash -c \
-'generate-tm2source $(TILESET_FILE) > $@'
-#OLD: $(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools generate-tm2source $(TILESET_FILE) --host="postgres" --port=5432 --database="openmaptiles" --user="$(DC_USER)" --password="$(DC_PASSWORD)" > $@
+$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools generate-tm2source $(TILESET_FILE) --host="postgres" --port=5432 --database="openmaptiles" --user="$(DC_USER)" --password="$(DC_PASSWORD)" > $@
 endif
 build/mapping.yaml: init-dirs
 ifeq (,$(wildcard build/mapping.yaml))
-$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools bash -c \
-'generate-imposm3 $(TILESET_FILE) > $@'
+$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools generate-imposm3 $(TILESET_FILE) > $@
 endif
 .PHONY: build-sql
@@ -293,46 +257,22 @@ ifeq (,$(wildcard build/sql/run_last.sql))
 $(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools bash -c \
 'generate-sql $(TILESET_FILE) --dir ./build/sql \
 && generate-sqltomvt $(TILESET_FILE) \
---key --gzip --postgis-ver 3.3.4 \
+--key --gzip --postgis-ver 3.0.1 \
 --function --fname=getmvt >> ./build/sql/run_last.sql'
 endif
-.PHONY: build-sprite
-build-sprite: init-dirs
-$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools bash -c 'spreet /style/icons build/style/sprite && \
-spreet --retina /style/icons build/style/sprite@2x'
-.PHONY: build-style
-build-style: init-dirs
-$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools bash -c 'style-tools recompose $(TILESET_FILE) $(STYLE_FILE) \
-$(STYLE_HEADER_FILE) && \
-spreet /style/icons build/style/sprite && spreet --retina /style/icons build/style/sprite@2x'
-.PHONY: download-fonts
-download-fonts:
-$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools bash -c '[ ! -d "/export/fonts" ] && mkdir /export/fonts && \
-echo "Downloading fonts..." && wget -qO /export/noto-sans.zip --show-progress \
-https://github.com/openmaptiles/fonts/releases/download/v2.0/noto-sans.zip && \
-echo "Unzipping fonts..." && unzip -q /export/noto-sans.zip -d /export/fonts && rm /export/noto-sans.zip || \
-echo "Fonts already exist."'
 .PHONY: clean
-clean: clean-test-data
+clean:
 rm -rf build
-clean-test-data:
-rm -rf data/changes.state.txt
-rm -rf data/last.state.txt
-rm -rf data/changes.repl.json
 .PHONY: destroy-db
-DOCKER_PROJECT = $(shell echo $(DC_PROJECT) | tr A-Z a-z | tr -cd '[:alnum:]')
+# TODO: Use https://stackoverflow.com/a/27852388/177275
+destroy-db: DC_PROJECT := $(shell echo $(DC_PROJECT) | tr A-Z a-z)
 destroy-db:
 $(DOCKER_COMPOSE) down -v --remove-orphans
 $(DOCKER_COMPOSE) rm -fv
-docker volume ls -q -f "name=^$(DOCKER_PROJECT)_" | $(XARGS) docker volume rm
+docker volume ls -q -f "name=^$(DC_PROJECT)_" | $(XARGS) docker volume rm
 rm -rf cache
-mkdir cache
 .PHONY: start-db-nowait
 start-db-nowait: init-dirs
@@ -373,11 +313,11 @@ OSM_SERVER=$(patsubst download,,$(patsubst download-%,%,$@))
 .PHONY: $(ALL_DOWNLOADS)
 $(ALL_DOWNLOADS): init-dirs
 @$(assert_area_is_given)
-ifneq ($(url),)
+ifneq ($(strip $(url)),)
 $(if $(OSM_SERVER),$(error url parameter can only be used with non-specific download target:$(newline) make download area=$(area) url="$(url)"$(newline)))
 endif
 ifeq (,$(wildcard $(PBF_FILE)))
-ifeq ($(DIFF_MODE),true)
+ifeq ($(strip $(DIFF_MODE)),true)
 @echo "Downloading $(DOWNLOAD_AREA) with replication support into $(PBF_FILE) and $(IMPOSM_CONFIG_FILE) from $(if $(OSM_SERVER),$(OSM_SERVER),any source)"
 @$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools download-osm $(OSM_SERVER) "$(DOWNLOAD_AREA)" \
 --imposm-cfg "$(IMPOSM_CONFIG_FILE)" \
@@ -391,7 +331,7 @@
 endif
 @echo ""
 else
-ifeq ($(DIFF_MODE),true)
+ifeq ($(strip $(DIFF_MODE)),true)
 ifeq (,$(wildcard $(IMPOSM_CONFIG_FILE)))
 $(error \
 $(newline) Data files $(PBF_FILE) already exists, but $(IMPOSM_CONFIG_FILE) does not. \
@@ -424,7 +364,7 @@ psql: start-db-nowait
 # Special cache handling for Docker Toolbox on Windows
 ifeq ($(MSYSTEM),MINGW64)
 DC_CONFIG_CACHE := -f docker-compose.yml -f docker-compose-$(MSYSTEM).yml
-DC_OPTS_CACHE := $(filter-out --user=%,$(DC_OPTS))
+DC_OPTS_CACHE := $(strip $(filter-out --user=%,$(DC_OPTS)))
 else
 DC_OPTS_CACHE := $(DC_OPTS)
 endif
@@ -434,17 +374,13 @@ import-osm: all start-db-nowait
 @$(assert_area_is_given)
 $(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) openmaptiles-tools sh -c 'pgwait && import-osm $(PBF_FILE)'
-.PHONY: start-update-osm
-start-update-osm: start-db
+.PHONY: update-osm
+update-osm: all start-db-nowait
 @$(assert_area_is_given)
-$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) up -d update-osm
-.PHONY: stop-update-osm
-stop-update-osm:
-$(DOCKER_COMPOSE) stop update-osm
+$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) openmaptiles-tools sh -c 'pgwait && import-update'
 .PHONY: import-diff
-import-diff: start-db-nowait
+import-diff: all start-db-nowait
 @$(assert_area_is_given)
 $(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) openmaptiles-tools sh -c 'pgwait && import-diff'
@@ -452,21 +388,21 @@ import-diff: start-db-nowait
 import-data: start-db
 $(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) import-data
-.PHONY: import-borders
-import-borders: start-db-nowait
-@$(assert_area_is_given)
-# If CSV borders file already exists, use it without re-parsing
-$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools sh -c \
-'pgwait && import-borders $$([ -f "$(BORDERS_CSV_FILE)" ] && echo load $(BORDERS_CSV_FILE) || echo import $(PBF_FILE))'
 .PHONY: import-sql
 import-sql: all start-db-nowait
 $(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools sh -c 'pgwait && import-sql' | \
-awk -v s=": WARNING:" '1{print; fflush()} $$0~s{print "\n*** WARNING detected, aborting"; exit(1)}' | \
-awk '1{print; fflush()} $$0~".*ERROR" {txt=$$0} END{ if(txt){print "\n*** ERROR detected, aborting:"; print txt; exit(1)} }'
-.PHONY: merge-pbf
-merge-pbf:
-$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools \
-osmosis --rb data/belgium.osm.pbf --rb data/netherlands.osm.pbf --merge --wb data/merges.osm.pbf
+awk -v s=": WARNING:" '1{print; fflush()} $$0~s{print "\n*** WARNING detected, aborting"; exit(1)}'
 .PHONY: generate-tiles
 generate-tiles: all start-db
-@echo "WARNING: This Mapnik-based method of tile generation is obsolete. Use generate-tiles-pg instead."
+@$(assert_area_is_given)
 @echo "Generating tiles into $(MBTILES_LOCAL_FILE) (will delete if already exists)..."
 @rm -rf "$(MBTILES_LOCAL_FILE)"
 $(DOCKER_COMPOSE) run $(DC_OPTS) generate-vectortiles
@@ -476,31 +412,16 @@ generate-tiles: all start-db
 .PHONY: generate-tiles-pg
 generate-tiles-pg: all start-db
-@echo "Generating tiles into $(MBTILES_LOCAL_FILE) (will delete if already exists) using PostGIS ST_MVT()..."
+@$(assert_area_is_given)
+@echo "Generating tiles into $(MBTILES_LOCAL_FILE) (will delete if already exists)..."
 @rm -rf "$(MBTILES_LOCAL_FILE)"
-# For some reason Ctrl+C doesn't work here without the -T. Must be pressed twice to stop.
-$(DOCKER_COMPOSE) run -T $(DC_OPTS) openmaptiles-tools generate-tiles
+$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools generate-tiles
 @echo "Updating generated tile metadata ..."
 $(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools \
 mbtiles-tools meta-generate "$(MBTILES_LOCAL_FILE)" $(TILESET_FILE) --auto-minmax --show-ranges
-.PHONY: data/tiles.txt
-data/tiles.txt:
-find ./data -name "*.tiles" -exec cat {} \; -exec rm {} \; | \
-$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools \
-tile_multiplier $(MIN_ZOOM) $(MAX_ZOOM) >> data/tiles.txt
-.PHONY: generate-changed-tiles
-generate-changed-tiles: data/tiles.txt
-# Re-generating updated tiles, if needed
-if [ -s data/tiles.txt ] ; then \
-$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) openmaptiles-tools refresh-views; \
-$(DOCKER_COMPOSE) run $(DC_OPTS) -e LIST_FILE=data/tiles.txt openmaptiles-tools generate-tiles; \
-rm data/tiles.txt; \
-fi
 .PHONY: start-tileserver
-start-tileserver: init-dirs build-style download-fonts
+start-tileserver: init-dirs
 @echo " "
 @echo "***********************************************************"
 @echo "* "
@@ -509,7 +430,7 @@ start-tileserver: init-dirs build-style download-fonts
 @echo "* "
 @echo "***********************************************************"
 @echo " "
-$(DOCKER_COMPOSE_COMMAND) pull tileserver-gl
+docker pull maptiler/tileserver-gl
 @echo " "
 @echo "***********************************************************"
 @echo "* "
@@ -518,11 +439,7 @@
 @echo "* "
 @echo "***********************************************************"
 @echo " "
-$(DOCKER_COMPOSE) up -d tileserver-gl
-.PHONY: stop-tileserver
-stop-tileserver:
-$(DOCKER_COMPOSE) stop tileserver-gl
+docker run $(DC_OPTS) -it --name tileserver-gl -v $$(pwd)/data:/data -p $(TPORT):$(TPORT) maptiler/tileserver-gl --port $(TPORT)
 .PHONY: start-postserve
 start-postserve: start-db
@@ -554,11 +471,11 @@ start-maputnik: stop-maputnik start-postserve
 @echo "* "
 @echo "***********************************************************"
 @echo " "
-$(DOCKER_COMPOSE) up -d maputnik_editor
+docker run $(DC_OPTS) --name maputnik_editor -d -p 8088:8888 maputnik/editor
 .PHONY: stop-maputnik
 stop-maputnik:
--$(DOCKER_COMPOSE) stop maputnik_editor
+-docker rm -f maputnik_editor
 # STAT_FUNCTION=frequency|toplength|variance
 .PHONY: generate-qa
@@ -606,7 +523,7 @@ psql-list-tables: init-dirs
 .PHONY: vacuum-db
 vacuum-db: init-dirs
 @echo "Start - postgresql: VACUUM ANALYZE VERBOSE;"
-$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools psql.sh -v ON_ERROR_STOP=1 -P pager=off -c 'VACUUM (ANALYZE, VERBOSE);'
+$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools psql.sh -v ON_ERROR_STOP=1 -P pager=off -c 'VACUUM ANALYZE VERBOSE;'
 .PHONY: analyze-db
 analyze-db: init-dirs
@@ -619,16 +536,16 @@ list-docker-images:
 .PHONY: refresh-docker-images
 refresh-docker-images: init-dirs
-ifneq ($(NO_REFRESH),)
+ifneq ($(strip $(NO_REFRESH)),)
 @echo "Skipping docker image refresh"
 else
 @echo ""
 @echo "Refreshing docker images... Use NO_REFRESH=1 to skip."
-ifneq ($(USE_PRELOADED_IMAGE),)
+ifneq ($(strip $(USE_PRELOADED_IMAGE)),)
 POSTGIS_IMAGE=openmaptiles/postgis-preloaded \
-$(DOCKER_COMPOSE_COMMAND) pull --ignore-pull-failures $(QUIET_FLAG) openmaptiles-tools generate-vectortiles postgres
+docker-compose pull --ignore-pull-failures $(QUIET_FLAG) openmaptiles-tools generate-vectortiles postgres
 else
-$(DOCKER_COMPOSE_COMMAND) pull --ignore-pull-failures $(QUIET_FLAG) openmaptiles-tools generate-vectortiles postgres import-data
+docker-compose pull --ignore-pull-failures $(QUIET_FLAG) openmaptiles-tools generate-vectortiles postgres import-data
 endif
 endif
@@ -653,7 +570,7 @@ test-perf-null: init-dirs
 .PHONY: build-test-pbf
 build-test-pbf: init-dirs
-$(DOCKER_COMPOSE_COMMAND) run $(DC_OPTS) openmaptiles-tools /tileset/.github/workflows/build-test-data.sh
+docker-compose run $(DC_OPTS) openmaptiles-tools /tileset/.github/workflows/build-test-data.sh
 .PHONY: debug
 debug: ## Use this target when developing Makefile itself to verify loaded environment variables
@@ -663,45 +580,3 @@ debug: ## Use this target when developing Makefile itself to verify loaded envi
 @echo BBOX = $(BBOX) , $$BBOX
 @echo MIN_ZOOM = $(MIN_ZOOM) , $$MIN_ZOOM
 @echo MAX_ZOOM = $(MAX_ZOOM) , $$MAX_ZOOM
-build/import-tests.osm.pbf: init-dirs
-$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) openmaptiles-tools sh -c 'osmconvert tests/import/*.osm -o=build/import-tests.osm.pbf'
-data/changes.state.txt:
-cp -f tests/changes.state.txt data/
-data/last.state.txt:
-cp -f tests/last.state.txt data/
-data/changes.repl.json:
-cp -f tests/changes.repl.json data/
-data/changes.osc.gz: init-dirs
-@echo " UPDATE unit test data..."
-$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) openmaptiles-tools sh -c 'osmconvert tests/update/*.osc --merge-versions -o=data/changes.osc && gzip -f data/changes.osc'
-test-sql: clean refresh-docker-images destroy-db start-db-nowait build/import-tests.osm.pbf data/changes.state.txt data/last.state.txt data/changes.repl.json build/mapping.yaml data/changes.osc.gz build/openmaptiles.tm2source/data.yml build/mapping.yaml build-sql
-$(eval area := changes)
-@echo "Load IMPORT test data"
-sed -ir "s/^[#]*\s*MAX_ZOOM=.*/MAX_ZOOM=14/" .env
-sed -ir "s/^[#]*\s*DIFF_MODE=.*/DIFF_MODE=false/" .env
-$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) openmaptiles-tools sh -c 'pgwait && import-osm build/import-tests.osm.pbf'
-$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) import-data
-@echo "Apply OpenMapTiles SQL schema to test data @ Zoom 14..."
-$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools sh -c 'pgwait && import-sql' | \
-awk -v s=": WARNING:" '1{print; fflush()} $$0~s{print "\n*** WARNING detected, aborting"; exit(1)}' | \
-awk '1{print; fflush()} $$0~".*ERROR" {txt=$$0} END{ if(txt){print "\n*** ERROR detected, aborting:"; print txt; exit(1)} }'
-@echo "Test SQL output for Import Test Data"
-$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) openmaptiles-tools sh -c 'pgwait && psql.sh < tests/test-post-import.sql' 2>&1 | \
-awk -v s="ERROR:" '1{print; fflush()} $$0~s{print "*** ERROR detected, aborting"; exit(1)}'
-@echo "Run UPDATE process on test data..."
-sed -ir "s/^[#]*\s*DIFF_MODE=.*/DIFF_MODE=true/" .env
-$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) openmaptiles-tools sh -c 'pgwait && import-diff'
-@echo "Test SQL output for Update Test Data"
-$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) openmaptiles-tools sh -c 'pgwait && psql.sh < tests/test-post-update.sql' 2>&1 | \
-awk -v s="ERROR:" '1{print; fflush()} $$0~s{print "*** ERROR detected, aborting"; exit(1)}'

@@ -2,7 +2,7 @@
 ### Req:
 * CPU: AMD64 ( = Intel 64 bit)
-* The base docker debian images are x86_64 based, so the ARM, MIPS currently not supported!
+* The base docker debian images are x86_64 based, so the ARM,MIPS currently not supported!
 * Operating system
 * Linux is suggested
 * The development and the testing platform is Linux.
@@ -59,7 +59,7 @@ If you have problems with the quickstart
 IF the previous step is working,
 THEN you can test other available quickstart extracts ( based on [Geofabrik extracts](http://download.geofabrik.de/index.html) ) !
 * We are using https://github.com/julien-noblet/download-geofabrik tool
-* The current extract list, and more information -> `make list-geofabrik` or `make list-bbbike`
+* The current extract list, and more information -> `make list`
 This is generating `.mbtiles` for your area : [ MIN_ZOOM: "0" - MAX_ZOOM: "7" ]
@@ -415,63 +415,33 @@ the current output:
 ```
 ==============================================================================
 OpenMapTiles https://github.com/openmaptiles/openmaptiles
 Hints for testing areas
-make list-geofabrik # list actual geofabrik OSM extracts for download -> <<your-area>>
+make download-geofabrik-list # list actual geofabrik OSM extracts for download -> <<your-area>>
 ./quickstart.sh <<your-area>> # example: ./quickstart.sh madagascar
 Hints for designers:
-make start-maputnik # start Maputnik Editor + dynamic tile server [ see http://localhost:8088 ]
-make stop-maputnik # stop Maputnik Editor + dynamic tile server
-make start-postserve # start dynamic tile server [ see http://localhost:8090 ]
-make stop-postserve # stop dynamic tile server
-make start-tileserver # start maptiler/tileserver-gl [ see http://localhost:8081 ]
-make stop-tileserver # stop maptiler/tileserver-gl
+make start-postserve # start Postserver + Maputnik Editor [ see localhost:8088 ]
+make start-tileserver # start maptiler/tileserver-gl [ see localhost:8081 ]
 Hints for developers:
 make # build source code
-make bash # start openmaptiles-tools /bin/bash terminal
-make generate-bbox-file # compute bounding box of a data file and store it in a file
-make generate-devdoc # generate devdoc including graphs for all layers [./layers/...]
-make generate-qa # statistics for a given layer's field
-make generate-tiles-pg # generate vector tiles based on .env settings using PostGIS ST_MVT()
-make generate-tiles # generate vector tiles based on .env settings using Mapnik (obsolete)
-make generate-changed-tiles # Generate tiles changed by import-diff
-make test-sql # run unit tests on the OpenMapTiles SQL schema
-cat .env # list PG database and MIN_ZOOM and MAX_ZOOM information
-cat quickstart.log # transcript of the last ./quickstart.sh run
-make help # help about available commands
-Hints for downloading & importing data:
-make list-geofabrik # list actual geofabrik OSM extracts for download
-make list-bbbike # list actual BBBike OSM extracts for download
-make download area=albania # download OSM data from any source and create config file
-make download-geofabrik area=albania # download OSM data from geofabrik.de and create config file
-make download-osmfr area=asia/qatar # download OSM data from openstreetmap.fr and create config file
-make download-bbbike area=Amsterdam # download OSM data from bbbike.org and create config file
-make import-data # Import data from OpenStreetMapData, Natural Earth and OSM Lake Labels.
-make import-osm # Import OSM data with the mapping rules from build/mapping.yaml
-make import-diff # Import OSM updates from data/changes.osc.gz
-make import-wikidata # Import labels from Wikidata
-make import-sql # Import layers (run this after modifying layer SQL)
-Hints for database management:
+make download-geofabrik area=albania # download OSM data from geofabrik, and create config file
 make psql # start PostgreSQL console
 make psql-list-tables # list all PostgreSQL tables
-make list-views # list PostgreSQL public schema views
-make list-tables # list PostgreSQL public schema tables
-make vacuum-db # PostgreSQL: VACUUM ANALYZE
-make analyze-db # PostgreSQL: ANALYZE
-make destroy-db # remove docker containers and PostgreSQL data volume
-make start-db # start PostgreSQL, creating it if it doesn't exist
-make start-db-preloaded # start PostgreSQL, creating data-prepopulated one if it doesn't exist
-make stop-db # stop PostgreSQL database without destroying the data
-Hints for Docker management:
-make clean-unnecessary-docker # clean unnecessary docker image(s) and container(s)
+make psql-vacuum-analyze # PostgreSQL: VACUUM ANALYZE
+make psql-analyze # PostgreSQL: ANALYZE
+make generate-qa # statistics for a given layer's field
+make generate-devdoc # generate devdoc [./build/devdoc]
+make tools-dev # start import-sql /bin/bash terminal
+make db-destroy # remove docker containers, PG data volume
+make docker-unnecessary-clean # clean unnecessary docker image(s) and container(s)
 make refresh-docker-images # refresh openmaptiles docker images from Docker HUB
 make remove-docker-images # remove openmaptiles docker images
-make list-docker-images # show a list of available docker images
+make list-views # list PostgreSQL public schema views
+make list-tables # list PostgreSQL public schema tables
+cat .env # list PG database and MIN_ZOOM and MAX_ZOOM information
+cat ./quickstart.log # backup of the last ./quickstart.sh
+make help # help about available commands
 ==============================================================================
 ```

View File

@ -1,4 +1,4 @@
## OpenMapTiles [![Build Status](https://github.com/openmaptiles/openmaptiles/workflows/OpenMapTiles%20Integrity%20CI/badge.svg?branch=master)](https://github.com/openmaptiles/openmaptiles/actions) ## OpenMapTiles [![Build Status](https://github.com/openmaptiles/openmaptiles/workflows/OMT_CI/badge.svg?branch=master)](https://github.com/openmaptiles/openmaptiles/actions)
OpenMapTiles is an extensible and open tile schema based on OpenStreetMap. This project is used to generate vector tiles for online zoomable maps. OpenMapTiles is about creating beautiful basemaps with general layers containing topographic information. More information at [openmaptiles.org](https://openmaptiles.org/) and [maptiler.com/data/](https://www.maptiler.com/data/).
@ -11,8 +11,7 @@ Please keep in mind that OpenMapTiles schema should display general topographic
- :link: Data for download: https://www.maptiler.com/data/ - :link: Data for download: https://www.maptiler.com/data/
- :link: Hosting https://www.maptiler.com/cloud/ - :link: Hosting https://www.maptiler.com/cloud/
- :link: Create own layer https://github.com/openmaptiles/openmaptiles-skiing - :link: Create own layer https://github.com/openmaptiles/openmaptiles-skiing
- :link: Practical usage of OpenMapTiles https://github.com/maptiler/foss4g-workshop - :link: Discuss at the #openmaptiles channel at [OSM Slack](https://osmus-slack.herokuapp.com/)
- :link: Discuss at the #openmaptiles channel at [OSM Slack](https://slack.openstreetmap.us/)
## Styles ## Styles
@ -21,15 +20,14 @@ You can start from several GL styles supporting the OpenMapTiles vector schema.
:link: [Learn how to create Mapbox GL styles with Maputnik and OpenMapTiles](http://openmaptiles.org/docs/style/maputnik/). :link: [Learn how to create Mapbox GL styles with Maputnik and OpenMapTiles](http://openmaptiles.org/docs/style/maputnik/).
- [OSM OpenMapTiles](./style/README.md)
- [OSM Bright](https://github.com/openmaptiles/osm-bright-gl-style) - [OSM Bright](https://github.com/openmaptiles/osm-bright-gl-style)
- [MapTiler Basic](https://github.com/openmaptiles/maptiler-basic-gl-style)
- [MapTiler 3D](https://github.com/openmaptiles/maptiler-3d-gl-style)
- [Fiord Color](https://github.com/openmaptiles/fiord-color-gl-style)
- [MapTiler Toner](https://github.com/openmaptiles/maptiler-toner-gl-style)
- [OSM Liberty](https://github.com/maputnik/osm-liberty)
- [Positron](https://github.com/openmaptiles/positron-gl-style) - [Positron](https://github.com/openmaptiles/positron-gl-style)
- [Dark Matter](https://github.com/openmaptiles/dark-matter-gl-style) - [Dark Matter](https://github.com/openmaptiles/dark-matter-gl-style)
- [Klokantech Basic](https://github.com/openmaptiles/klokantech-basic-gl-style)
- [Klokantech 3D](https://github.com/openmaptiles/klokantech-3d-gl-style)
- [Fiord Color](https://github.com/openmaptiles/fiord-color-gl-style)
- [Toner](https://github.com/openmaptiles/toner-gl-style)
- [OSM Liberty](https://github.com/maputnik/osm-liberty)
We also ported over our favorite old raster styles (TM2). We also ported over our favorite old raster styles (TM2).
@ -73,10 +71,6 @@ To work on OpenMapTiles you need Docker.
- Install [Docker](https://docs.docker.com/engine/installation/). Minimum version is 1.12.3+. - Install [Docker](https://docs.docker.com/engine/installation/). Minimum version is 1.12.3+.
- Install [Docker Compose](https://docs.docker.com/compose/install/). Minimum version is 1.7.1+. - Install [Docker Compose](https://docs.docker.com/compose/install/). Minimum version is 1.7.1+.
### Microsoft Windows Subsystem for Linux (WSL)
Please use Linux `/home/user/` directory, not Windows e.g. `/mnt/c` directory.
### Build ### Build
Build the tileset. Build the tileset.
@ -89,7 +83,7 @@ make
``` ```
You can execute the following manual steps (for better understanding)
or use the provided `quickstart.sh` script to automatically download and import a given area. If no area is given, Albania will be imported. List the available areas with `make list-geofabrik`.
``` ```
./quickstart.sh <area> ./quickstart.sh <area>
@ -103,7 +97,7 @@ Now start up the database container.
make start-db make start-db
``` ```
Import external data from [OpenStreetMapData](http://osmdata.openstreetmap.de/), [Natural Earth](http://www.naturalearthdata.com/) and [OpenStreetMap Lake Labels](https://github.com/openmaptiles/osm-lakelines). Natural Earth country boundaries are used in the few lowest zoom levels. Import external data from [OpenStreetMapData](http://osmdata.openstreetmap.de/), [Natural Earth](http://www.naturalearthdata.com/) and [OpenStreetMap Lake Labels](https://github.com/lukasmartinelli/osm-lakelines).
```bash ```bash
make import-data make import-data
@ -115,11 +109,12 @@ Download OpenStreetMap data extracts from any source like [Geofabrik](http://dow
make download area=albania make download area=albania
``` ```
[Import OpenStreetMap data](https://github.com/openmaptiles/openmaptiles-tools/blob/master/bin/import-osm) with the mapping rules from [Import OpenStreetMap data](https://github.com/openmaptiles/openmaptiles-tools/tree/master/docker/import-osm) with the mapping rules from
`build/mapping.yaml` (which has been created by `make`). Run after any change in layers definition (any change in `mapping.yaml`). `build/mapping.yaml` (which has been created by `make`). Run after any change in layers definition. Also create borders table using extra processing with [osmborder](https://github.com/pnorman/osmborder) tool.
```bash ```bash
make import-osm make import-osm
make import-borders
``` ```
Import labels from Wikidata. If an OSM feature has [Key:wikidata](https://wiki.openstreetmap.org/wiki/Key:wikidata), OpenMapTiles checks the corresponding item in Wikidata and uses its [labels](https://www.wikidata.org/wiki/Help:Label) for the languages listed in [openmaptiles.yaml](openmaptiles.yaml), so the generated vector tiles include names in multiple languages.
@ -131,13 +126,6 @@ make import-wikidata
``` ```
### Work on Layers ### Work on Layers
Each time you modify a layer's `mapping.yaml` file or add new OSM tags, run `make` and `make import-osm` to recreate the tables (potentially with additional data) in PostgreSQL. The new data may also bring in new Wikidata records, so re-import those as well:
```
make clean
make
make import-osm
make import-wikidata
```
Each time you modify layer SQL code run `make` and `make import-sql`. Each time you modify layer SQL code run `make` and `make import-sql`.
@ -147,47 +135,18 @@ make
make import-sql make import-sql
``` ```
Each time you make a modification that adds a new feature to the vector tiles, e.g. adding new OSM tags, modify the layer's
style snippet by adding a new style layer so that the change is propagated visually into the style.
All new style layers must have an `order` value, which determines the order of rendering in the map style.
After the layer style snippet is modified run:
```bash
make build-style
```
Now you are ready to **generate the vector tiles**. By default, `./.env` specifies the entire planet BBOX for zooms 0-7, but running `generate-bbox-file` will analyze the data file and set the `BBOX` param to limit tile generation. Now you are ready to **generate the vector tiles**. By default, `./.env` specifies the entire planet BBOX for zooms 0-7, but running `generate-bbox-file` will analyze the data file and set the `BBOX` param to limit tile generation.
``` ```
make generate-bbox-file # compute data bbox -- not needed for the whole planet or for downloaded area by `make download` make generate-bbox-file # compute data bbox -- not needed for the whole planet
make generate-tiles-pg # generate tiles make generate-tiles # generate tiles
``` ```
### Workflow to generate tiles
If you run these commands from top to bottom, you can be sure that they will generate a `.mbtiles` file out of a `.osm.pbf` file:
```
make clean # clean / remove existing build files
make # generate build files
make start-db # start up the database container.
make import-data # Import external data from OpenStreetMapData, Natural Earth and OpenStreetMap Lake Labels.
make download area=albania # download albania .osm.pbf file -- can be skipped if a .osm.pbf file already exists
make import-osm # import data into postgres
make import-wikidata # import Wikidata
make import-sql # create / import sql functions
make generate-bbox-file # compute data bbox -- not needed for the whole planet or for downloaded area by `make download`
make generate-tiles-pg # generate tiles
```
Instead of calling `make download area=albania` you can place a `.osm.pbf` file in the `data` folder, e.g. `openmaptiles/data/your_area_file.osm.pbf`.
To change the output filename, modify the `MBTILES_FILE` variable in the `.env` file or set the `MBTILES_FILE` environment variable before running `./quickstart.sh` or `make generate-tiles-pg` (e.g., `MBTILES_FILE=monaco.mbtiles ./quickstart.sh monaco`).
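For instance, assuming the `monaco` extract is used, a run with a custom output name could look like this (sketch):

```bash
# Override the output filename for a single quickstart run
MBTILES_FILE=monaco.mbtiles ./quickstart.sh monaco

# ...or for a manual tile-generation step
MBTILES_FILE=monaco.mbtiles make generate-tiles-pg
```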
## License ## License
All code in this repository is under the [BSD license](./LICENSE.md). Design and the cartography decisions encoded in the schema and SQL are licensed under [CC-BY](./LICENSE.md). All code in this repository is under the [BSD license](./LICENSE.md) and the cartography decisions encoded in the schema and SQL are licensed under [CC-BY](./LICENSE.md).
Products or services using maps derived from OpenMapTiles schema need to **visibly credit "OpenMapTiles.org"** or **reference "OpenMapTiles"** with a link to https://openmaptiles.org/. Exceptions to attribution requirement can be granted on request. Products or services using maps derived from OpenMapTiles schema need to visibly credit "OpenMapTiles.org" or reference "OpenMapTiles" with a link to https://openmaptiles.org/. Exceptions to attribution requirement can be granted on request.
For a browsable electronic map based on OpenMapTiles and OpenStreetMap data, the For a browsable electronic map based on OpenMapTiles and OpenStreetMap data, the
credit should appear in the corner of the map. For example: credit should appear in the corner of the map. For example:

View File

@ -1,18 +0,0 @@
# OpenMapTiles SQL Testing
The OpenMapTiles SQL tests ensure that OSM data is properly imported and updated in the OpenMapTiles data schema. The tests work by injecting test OSM data into the database and checking to ensure that the data is properly reflected in the SQL output.
Usage:
`make clean && make test-sql`
## How it works
The SQL tests consist of the following parts:
1. **Test import data**, located in `tests/import`. This test data is in the [OSM XML](https://wiki.openstreetmap.org/wiki/OSM_XML) format and contains the data that should be initially injected into the database. The files are numbered to ensure that each test data file uses OSM id numbers that do not overlap with the other files. For example, the file starting with `100` will use node ids from 100000-199999, way ids from 1000-1999, and relation ids from 100-199.
2. **Test update data**, located in `tests/update`. This test data is in the [osmChange XML](https://wiki.openstreetmap.org/wiki/OsmChange) format, and contains the data that will be used to update the test import data (in order to verify that the update process is working correctly). These files are also numbered using the same scheme as the test import data.
3. **Import SQL test script**, located at `tests/test-post-import.sql`. This script is executed after the test import data has been injected, and runs SQL-based checks to ensure that the import data was properly imported. If there are failures in the tests, an entry will be added to the table `omt_test_failures`, with one record per error that occurs during the import process. A test failure will also fail the build. To inspect the test failure messages, run `make psql` and issue the command `SELECT * FROM omt_test_failures;` (see the sketch after this list).
4. **Update SQL test script**, located at `tests/test-post-update.sql`. This script performs the same function as the import test script, except that it occurs after the test update data has been applied to the database. Note that script will only run if the import script passes all tests.
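Putting these pieces together, a typical run and failure inspection might look like this (a sketch based on the commands and table name described above; the SQL statement is issued inside the psql console):

```bash
# Run the SQL test suite from a clean build
make clean && make test-sql

# If the build reports failures, inspect them from a PostgreSQL console:
make psql
#   and inside psql run:
#   SELECT * FROM omt_test_failures;
```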

106
UPDATE.md
View File

@ -1,116 +1,42 @@
# Keeping the Vector Tiles Updated # Keep the vector tiles updated
Once you have imported OpenMapTiles you can also keep it up to date by importing the latest OSM changes and Once you have imported OpenMapTiles you can also keep it up to date by importing the latest OSM changes and
regenerating the tables. regenerating the tables.
## Import ## Import
You can either keep the database up to date based on the daily (or minutely) OSM change feed You can either keep the database up to date based on the daily OSM change feed
or import specific change files. or import specific change files.
### Choosing the Download Source
While Geofabrik currently provides extracts of basically all countries, it offers only daily updates.
If you need minutely updates, you might want to use openstreetmap.fr instead, for example `make download-osmfr area=africa/eritrea`, which configures minutely updates.
### Preparations
If you plan to keep data updated automatically, before importing any data, make sure to set
```
DIFF_MODE=true
```
in the `.env`
Now download fresh data:
```
make download area=your-area-of-choice
```
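A compact way to perform both preparation steps could look like this (a sketch; the `sed` call is just one way of setting the flag, and `albania` is an example area):

```bash
# Enable diff mode before the initial import, then download fresh data
sed -i 's/^DIFF_MODE=.*/DIFF_MODE=true/' .env
make download area=albania
```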
### Keep Database Updated ### Keep Database Updated
You can use imposm3 to keep the database updated (thanks to the [work by @stirringhalo](https://github.com/openmaptiles/openmaptiles/pull/131)). You can use the new imposm3 feature to keep the database updated (thanks to the [work by @stirringhalo](https://github.com/openmaptiles/openmaptiles/pull/131)). This will automatically download
This will repeatedly download the OSM change feed and import it into the database. the OSM change feed and import it into the database.
In order to be able to update the database, the initial download and import of the OSM data must be done when `DIFF_MODE=true` is set in the `.env` file. After each run you should also have a list of tiles that have updated.
In this mode the initial download also sets the update source and the update intervals.
To start the update process please use
```
make start-update-osm
```
To stop the update process please use
```
make stop-update-osm
```
After each update activation, **imposm3** will store lists of updated tiles in text format in subfolders of the `diffdir`,
named for the date(s) on which the import took place (`YYYYMMDD`).
See [Generate Changed Tiles](#generate-changed-tiles) below on how these files can be used.
#### Note
When the update process is actively updating the DB it is impossible to successfully generate tiles,
as there will be conflicts and deadlocks related to the DB access.
Unfortunately, there is no known way to execute an external command in-between rounds of the `update-osm` process.
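A practical workaround, using the commands above, is to pause the updater for the duration of tile generation (sketch):

```bash
make stop-update-osm      # pause the continuous update process
make generate-tiles-pg    # generate tiles without DB contention
make start-update-osm     # resume continuous updates
```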
#### Troubleshooting
The log file for osm update can be viewed using
``` ```
docker-compose logs --tail 100 --follow update-osm make update-osm
```
Use `Ctrl-C` to stop following the log.
The output will be similar to this:
```
[info] Importing #4889572 including changes till ....... +0000 UTC (2h10m10s behind)
```
It might take some time to catch up with the latest changes, but the "time behind" should decrease until it is only a few minutes.
If it doesn't, you need to download a new extract or check that there are enough system resources to keep up with the changes.
Finally you will get output like this, which indicates that 6 objects were changed:
```
[progress] 3s C: 0/s (0) N: 0/s (0) W: 0/s (6) R: 0/s (0)
```
The process will keep running forever and will print something like this, which just means that there were no changes in the latest changeset:
```
[progress] 0s C: 0/s (0) N: 0/s (0) W: 0/s (0) R: 0/s (0)
``` ```
### Import Change File ### Import Change File
You may perform a one-time import of OSM changes from the `changes.osc.gz` file in your import folder using Given you have a file `changes.osc.gz` in your import folder. Once you ran the import command you should also have a list of tiles that have updated.
``` ```
make import-diff make import-diff
``` ```
Similar to [Keep Database Updated](#keep-database-updated) above, **imposm3** will store the list of updated tiles in text files in subfolders of the `diffdir`,
named for the date on which the import took place (`YYYYMMDD`).
See [Generate Changed Tiles](#generate-changed-tiles) below.
#### Note
There is no `make` command for downloading OSM changes into `changes.osc.gz`.
You may perform this task using [`osmupdate`](https://wiki.openstreetmap.org/wiki/Osmupdate),
[pyosmium-get-changes](https://docs.osmcode.org/pyosmium/latest/tools_get_changes.html),
or downloading the changefile directly from the replication server.
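For example, with `osmupdate` a one-off change file could be produced like this (a sketch; the extract name and replication URL are illustrative and depend on where the data was downloaded from):

```bash
# Fetch all changes since the state of the local extract into data/changes.osc.gz
osmupdate --base-url=https://download.geofabrik.de/europe/albania-updates \
    data/albania.osm.pbf data/changes.osc.gz

# Then apply them to the database
make import-diff
```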
## Generate Changed Tiles ## Generate Changed Tiles
To generate all changed tiles, based on the lists of all updated tiles, and update the existing MBtiles file, please use After the import has finished **imposm3** will store lists of tiles in text format in subfolders of the `diffdir`,
named for the date(s) on which the import took place (`YYYYMMDD`).
Copy and merge the files to `tiles.txt` in the import folder (`data`), either manually or with the following command, which also removes duplicate tiles so they are only generated once:
```
cd data && sort ./*/*.tiles | uniq > tiles.txt
```
Now run the command to read the tilelist and write the vector tiles for it to a new MBTiles.
``` ```
make generate-changed-tiles docker-compose run generate-changed-vectortiles
``` ```
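End to end, updating an existing MBTiles file from a change file therefore looks roughly like this (a sketch combining the steps above):

```bash
make import-diff                                   # apply data/changes.osc.gz to the database
(cd data && sort ./*/*.tiles | uniq > tiles.txt)   # merge the per-day tile lists
make generate-changed-tiles                        # re-render only the changed tiles
```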

View File

@ -1,322 +0,0 @@
#!/bin/bash
set -o errexit
set -o pipefail
set -o nounset
###########################################
# This file is a modified quickstart.sh, intended to load benelux region
#
# If --empty is not given, use preloaded docker image to speed up
if [ $# -gt 0 ] && [[ $1 == --empty ]]; then
export USE_PRELOADED_IMAGE=""
shift
else
export USE_PRELOADED_IMAGE=true
fi
if [ $# -eq 0 ]; then
# default test area
export area=belgium
echo "No parameter - set area=$area "
else
export area=$1
fi
if [ $# -eq 2 ]; then
osm_server=$2
fi
## Min versions ...
MIN_COMPOSE_VER=1.7.1
MIN_DOCKER_VER=1.12.3
STARTTIME=$(date +%s)
STARTDATE=$(date +"%Y-%m-%dT%H:%M%z")
log_file=./quickstart.log
rm -f $log_file
echo " "
echo "====================================================================================="
echo " Docker check & Download images "
echo "-------------------------------------------------------------------------------------"
echo "====> : Please check the Docker and docker-compose version!"
echo " : We are using docker-compose v2 file format! see more at https://docs.docker.com/"
echo " : Minimum required Docker version: $MIN_DOCKER_VER+"
echo " : Minimum required docker-compose version: $MIN_COMPOSE_VER+"
echo " : See the .travis build for the currently supported versions."
echo " : Your docker system:"
docker --version
docker-compose --version
# based on: http://stackoverflow.com/questions/16989598/bash-comparing-version-numbers
function version { echo "$@" | tr -cs '0-9.' '.' | awk -F. '{ printf("%03d%03d%03d\n", $1,$2,$3); }'; }
COMPOSE_VER=$(docker-compose version --short)
if [ "$(version "$COMPOSE_VER")" -lt "$(version "$MIN_COMPOSE_VER")" ]; then
echo "ERR: Your Docker-compose version is known to have bugs, please update docker-compose!"
exit 1
fi
DOCKER_VER="$(docker -v | awk -F '[ ,]+' '{ print $3 }')"
if [ "$(version "$DOCKER_VER")" -lt "$(version "$MIN_DOCKER_VER")" ]; then
echo "ERR: Your Docker version is not compatible. Please Update docker!"
exit 1
fi
echo " "
echo "-------------------------------------------------------------------------------------"
echo "====> : Pulling or refreshing OpenMapTiles docker images "
make refresh-docker-images
##### backup log from here ...
exec &> >(tee -a "$log_file")
echo " "
echo "====================================================================================="
echo " Start processing "
echo "-------------------------------------------------------------------------------------"
echo "====> : OpenMapTiles quickstart! [ https://github.com/openmaptiles/openmaptiles ] "
echo " : This will be logged to the $log_file file (for debugging) and to the screen"
echo " : Area : $area "
echo " : Download server : ${osm_server:-unset (automatic)} "
echo " : Preloaded image : $USE_PRELOADED_IMAGE "
echo " : Git version : $(git rev-parse HEAD) "
echo " : Started : $STARTDATE "
echo " : Your bash version: $BASH_VERSION"
echo " : Your OS : $OSTYPE"
docker --version
docker-compose --version
if [[ "$OSTYPE" == "linux-gnu" ]]; then
echo " "
echo "-------------------------------------------------------------------------------------"
echo " : This is working on x86_64 ; Your kernel is:"
uname -r
uname -m
KERNEL_CPU_VER=$(uname -m)
if [ "$KERNEL_CPU_VER" != "x86_64" ]; then
echo "ERR: Sorry this is working only on x86_64!"
exit 1
fi
echo " : --- Memory, CPU info ---- "
mem=$( grep MemTotal /proc/meminfo | awk '{print $2}' | xargs -I {} echo "scale=4; {}/1024^2" | bc )
echo "System memory (GB): ${mem}"
grep SwapTotal /proc/meminfo
echo "CPU number: $(grep -c processor /proc/cpuinfo) x $(grep "bogomips" /proc/cpuinfo | head -1)"
grep Free /proc/meminfo
else
echo " "
echo "Warning : Platforms other than Linux are less tested"
echo " "
fi
echo " "
echo "-------------------------------------------------------------------------------------"
echo "====> : Stopping running services & removing old containers"
make destroy-db
echo " "
echo "-------------------------------------------------------------------------------------"
echo "====> : Existing OpenMapTiles docker images. Will use version $(source .env && echo "$TOOLS_VERSION")"
docker images | grep openmaptiles
echo " "
echo "-------------------------------------------------------------------------------------"
echo "====> : Create directories if they don't exist"
make init-dirs
echo " "
echo "-------------------------------------------------------------------------------------"
echo "====> : Removing old MBTILES if exists ( ./data/${area}.mbtiles ) "
rm -f "./data/${area}.mbtiles"
echo " "
echo "-------------------------------------------------------------------------------------"
echo "====> : Downloading ${area} from ${osm_server:-any source}..."
make "download${osm_server:+-${osm_server}}"
echo " "
echo "-------------------------------------------------------------------------------------"
echo "====> : Remove old generated source files ( ./build/* ) ( if they exist ) "
make clean
echo " "
echo "-------------------------------------------------------------------------------------"
echo "====> : Code generating from the layer definitions ( ./build/mapping.yaml; ./build/sql/* )"
echo " : The tool source code: https://github.com/openmaptiles/openmaptiles-tools "
echo " : But we generate the tm2source, Imposm mappings and SQL functions from the layer definitions! "
make all
echo " "
echo "-------------------------------------------------------------------------------------"
if [[ "$USE_PRELOADED_IMAGE" == true ]]; then
echo "====> : Start PostgreSQL service using postgis image preloaded with this data:"
echo " : * Water data from http://osmdata.openstreetmap.de"
echo " : Data license: https://osmdata.openstreetmap.de/info/license.html"
echo " : * Natural Earth from http://www.naturalearthdata.com"
echo " : Terms-of-use: http://www.naturalearthdata.com/about/terms-of-use"
echo " : * OpenStreetMap Lakelines data https://github.com/lukasmartinelli/osm-lakelines"
echo " :"
echo " : Source code: https://github.com/openmaptiles/openmaptiles-tools/tree/master/docker/import-data"
echo " : includes all data from the import-data image"
echo " :"
echo " : Use the --empty flag to start with an empty database:"
echo " : ./quickstart.sh --empty albania "
echo " : If desired, you can manually import data by using these commands:"
echo " : make destroy-db"
echo " : make start-db"
echo " : make import-data"
echo " :"
echo " : Source code: https://github.com/openmaptiles/openmaptiles-tools/tree/master/docker/postgis-preloaded"
echo " : Thank you https://www.postgresql.org ! Thank you http://postgis.org !"
make start-db-preloaded
else
echo "====> : Start PostgreSQL service using empty database and importing all the data:"
echo " : * Water data from http://osmdata.openstreetmap.de"
echo " : Data license: https://osmdata.openstreetmap.de/info/license.html"
echo " : * Natural Earth from http://www.naturalearthdata.com"
echo " : Terms-of-use: http://www.naturalearthdata.com/about/terms-of-use"
echo " : * OpenStreetMap Lakelines data https://github.com/lukasmartinelli/osm-lakelines"
echo " :"
echo " : Source code: https://github.com/openmaptiles/openmaptiles-tools/tree/master/docker/import-data"
echo " : includes all data from the import-data image"
echo " :"
echo " : Thank you https://www.postgresql.org ! Thank you http://postgis.org !"
make start-db
make import-data
fi
echo " "
echo "-------------------------------------------------------------------------------------"
echo "====> : Start importing OpenStreetMap data: ${area} -> imposm3[./build/mapping.yaml] -> PostgreSQL"
echo " : Imposm3 documentation: https://imposm.org/docs/imposm3/latest/index.html "
echo " : Thank you Omniscale! "
echo " : Source code: https://github.com/openmaptiles/openmaptiles-tools/tree/master/docker/import-osm "
echo " : The OpenstreetMap data license: https://www.openstreetmap.org/copyright (ODBL) "
echo " : Thank you OpenStreetMap Contributors ! "
make import-osm
echo " "
echo "-------------------------------------------------------------------------------------"
echo "====> : Start importing border ${area} data into PostgreSQL using osmborder"
echo " : Source code: https://github.com/pnorman/osmborder"
echo " : Data license: http://www.openstreetmap.org/copyright"
echo " : Thank you Paul Norman"
make import-borders
echo " "
echo "-------------------------------------------------------------------------------------"
echo "====> : Start importing Wikidata: Wikidata Query Service -> PostgreSQL"
echo " : The Wikidata license: CC0 - https://www.wikidata.org/wiki/Wikidata:Main_Page "
echo " : Thank you Wikidata Contributors ! "
make import-wikidata
echo " "
echo "-------------------------------------------------------------------------------------"
echo "====> : Start SQL postprocessing: ./build/sql/* -> PostgreSQL "
echo " : Source code: https://github.com/openmaptiles/openmaptiles-tools/blob/master/bin/import-sql"
# If the output contains a WARNING, stop further processing
# Adapted from https://unix.stackexchange.com/questions/307562
make import-sql
echo " "
echo "-------------------------------------------------------------------------------------"
echo "====> : Analyze PostgreSQL tables"
make analyze-db
echo " "
echo "-------------------------------------------------------------------------------------"
echo "====> : Testing PostgreSQL tables to match layer definitions metadata"
make test-perf-null
echo " "
echo "-------------------------------------------------------------------------------------"
if [[ "$(source .env ; echo "$BBOX")" = "-180.0,-85.0511,180.0,85.0511" ]]; then
if [[ "$area" != "planet" ]]; then
echo "====> : Compute bounding box for tile generation"
make generate-bbox-file ${MIN_ZOOM:+MIN_ZOOM="${MIN_ZOOM}"} ${MAX_ZOOM:+MAX_ZOOM="${MAX_ZOOM}"}
else
echo "====> : Skipping bbox calculation when generating the entire planet"
fi
else
echo "====> : Bounding box is set in .env file"
fi
echo " "
echo "-------------------------------------------------------------------------------------"
echo "====> : Start generating MBTiles (containing gzipped MVT PBF) from a TM2Source project. "
echo " : TM2Source project definitions : ./build/openmaptiles.tm2source/data.yml "
echo " : Output MBTiles: ./data/${area}.mbtiles "
echo " : Source code: https://github.com/openmaptiles/openmaptiles-tools/tree/master/docker/generate-vectortiles "
echo " : We are using a lot of Mapbox Open Source tools! : https://github.com/mapbox "
echo " : Thank you https://www.mapbox.com !"
echo " : See other MVT tools : https://github.com/mapbox/awesome-vector-tiles "
echo " : "
echo " : You will see a lot of deprecated warning in the log! This is normal! "
echo " : like : Mapnik LOG> ... is deprecated and will be removed in Mapnik 4.x ... "
make generate-tiles
echo " "
echo "-------------------------------------------------------------------------------------"
echo "====> : Stop PostgreSQL service ( but we keep PostgreSQL data volume for debugging )"
make stop-db
echo " "
echo "-------------------------------------------------------------------------------------"
echo "====> : Inputs - Outputs md5sum for debugging "
rm -f ./data/quickstart_checklist.chk
{
find build -type f | sort | xargs md5sum
find data -type f | sort | xargs md5sum
} >> ./data/quickstart_checklist.chk
cat ./data/quickstart_checklist.chk
ENDTIME=$(date +%s)
echo " "
echo " "
echo "-------------------------------------------------------------------------------------"
echo "-- S u m m a r y --"
echo "-------------------------------------------------------------------------------------"
echo " "
echo "-------------------------------------------------------------------------------------"
echo "====> : (disk space) We have created a lot of docker images: "
echo " : Hint: you can remove with: docker rmi IMAGE "
docker images | grep openmaptiles
echo " "
echo "-------------------------------------------------------------------------------------"
echo "====> : (disk space) We have created the new vectortiles ( ./data/${area}.mbtiles ) "
echo " : Please respect the licenses (OdBL for OSM data) of the sources when distributing the MBTiles file."
echo " : Data directory content:"
ls -la ./data
echo " "
echo "-------------------------------------------------------------------------------------"
echo "The ./quickstart.sh $area is finished! "
echo "It took $((ENDTIME - STARTTIME)) seconds to complete"
echo "We saved the log file to $log_file (for debugging) You can compare with the travis log !"
echo " "
echo "Start experimenting and check the QUICKSTART.MD file!"
echo " "
echo "* Use make start-maputnik to explore tile generation on request"
echo "* Use make start-tileserver to view pre-generated tiles"
echo " "
echo "Available help commands (make help) "
make help
echo "-------------------------------------------------------------------------------------"
echo " Acknowledgments "
echo " Generated vector tiles are produced work of OpenStreetMap data. "
echo " Such tiles are reusable under CC-BY license granted by OpenMapTiles team: "
echo " https://github.com/openmaptiles/openmaptiles/#license "
echo " Maps made with these vector tiles must display a visible credit: "
echo " © OpenMapTiles © OpenStreetMap contributors "
echo " "
echo " Thanks to all free, open source software developers and Open Data Contributors! "
echo "-------------------------------------------------------------------------------------"

View File

@ -1,46 +0,0 @@
#!/usr/bin/env bash
set -e
# Test script for generating tiles for Lummen and region (e.g. including Hasselt)
export SOURCE=osmfr
export AREA=europe/belgium
export BBOX=2.255544,49.857797,6.586904,51.389246
# Which zooms to generate in make generate-tiles
export MIN_ZOOM=0
export MAX_ZOOM=16
# Update the .env to match
sed -i "s/MIN_ZOOM=.*/MIN_ZOOM=${MIN_ZOOM}/" .env
sed -i "s/MAX_ZOOM=.*/MAX_ZOOM=${MAX_ZOOM}/" .env
sed -i "s/BBOX=.*/BBOX=${BBOX}/" .env
# Setup
make clean
make DC_OPTS=--rm
# Start from a clean db
make start-db
make destroy-db
make import-data
# ALTERNATIVE would be this
#make start-db-preloaded
# Download
make download-${SOURCE} area="${AREA}"
# Import (import-borders sometimes crashes on the first run, so its failure is ignored below)
make import-osm
make import-wikidata
make import-borders || true
make import-sql
make analyze-db
# (This potentially screws stuff up?!)
#rm -rf data/${AREA}.dc-config.yml
#make generate-dc-config
# Generate
make generate-tiles-pg
make stop-db

View File

@ -1,46 +0,0 @@
#!/usr/bin/env bash
set -e
# Script for generating tiles for what cerxes server should host
export SOURCE=geofabrik
export AREA=europe
export BBOX=-4.382176,47.705913,13.053127,53.803762
# Which zooms to generate in make generate-tiles
export MIN_ZOOM=0
export MAX_ZOOM=16
# Update the .env to match
sed -i "s/MIN_ZOOM=.*/MIN_ZOOM=${MIN_ZOOM}/" .env
sed -i "s/MAX_ZOOM=.*/MAX_ZOOM=${MAX_ZOOM}/" .env
sed -i "s/BBOX=.*/BBOX=${BBOX}/" .env
# Setup
make clean
make DC_OPTS=--rm
# Start from a clean db
make start-db
make destroy-db
make import-data
# ALTERNATIVE would be this
#make start-db-preloaded
# Download
make download-${SOURCE} area="${AREA}"
# Import (import-borders sometimes crashes on the first run, so its failure is ignored below)
make import-osm
make import-wikidata
make import-borders || true
make import-sql
make analyze-db
# (This potentially screws stuff up?!)
#rm -rf data/${AREA}.dc-config.yml
#make generate-dc-config
# Generate
make generate-tiles-pg
make stop-db

View File

@ -12,25 +12,14 @@ services:
postgres: postgres:
image: "${POSTGIS_IMAGE:-openmaptiles/postgis}:${TOOLS_VERSION}" image: "${POSTGIS_IMAGE:-openmaptiles/postgis}:${TOOLS_VERSION}"
# image: harbor.cerxes.net/openmaptiles/postgis:5.3
# Use "command: postgres -c jit=off" for PostgreSQL 11+ because of slow large MVT query processing # Use "command: postgres -c jit=off" for PostgreSQL 11+ because of slow large MVT query processing
# Use "shm_size: 512m" if you want to prevent a possible 'No space left on device' during 'make generate-tiles-pg'
volumes: volumes:
- pgdata:/var/lib/postgresql/data - pgdata:/var/lib/postgresql/data
networks: networks:
- postgres - postgres
ports: ports:
- "${PGPORT:-5432}:${PGPORT:-5432}" - "5432"
env_file: .env env_file: .env-postgres
environment:
# postgress container uses old variable names
POSTGRES_DB: ${PGDATABASE:-openmaptiles}
POSTGRES_USER: ${PGUSER:-openmaptiles}
POSTGRES_PASSWORD: ${PGPASSWORD:-openmaptiles}
PGPORT: ${PGPORT:-5432}
shm_size: 16384MB
#command: /docker-entrypoint-initdb.d/01_tune-postgis.sh
#command: /docker-entrypoint-initdb.d/01_tune-postgis.sh && echo 'tuned' && postgres -c 'config_file=/etc/postgresql/postgresql.conf'
import-data: import-data:
image: "openmaptiles/import-data:${TOOLS_VERSION}" image: "openmaptiles/import-data:${TOOLS_VERSION}"
@ -38,7 +27,7 @@ services:
networks: networks:
- postgres - postgres
openmaptiles-tools: &openmaptiles-tools openmaptiles-tools:
image: "openmaptiles/openmaptiles-tools:${TOOLS_VERSION}" image: "openmaptiles/openmaptiles-tools:${TOOLS_VERSION}"
env_file: .env env_file: .env
environment: environment:
@ -53,13 +42,12 @@ services:
BBOX: ${BBOX} BBOX: ${BBOX}
# Imposm configuration file describes how to load updates when enabled # Imposm configuration file describes how to load updates when enabled
IMPOSM_CONFIG_FILE: ${IMPOSM_CONFIG_FILE} IMPOSM_CONFIG_FILE: ${IMPOSM_CONFIG_FILE}
# Which files to use during import-borders processing
BORDERS_CLEANUP_FILE: ${BORDERS_CLEANUP_FILE}
BORDERS_PBF_FILE: ${BORDERS_PBF_FILE}
BORDERS_CSV_FILE: ${BORDERS_CSV_FILE}
# Control import-sql processes # Control import-sql processes
MAX_PARALLEL_PSQL: ${MAX_PARALLEL_PSQL} MAX_PARALLEL_PSQL: ${MAX_PARALLEL_PSQL}
PGDATABASE: ${PGDATABASE:-openmaptiles}
PGUSER: ${PGUSER:-openmaptiles}
PGPASSWORD: ${PGPASSWORD:-openmaptiles}
PGPORT: ${PGPORT:-5432}
MBTILES_FILE: ${MBTILES_FILE}
networks: networks:
- postgres - postgres
volumes: volumes:
@ -69,11 +57,6 @@ services:
- ./build/sql:/sql - ./build/sql:/sql
- ./build:/mapping - ./build:/mapping
- ./cache:/cache - ./cache:/cache
- ./style:/style
update-osm:
<<: *openmaptiles-tools
command: import-update
generate-changed-vectortiles: generate-changed-vectortiles:
image: "openmaptiles/generate-vectortiles:${TOOLS_VERSION}" image: "openmaptiles/generate-vectortiles:${TOOLS_VERSION}"
@ -85,13 +68,10 @@ services:
- postgres - postgres
env_file: .env env_file: .env
environment: environment:
FILTER_MAPNIK_OUTPUT: ${FILTER_MAPNIK_OUTPUT}
MBTILES_NAME: ${MBTILES_FILE} MBTILES_NAME: ${MBTILES_FILE}
# Control tilelive-copy threads # Control tilelive-copy threads
COPY_CONCURRENCY: ${COPY_CONCURRENCY} COPY_CONCURRENCY: ${COPY_CONCURRENCY}
PGDATABASE: ${PGDATABASE:-openmaptiles}
PGUSER: ${PGUSER:-openmaptiles}
PGPASSWORD: ${PGPASSWORD:-openmaptiles}
PGPORT: ${PGPORT:-5432}
generate-vectortiles: generate-vectortiles:
image: "openmaptiles/generate-vectortiles:${TOOLS_VERSION}" image: "openmaptiles/generate-vectortiles:${TOOLS_VERSION}"
@ -102,6 +82,7 @@ services:
- postgres - postgres
env_file: .env env_file: .env
environment: environment:
FILTER_MAPNIK_OUTPUT: ${FILTER_MAPNIK_OUTPUT}
MBTILES_NAME: ${MBTILES_FILE} MBTILES_NAME: ${MBTILES_FILE}
BBOX: ${BBOX} BBOX: ${BBOX}
MIN_ZOOM: ${MIN_ZOOM} MIN_ZOOM: ${MIN_ZOOM}
@ -109,10 +90,6 @@ services:
# Control tilelive-copy threads # Control tilelive-copy threads
COPY_CONCURRENCY: ${COPY_CONCURRENCY} COPY_CONCURRENCY: ${COPY_CONCURRENCY}
# #
PGDATABASE: ${PGDATABASE:-openmaptiles}
PGUSER: ${PGUSER:-openmaptiles}
PGPASSWORD: ${PGPASSWORD:-openmaptiles}
PGPORT: ${PGPORT:-5432}
postserve: postserve:
image: "openmaptiles/openmaptiles-tools:${TOOLS_VERSION}" image: "openmaptiles/openmaptiles-tools:${TOOLS_VERSION}"
@ -126,22 +103,3 @@ services:
- "${PPORT:-8090}:${PPORT:-8090}" - "${PPORT:-8090}:${PPORT:-8090}"
volumes: volumes:
- .:/tileset - .:/tileset
maputnik_editor:
image: "maputnik/editor"
ports:
- "8088:8888"
tileserver-gl:
image: "maptiler/tileserver-gl:latest"
command:
- --port
- "${TPORT:-8080}"
- --config
- "/style/config.json"
ports:
- "${TPORT:-8080}:${TPORT:-8080}"
volumes:
- ./data:/data
- ./style:/style
- ./build:/build

150
europe.sh
View File

@ -1,150 +0,0 @@
set -e
#area=europe
area=planet
make refresh-docker-images
# Show mem info
mem=$( grep MemTotal /proc/meminfo | awk '{print $2}' | xargs -I {} echo "scale=4; {}/1024^2" | bc )
echo "System memory (GB): ${mem}"
grep SwapTotal /proc/meminfo
echo "CPU number: $(grep -c processor /proc/cpuinfo) x $(grep "bogomips" /proc/cpuinfo | head -1)"
grep Free /proc/meminfo
make destroy-db
make init-dirs
# rm -f "./data/${area}.mbtiles"
#echo "====> : Downloading ${area} from ${osm_server:-any source}..."
#area=$area make "download${osm_server:+-${osm_server}}"
make clean
make all
make start-db
make import-data
# about 1.25hrs, Presumably Disk IO limited.
# If there is an option to double the amount of processes this might
# also help. Seeing about 50% usage per core
# [May 21 08:52:53] [INFO] Reading OSM data took: 22m9.040318577s
# [May 21 09:42:37] [INFO] Writing OSM data took: 49m44.555518107s
# [May 21 09:45:15] [INFO] Importing OSM data took: 52m21.813870474s
# [May 21 09:45:15] [INFO] [PostGIS] Rotating tables took: 38.69062ms
# [May 21 09:45:15] [INFO] Imposm took: 1h14m30.893125729s
make import-osm area=$area
make import-borders area=$area
make import-wikidata
# It is not building the materialized views in parallel despite postgres 12?!
# argh
make import-sql
echo " "
echo "-------------------------------------------------------------------------------------"
echo "====> : Analyze PostgreSQL tables"
make analyze-db
echo " "
echo "-------------------------------------------------------------------------------------"
echo "====> : Testing PostgreSQL tables to match layer definitions metadata"
# TODO: this right here shows us whats wrong. settings aren't optimal (see performance tips tabs)
# shared_buffersm work_mem, max_worker_processes,
# TODO, Ideal solution: Autoconfigure according to performance tips (and max_worker_processes == HT Cores ( or 1.5X HT Cores)
# (1.5x HT Cores might make sense as most work is probably memory-limited (latency), not CPU limited)
make test-perf-null
#echo " "
#echo "-------------------------------------------------------------------------------------"
#
#if [[ "$(source .env ; echo "$BBOX")" = "-180.0,-85.0511,180.0,85.0511" ]]; then
# if [[ "$area" != "planet" ]]; then
# echo "====> : Compute bounding box for tile generation"
# make generate-bbox-file ${MIN_ZOOM:+MIN_ZOOM="${MIN_ZOOM}"} ${MAX_ZOOM:+MAX_ZOOM="${MAX_ZOOM}"}
# else
# echo "====> : Skipping bbox calculation when generating the entire planet"
# fi
#
#else
# echo "====> : Bounding box is set in .env file"
#fi
#
##echo " "
##echo "-------------------------------------------------------------------------------------"
##echo "====> : Start generating MBTiles (containing gzipped MVT PBF) from a TM2Source project. "
##echo " : TM2Source project definitions : ./build/openmaptiles.tm2source/data.yml "
##echo " : Output MBTiles: ./data/${area}.mbtiles "
##echo " : Source code: https://github.com/openmaptiles/openmaptiles-tools/tree/master/docker/generate-vectortiles "
##echo " : We are using a lot of Mapbox Open Source tools! : https://github.com/mapbox "
##echo " : Thank you https://www.mapbox.com !"
##echo " : See other MVT tools : https://github.com/mapbox/awesome-vector-tiles "
##echo " : "
##echo " : You will see a lot of deprecated warning in the log! This is normal! "
##echo " : like : Mapnik LOG> ... is deprecated and will be removed in Mapnik 4.x ... "
make generate-tiles area=europe
#OVERLAY
##
##echo " "
##echo "-------------------------------------------------------------------------------------"
##echo "====> : Stop PostgreSQL service ( but we keep PostgreSQL data volume for debugging )"
##make stop-db
##
##echo " "
##echo "-------------------------------------------------------------------------------------"
##echo "====> : Inputs - Outputs md5sum for debugging "
##rm -f ./data/quickstart_checklist.chk
##{
## find build -type f | sort | xargs md5sum
## find data -type f | sort | xargs md5sum
##} >> ./data/quickstart_checklist.chk
##cat ./data/quickstart_checklist.chk
##
##ENDTIME=$(date +%s)
##
##echo " "
##echo " "
##echo "-------------------------------------------------------------------------------------"
##echo "-- S u m m a r y --"
##echo "-------------------------------------------------------------------------------------"
##echo " "
##echo "-------------------------------------------------------------------------------------"
##echo "====> : (disk space) We have created a lot of docker images: "
##echo " : Hint: you can remove with: docker rmi IMAGE "
##docker images | grep openmaptiles
##
##echo " "
##echo "-------------------------------------------------------------------------------------"
##echo "====> : (disk space) We have created the new vectortiles ( ./data/${area}.mbtiles ) "
##echo " : Please respect the licenses (OdBL for OSM data) of the sources when distributing the MBTiles file."
##echo " : Data directory content:"
##ls -la ./data
##
##echo " "
##echo "-------------------------------------------------------------------------------------"
##echo "The ./quickstart.sh $area is finished! "
##echo "It took $((ENDTIME - STARTTIME)) seconds to complete"
##echo "We saved the log file to $log_file (for debugging) You can compare with the travis log !"
##echo " "
##echo "Start experimenting and check the QUICKSTART.MD file!"
##echo " "
##echo "* Use make start-maputnik to explore tile generation on request"
##echo "* Use make start-tileserver to view pre-generated tiles"
##echo " "
##echo "Available help commands (make help) "
##make help
##
##echo "-------------------------------------------------------------------------------------"
##echo " Acknowledgments "
##echo " Generated vector tiles are produced work of OpenStreetMap data. "
##echo " Such tiles are reusable under CC-BY license granted by OpenMapTiles team: "
##echo " https://github.com/openmaptiles/openmaptiles/#license "
##echo " Maps made with these vector tiles must display a visible credit: "
##echo " © OpenMapTiles © OpenStreetMap contributors "
##echo " "
##echo " Thanks to all free, open source software developers and Open Data Contributors! "
##echo "-------------------------------------------------------------------------------------"

View File

@ -1,37 +0,0 @@
#!/bin/sh
# A script to run the "integrity" continuous integration script.
area=monaco
echo MIN_ZOOM=0 >> .env
echo MAX_ZOOM=14 >> .env
./quickstart.sh $area
export TEST_MODE=yes
make generate-devdoc
area=europe/monaco
echo DIFF_MODE=true >> .env
# Cleanup
rm -fr data build cache
# Create data/$area.repl.json
make download-geofabrik area=$area
# Download 2+ month old data
export old_date=$(date --date="$(date +%Y-%m-15) -2 month" +'%y%m01')
echo Downloading $old_date extract of $area
docker-compose run --rm --user=$(id -u):$(id -g) openmaptiles-tools sh -c "wget -O data/$area.osm.pbf http://download.geofabrik.de/$area-$old_date.osm.pbf"
# Initial import and tile generation
./quickstart.sh $area
sleep 2
echo Downloading updates
# Loop to recover from potential "ERROR 429: Too Many Requests"
docker-compose run --rm --user=$(id -u):$(id -g) openmaptiles-tools sh -c "
while ! osmupdate --keep-tempfiles --base-url=$(sed -n 's/ *\"replication_url\": //p' data/$area.repl.json) data/$area.osm.pbf data/changes.osc.gz ; do
sleep 2;
echo Sleeping...;
sleep 630;
done"
echo Downloading updates completed
echo Importing updates
make import-diff
echo Generating new tiles
make generate-tiles-pg

View File

@ -1,11 +1,11 @@
-- etldoc: layer_aerodrome_label[shape=record fillcolor=lightpink, style="rounded,filled", label="layer_aerodrome_label | <z8> z8 | <z9> z9 | <z10_> z10+" ] ; -- etldoc: layer_aerodrome_label[shape=record fillcolor=lightpink, style="rounded,filled", label="layer_aerodrome_label | <z10_> z10+" ] ;
CREATE OR REPLACE FUNCTION layer_aerodrome_label(bbox geometry, CREATE OR REPLACE FUNCTION layer_aerodrome_label(bbox geometry,
zoom_level integer) zoom_level integer)
RETURNS TABLE RETURNS TABLE
( (
id bigint, osm_id bigint,
geometry geometry, geometry geometry,
name text, name text,
name_en text, name_en text,
@ -19,37 +19,18 @@ CREATE OR REPLACE FUNCTION layer_aerodrome_label(bbox geometry,
) )
AS AS
$$ $$
SELECT
-- etldoc: osm_aerodrome_label_point -> layer_aerodrome_label:z8
-- etldoc: osm_aerodrome_label_point -> layer_aerodrome_label:z9
ABS(osm_id) AS id, -- mvt feature IDs can't be negative
geometry,
name,
COALESCE(NULLIF(name_en, ''), name) AS name_en,
COALESCE(NULLIF(name_de, ''), name, name_en) AS name_de,
tags,
aerodrome_type AS class,
NULLIF(iata, '') AS iata,
NULLIF(icao, '') AS icao,
substring(ele FROM E'^(-?\\d+)(\\D|$)')::int AS ele,
round(substring(ele FROM E'^(-?\\d+)(\\D|$)')::int * 3.2808399)::int AS ele_ft
FROM osm_aerodrome_label_point
WHERE geometry && bbox
AND aerodrome_type = 'international'
AND iata <> ''
AND zoom_level BETWEEN 8 AND 9
UNION ALL
SELECT SELECT
-- etldoc: osm_aerodrome_label_point -> layer_aerodrome_label:z10_ -- etldoc: osm_aerodrome_label_point -> layer_aerodrome_label:z10_
ABS(osm_id) AS id, -- mvt feature IDs can't be negative osm_id,
geometry, geometry,
name, name,
COALESCE(NULLIF(name_en, ''), name) AS name_en, COALESCE(NULLIF(name_en, ''), name) AS name_en,
COALESCE(NULLIF(name_de, ''), name, name_en) AS name_de, COALESCE(NULLIF(name_de, ''), name, name_en) AS name_de,
tags, tags,
aerodrome_type AS class, CASE
%%FIELD_MAPPING: class %%
ELSE 'other'
END AS class,
NULLIF(iata, '') AS iata, NULLIF(iata, '') AS iata,
NULLIF(icao, '') AS icao, NULLIF(icao, '') AS icao,
substring(ele FROM E'^(-?\\d+)(\\D|$)')::int AS ele, substring(ele FROM E'^(-?\\d+)(\\D|$)')::int AS ele,

View File

@ -1,13 +1,13 @@
layer: layer:
id: aerodrome_label id: "aerodrome_label"
description: | description: |
[Aerodrome labels](http://wiki.openstreetmap.org/wiki/Tag:aeroway%3Daerodrome) [Aerodrome labels](http://wiki.openstreetmap.org/wiki/Tag:aeroway%3Daerodrome)
buffer_size: 64 buffer_size: 64
srs: +proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over srs: +proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over
fields: fields:
name: The OSM [`name`](http://wiki.openstreetmap.org/wiki/Key:name) value of the aerodrome. Language-specific values are in `name:xx`. name: The OSM [`name`](http://wiki.openstreetmap.org/wiki/Key:name) value of the aerodrome.
name_en: English name `name:en` if available, otherwise `name`. This is deprecated and will be removed in a future release in favor of `name:en`. name_en: English name `name:en` if available, otherwise `name`.
name_de: German name `name:de` if available, otherwise `name` or `name:en`. This is deprecated and will be removed in a future release in favor of `name:de`. name_de: German name `name:de` if available, otherwise `name` or `name:en`.
class: class:
description: | description: |
Distinguish between more and less important aerodromes. Distinguish between more and less important aerodromes.
@ -38,10 +38,10 @@ layer:
ele_ft: Elevation (`ele`) in feets. ele_ft: Elevation (`ele`) in feets.
datasource: datasource:
geometry_field: geometry geometry_field: geometry
key_field: id key_field: osm_id
key_field_as_attribute: no key_field_as_attribute: no
srid: 900913 srid: 900913
query: (SELECT id, geometry, name, name_en, name_de, {name_languages}, class, iata, icao, ele, ele_ft FROM layer_aerodrome_label(!bbox!, z(!scale_denominator!))) AS t query: (SELECT osm_id, geometry, name, name_en, name_de, {name_languages}, class, iata, icao, ele, ele_ft FROM layer_aerodrome_label(!bbox!, z(!scale_denominator!))) AS t
schema: schema:
- ./update_aerodrome_label_point.sql - ./update_aerodrome_label_point.sql
- ./aerodrome_label.sql - ./aerodrome_label.sql

View File

@ -1,69 +0,0 @@
{
"layers": [
{
"id": "airport-label-major",
"type": "symbol",
"source": "openmaptiles",
"source-layer": "aerodrome_label",
"minzoom": 8,
"maxzoom": 17,
"layout": {
"icon-size": 1,
"text-font": [
"Noto Sans Italic"
],
"text-size": {
"stops": [
[
8,
10
],
[
14,
12
]
]
},
"icon-image": "aerodrome.12",
"text-field": {
"stops": [
[
8,
" "
],
[
11,
"{name:latin}\n{name:nonlatin}"
]
]
},
"visibility": "visible",
"text-anchor": "top",
"text-offset": [
0,
0.6
],
"text-padding": 2,
"text-optional": true,
"symbol-z-order": "auto",
"text-max-width": 9,
"icon-allow-overlap": false,
"text-allow-overlap": false
},
"paint": {
"text-color": "#5e3b9e",
"text-halo-blur": 0.5,
"text-halo-color": "rgba(255, 255, 255, 0.8)",
"text-halo-width": 1
},
"filter": [
"all",
[
"has",
"iata"
]
],
"order": 185
}
]
}

View File

@ -2,17 +2,11 @@ DROP TRIGGER IF EXISTS trigger_flag ON osm_aerodrome_label_point;
DROP TRIGGER IF EXISTS trigger_store ON osm_aerodrome_label_point; DROP TRIGGER IF EXISTS trigger_store ON osm_aerodrome_label_point;
DROP TRIGGER IF EXISTS trigger_refresh ON aerodrome_label.updates; DROP TRIGGER IF EXISTS trigger_refresh ON aerodrome_label.updates;
-- Partial index for zoom 8/9 queries
CREATE INDEX IF NOT EXISTS osm_aerodrome_label_point_type_partial_idx
ON osm_aerodrome_label_point USING gist (geometry)
WHERE aerodrome_type = 'international'
AND iata <> '';
CREATE SCHEMA IF NOT EXISTS aerodrome_label; CREATE SCHEMA IF NOT EXISTS aerodrome_label;
CREATE TABLE IF NOT EXISTS aerodrome_label.osm_ids CREATE TABLE IF NOT EXISTS aerodrome_label.osm_ids
( (
osm_id bigint PRIMARY KEY osm_id bigint
); );
-- etldoc: osm_aerodrome_label_point -> osm_aerodrome_label_point -- etldoc: osm_aerodrome_label_point -> osm_aerodrome_label_point
@ -28,17 +22,6 @@ $$
WHERE (full_update OR osm_id IN (SELECT osm_id FROM aerodrome_label.osm_ids)) WHERE (full_update OR osm_id IN (SELECT osm_id FROM aerodrome_label.osm_ids))
AND COALESCE(tags->'name:latin', tags->'name:nonlatin', tags->'name_int') IS NULL AND COALESCE(tags->'name:latin', tags->'name:nonlatin', tags->'name_int') IS NULL
AND tags != update_tags(tags, geometry); AND tags != update_tags(tags, geometry);
UPDATE osm_aerodrome_label_point
SET aerodrome_type=
CASE
%%FIELD_MAPPING: class %%
ELSE 'other' END
WHERE (full_update OR osm_id IN (SELECT osm_id FROM aerodrome_label.osm_ids))
AND aerodrome_type !=
CASE
%%FIELD_MAPPING: class %%
ELSE 'other' END;
$$ LANGUAGE SQL; $$ LANGUAGE SQL;
SELECT update_aerodrome_label_point(true); SELECT update_aerodrome_label_point(true);
@ -48,7 +31,11 @@ SELECT update_aerodrome_label_point(true);
CREATE OR REPLACE FUNCTION aerodrome_label.store() RETURNS trigger AS CREATE OR REPLACE FUNCTION aerodrome_label.store() RETURNS trigger AS
$$ $$
BEGIN BEGIN
INSERT INTO aerodrome_label.osm_ids VALUES (NEW.osm_id) ON CONFLICT (osm_id) DO NOTHING; IF (tg_op = 'DELETE') THEN
INSERT INTO aerodrome_label.osm_ids VALUES (OLD.osm_id);
ELSE
INSERT INTO aerodrome_label.osm_ids VALUES (NEW.osm_id);
END IF;
RETURN NULL; RETURN NULL;
END; END;
$$ LANGUAGE plpgsql; $$ LANGUAGE plpgsql;
@ -73,11 +60,6 @@ DECLARE
t TIMESTAMP WITH TIME ZONE := clock_timestamp(); t TIMESTAMP WITH TIME ZONE := clock_timestamp();
BEGIN BEGIN
RAISE LOG 'Refresh aerodrome_label'; RAISE LOG 'Refresh aerodrome_label';
-- Analyze tracking and source tables before performing update
ANALYZE aerodrome_label.osm_ids;
ANALYZE osm_aerodrome_label_point;
PERFORM update_aerodrome_label_point(false); PERFORM update_aerodrome_label_point(false);
-- noinspection SqlWithoutWhere -- noinspection SqlWithoutWhere
DELETE FROM aerodrome_label.osm_ids; DELETE FROM aerodrome_label.osm_ids;
@ -90,17 +72,15 @@ END;
$$ LANGUAGE plpgsql; $$ LANGUAGE plpgsql;
CREATE TRIGGER trigger_store CREATE TRIGGER trigger_store
AFTER INSERT OR UPDATE AFTER INSERT OR UPDATE OR DELETE
ON osm_aerodrome_label_point ON osm_aerodrome_label_point
FOR EACH ROW FOR EACH ROW
WHEN (pg_trigger_depth() < 1)
EXECUTE PROCEDURE aerodrome_label.store(); EXECUTE PROCEDURE aerodrome_label.store();
CREATE TRIGGER trigger_flag CREATE TRIGGER trigger_flag
AFTER INSERT OR UPDATE AFTER INSERT OR UPDATE OR DELETE
ON osm_aerodrome_label_point ON osm_aerodrome_label_point
FOR EACH STATEMENT FOR EACH STATEMENT
WHEN (pg_trigger_depth() < 1)
EXECUTE PROCEDURE aerodrome_label.flag(); EXECUTE PROCEDURE aerodrome_label.flag();
CREATE CONSTRAINT TRIGGER trigger_refresh CREATE CONSTRAINT TRIGGER trigger_refresh
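
Editor's note: the triggers in this file follow the usual OpenMapTiles incremental-update pattern: a row-level trigger records changed ids in a tracking table, a statement-level trigger flags that something changed, and a deferred constraint trigger runs the refresh once at commit. The pg_trigger_depth() < 1 guard added on one side of this diff keeps the refresh's own UPDATE from re-firing the triggers. Below is a minimal, self-contained sketch of the pattern on a toy table; all names (demo_points, demo.*) are illustrative, not the objects used by this layer.

-- Minimal sketch of the store/flag/refresh trigger pattern (illustrative names).
CREATE TABLE IF NOT EXISTS demo_points (osm_id bigint PRIMARY KEY, name text);
CREATE SCHEMA IF NOT EXISTS demo;
CREATE TABLE IF NOT EXISTS demo.osm_ids (osm_id bigint PRIMARY KEY);
CREATE TABLE IF NOT EXISTS demo.updates (id serial PRIMARY KEY, t timestamptz DEFAULT now());

CREATE OR REPLACE FUNCTION demo.store() RETURNS trigger AS
$$
BEGIN
    -- remember which rows changed so the refresh can stay incremental
    INSERT INTO demo.osm_ids VALUES (NEW.osm_id) ON CONFLICT (osm_id) DO NOTHING;
    RETURN NULL;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION demo.flag() RETURNS trigger AS
$$
BEGIN
    -- one marker row per statement is enough to request a refresh at commit
    INSERT INTO demo.updates DEFAULT VALUES;
    RETURN NULL;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION demo.refresh() RETURNS trigger AS
$$
BEGIN
    RAISE LOG 'Refreshing demo layer for % changed rows',
        (SELECT count(*) FROM demo.osm_ids);
    -- ... incremental recomputation limited to demo.osm_ids would go here ...
    DELETE FROM demo.osm_ids;
    DELETE FROM demo.updates;
    RETURN NULL;
END;
$$ LANGUAGE plpgsql;

CREATE TRIGGER trigger_store
    AFTER INSERT OR UPDATE ON demo_points
    FOR EACH ROW
    WHEN (pg_trigger_depth() < 1)
EXECUTE PROCEDURE demo.store();

CREATE TRIGGER trigger_flag
    AFTER INSERT OR UPDATE ON demo_points
    FOR EACH STATEMENT
    WHEN (pg_trigger_depth() < 1)
EXECUTE PROCEDURE demo.flag();

CREATE CONSTRAINT TRIGGER trigger_refresh
    AFTER INSERT ON demo.updates
    DEFERRABLE INITIALLY DEFERRED
    FOR EACH ROW
EXECUTE PROCEDURE demo.refresh();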

Binary image file not shown: 165 KiB before → 166 KiB after.

Binary image file not shown: 32 KiB before and after.

View File

@ -1,203 +0,0 @@
{
"layers": [
{
"id": "aeroway_fill",
"type": "fill",
"source": "openmaptiles",
"source-layer": "aeroway",
"minzoom": 11,
"layout": {
"visibility": "visible"
},
"paint": {
"fill-color": {
"stops": [
[
6,
"rgba(223, 223, 228, 1)"
],
[
12,
"rgba(232, 231, 223, 1)"
]
]
},
"fill-opacity": 1
},
"metadata": {},
"filter": [
"==",
"$type",
"Polygon"
],
"order": 3
},
{
"id": "aeroway_runway",
"type": "line",
"source": "openmaptiles",
"source-layer": "aeroway",
"minzoom": 11,
"layout": {
"visibility": "visible"
},
"paint": {
"line-color": "rgba(178, 181, 209, 1)",
"line-width": {
"base": 1.2,
"stops": [
[
11,
3
],
[
20,
48
]
]
},
"line-opacity": 1
},
"metadata": {},
"filter": [
"all",
[
"==",
"$type",
"LineString"
],
[
"==",
"class",
"runway"
]
],
"order": 22
},
{
"id": "aeroway_taxiway",
"type": "line",
"source": "openmaptiles",
"source-layer": "aeroway",
"minzoom": 11,
"layout": {
"visibility": "visible"
},
"paint": {
"line-color": "rgba(178, 181, 209, 1)",
"line-width": {
"base": 1.2,
"stops": [
[
11,
1
],
[
20,
24
]
]
},
"line-opacity": 1
},
"metadata": {},
"filter": [
"all",
[
"==",
"$type",
"LineString"
],
[
"==",
"class",
"taxiway"
]
],
"order": 23
},
{
"id": "airport_label",
"type": "symbol",
"source": "openmaptiles",
"source-layer": "aeroway",
"minzoom": 14,
"layout": {
"text-font": [
"Noto Sans Italic",
"Noto Sans Regular"
],
"text-size": {
"stops": [
[
15,
9
],
[
19,
15
]
]
},
"text-field": "{ref}",
"visibility": "visible",
"symbol-placement": "line"
},
"paint": {
"text-color": "#333333",
"text-halo-color": "rgba(255, 255, 255, 0.8)",
"text-halo-width": 1
},
"filter": [
"all",
[
"in",
"class",
"runway",
"taxiway"
]
],
"order": 186
},
{
"id": "airport_gate",
"type": "symbol",
"source": "openmaptiles",
"source-layer": "aeroway",
"minzoom": 16.5,
"layout": {
"text-font": [
"Noto Sans Regular"
],
"text-size": {
"stops": [
[
17,
9
],
[
19,
15
]
]
},
"text-field": "{ref}",
"visibility": "visible"
},
"paint": {
"text-color": "rgba(135, 135, 135, 1)",
"text-halo-color": "rgba(255, 255, 255, 1)",
"text-halo-width": 1
},
"filter": [
"all",
[
"==",
"class",
"gate"
]
],
"order": 187
}
]
}
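
Editor's note: the line-width ramps above use GL "stops" with base 1.2, i.e. exponential interpolation between the stop zooms. When a width needs to be reproduced outside the renderer (for example while debugging), the usual formula is v0 + (v1 - v0) * (base^(z - z0) - 1) / (base^(z1 - z0) - 1), falling back to linear interpolation when base is 1. The helper below is a small convenience sketch of that formula; it is not part of this repository.

-- Interpolate a two-stop GL "stops" ramp at zoom z, with an optional exponential base.
CREATE OR REPLACE FUNCTION gl_interpolate(z double precision,
                                          z0 double precision, v0 double precision,
                                          z1 double precision, v1 double precision,
                                          base double precision DEFAULT 1.0)
    RETURNS double precision AS
$$
SELECT CASE
           WHEN z <= z0 THEN v0
           WHEN z >= z1 THEN v1
           WHEN base = 1.0 THEN v0 + (v1 - v0) * (z - z0) / (z1 - z0)
           ELSE v0 + (v1 - v0) * (power(base, z - z0) - 1) / (power(base, z1 - z0) - 1)
       END;
$$ LANGUAGE SQL IMMUTABLE;

-- e.g. runway width from the aeroway_runway layer above at zoom 15:
-- SELECT gl_interpolate(15, 11, 3, 20, 48, 1.2);   -- ≈ 14.6 px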

View File

@ -1,156 +1,204 @@
-- This statement can be deleted after the border importer image stops creating this object as a table
DO
$$
BEGIN
DROP TABLE IF EXISTS osm_border_linestring_gen_z13 CASCADE;
EXCEPTION
WHEN wrong_object_type THEN
END;
$$ LANGUAGE plpgsql;
-- etldoc: osm_border_linestring -> osm_border_linestring_gen_z13 -- etldoc: osm_border_linestring -> osm_border_linestring_gen_z13
-- etldoc: osm_border_linestring_adm -> osm_border_linestring_gen_z13 -- etldoc: osm_border_linestring_adm -> osm_border_linestring_gen_z13
-- etldoc: osm_border_disp_linestring -> osm_border_linestring_gen_z13
DROP MATERIALIZED VIEW IF EXISTS osm_border_linestring_gen_z13 CASCADE; DROP MATERIALIZED VIEW IF EXISTS osm_border_linestring_gen_z13 CASCADE;
CREATE MATERIALIZED VIEW osm_border_linestring_gen_z13 AS CREATE MATERIALIZED VIEW osm_border_linestring_gen_z13 AS
( (
SELECT ST_Simplify(ST_Collect(geometry), ZRes(14)) AS geometry, SELECT ST_Simplify(geometry, ZRes(14)) AS geometry, NULL::text AS adm0_l, NULL::text AS adm0_r, admin_level, disputed, maritime
MAX(adm0_l) AS adm0_l, FROM osm_border_linestring
MAX(adm0_r) AS adm0_r, WHERE admin_level BETWEEN 3 AND 10
MIN(admin_level) AS admin_level, UNION ALL
BOOL_OR(disputed) AS disputed, SELECT ST_Simplify(geometry, ZRes(14)) AS geometry, adm0_l, adm0_r, admin_level, disputed, maritime
MAX(name) AS name, FROM osm_border_linestring_adm
MAX(claimed_by) AS claimed_by, ) /* DELAY_MATERIALIZED_VIEW_CREATION */ ;
BOOL_OR(maritime) AS maritime
FROM (
-- All admin 3-10 boundaries
SELECT osm_id,
geometry,
NULL::text AS adm0_l,
NULL::text AS adm0_r,
MIN(admin_level) AS admin_level,
BOOL_OR(disputed)
OR BOOL_OR(dispute)
OR BOOL_OR(border_status = 'disputed')
OR BOOL_OR(disputed_by <> '') AS disputed,
NULLIF(name, '') AS name,
NULLIF(claimed_by, '') AS claimed_by,
BOOL_OR(maritime) AS maritime
FROM osm_border_linestring
WHERE admin_level BETWEEN 3 AND 10
AND type = 1 -- ways only
GROUP BY osm_id, geometry, name, claimed_by
UNION ALL
-- All non-disputed admin 2 boundaries
SELECT osm_id,
geometry,
adm0_l,
adm0_r,
admin_level,
FALSE AS disputed,
NULL::text AS name,
NULL::text AS claimed_by,
maritime
FROM osm_border_linestring_adm
UNION ALL
-- All disputed admin 2 boundaries
SELECT osm_id,
geometry,
NULL::text AS adm0_l,
NULL::text AS adm0_r,
2::int AS admin_level,
TRUE AS disputed,
NULLIF(name, '') AS name,
NULLIF(claimed_by, '') AS claimed_by,
maritime
FROM osm_border_disp_linestring
GROUP BY osm_id, geometry, name, claimed_by, maritime
) AS merged_boundary
GROUP by osm_id
)/* DELAY_MATERIALIZED_VIEW_CREATION */ ;
CREATE INDEX IF NOT EXISTS osm_border_linestring_gen_z13_idx ON osm_border_linestring_gen_z13 USING gist (geometry); CREATE INDEX IF NOT EXISTS osm_border_linestring_gen_z13_idx ON osm_border_linestring_gen_z13 USING gist (geometry);
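
Editor's note: each _gen_z* view in this cascade simplifies the previous zoom's geometry with a ZRes() tolerance. ZRes(z), as defined in postgis-vt-util, returns the ground resolution of one pixel of a 256 px Web Mercator tile at zoom z, so the simplification never discards detail smaller than roughly one pixel at the target zoom. The standalone sketch below assumes that standard definition (it does not use the function installed by this repo) and just lists the tolerances involved.

-- ZRes(z): metres per pixel of a 256 px Web Mercator tile at zoom z
-- (standard postgis-vt-util definition, reproduced here for reference only).
CREATE OR REPLACE FUNCTION zres_demo(z double precision)
    RETURNS double precision AS
$$
SELECT 40075016.6855785 / (256 * power(2, z));
$$ LANGUAGE SQL IMMUTABLE;

-- Simplification tolerances used by the gen_z4..gen_z13 cascade above:
SELECT z AS target_zoom, round(zres_demo(z + 1)::numeric, 1) AS tolerance_m
FROM generate_series(4, 13) AS z;
-- z13 uses ZRes(14) ≈ 9.6 m, z4 uses ZRes(5) ≈ 4892.0 m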
-- This statement can be deleted after the border importer image stops creating this object as a table
DO
$$
BEGIN
DROP TABLE IF EXISTS osm_border_linestring_gen_z12 CASCADE;
EXCEPTION
WHEN wrong_object_type THEN
END;
$$ LANGUAGE plpgsql;
-- etldoc: osm_border_linestring_gen_z13 -> osm_border_linestring_gen_z12 -- etldoc: osm_border_linestring_gen_z13 -> osm_border_linestring_gen_z12
DROP MATERIALIZED VIEW IF EXISTS osm_border_linestring_gen_z12 CASCADE; DROP MATERIALIZED VIEW IF EXISTS osm_border_linestring_gen_z12 CASCADE;
CREATE MATERIALIZED VIEW osm_border_linestring_gen_z12 AS CREATE MATERIALIZED VIEW osm_border_linestring_gen_z12 AS
( (
SELECT ST_Simplify(geometry, ZRes(13)) AS geometry, adm0_l, adm0_r, admin_level, disputed, name, claimed_by, maritime SELECT ST_Simplify(geometry, ZRes(13)) AS geometry, adm0_l, adm0_r, admin_level, disputed, maritime
FROM osm_border_linestring_gen_z13 FROM osm_border_linestring_gen_z13
WHERE admin_level BETWEEN 2 AND 10 WHERE admin_level BETWEEN 2 AND 10
) /* DELAY_MATERIALIZED_VIEW_CREATION */ ; ) /* DELAY_MATERIALIZED_VIEW_CREATION */ ;
CREATE INDEX IF NOT EXISTS osm_border_linestring_gen_z12_idx ON osm_border_linestring_gen_z12 USING gist (geometry); CREATE INDEX IF NOT EXISTS osm_border_linestring_gen_z12_idx ON osm_border_linestring_gen_z12 USING gist (geometry);
-- This statement can be deleted after the border importer image stops creating this object as a table
DO
$$
BEGIN
DROP TABLE IF EXISTS osm_border_linestring_gen_z11 CASCADE;
EXCEPTION
WHEN wrong_object_type THEN
END;
$$ LANGUAGE plpgsql;
-- etldoc: osm_border_linestring_gen_z12 -> osm_border_linestring_gen_z11 -- etldoc: osm_border_linestring_gen_z12 -> osm_border_linestring_gen_z11
DROP MATERIALIZED VIEW IF EXISTS osm_border_linestring_gen_z11 CASCADE; DROP MATERIALIZED VIEW IF EXISTS osm_border_linestring_gen_z11 CASCADE;
CREATE MATERIALIZED VIEW osm_border_linestring_gen_z11 AS CREATE MATERIALIZED VIEW osm_border_linestring_gen_z11 AS
( (
SELECT ST_Simplify(geometry, ZRes(12)) AS geometry, adm0_l, adm0_r, admin_level, disputed, name, claimed_by, maritime SELECT ST_Simplify(geometry, ZRes(12)) AS geometry, adm0_l, adm0_r, admin_level, disputed, maritime
FROM osm_border_linestring_gen_z12 FROM osm_border_linestring_gen_z12
WHERE admin_level BETWEEN 2 AND 8 WHERE admin_level BETWEEN 2 AND 8
) /* DELAY_MATERIALIZED_VIEW_CREATION */ ; ) /* DELAY_MATERIALIZED_VIEW_CREATION */ ;
CREATE INDEX IF NOT EXISTS osm_border_linestring_gen_z11_idx ON osm_border_linestring_gen_z11 USING gist (geometry); CREATE INDEX IF NOT EXISTS osm_border_linestring_gen_z11_idx ON osm_border_linestring_gen_z11 USING gist (geometry);
-- This statement can be deleted after the border importer image stops creating this object as a table
DO
$$
BEGIN
DROP TABLE IF EXISTS osm_border_linestring_gen_z10 CASCADE;
EXCEPTION
WHEN wrong_object_type THEN
END;
$$ LANGUAGE plpgsql;
-- etldoc: osm_border_linestring_gen_z11 -> osm_border_linestring_gen_z10 -- etldoc: osm_border_linestring_gen_z11 -> osm_border_linestring_gen_z10
DROP MATERIALIZED VIEW IF EXISTS osm_border_linestring_gen_z10 CASCADE; DROP MATERIALIZED VIEW IF EXISTS osm_border_linestring_gen_z10 CASCADE;
CREATE MATERIALIZED VIEW osm_border_linestring_gen_z10 AS CREATE MATERIALIZED VIEW osm_border_linestring_gen_z10 AS
( (
SELECT ST_Simplify(geometry, ZRes(11)) AS geometry, adm0_l, adm0_r, admin_level, disputed, name, claimed_by, maritime SELECT ST_Simplify(geometry, ZRes(11)) AS geometry, adm0_l, adm0_r, admin_level, disputed, maritime
FROM osm_border_linestring_gen_z11 FROM osm_border_linestring_gen_z11
WHERE admin_level BETWEEN 2 AND 6 WHERE admin_level BETWEEN 2 AND 6
) /* DELAY_MATERIALIZED_VIEW_CREATION */ ; ) /* DELAY_MATERIALIZED_VIEW_CREATION */ ;
CREATE INDEX IF NOT EXISTS osm_border_linestring_gen_z10_idx ON osm_border_linestring_gen_z10 USING gist (geometry); CREATE INDEX IF NOT EXISTS osm_border_linestring_gen_z10_idx ON osm_border_linestring_gen_z10 USING gist (geometry);
-- This statement can be deleted after the border importer image stops creating this object as a table
DO
$$
BEGIN
DROP TABLE IF EXISTS osm_border_linestring_gen_z9 CASCADE;
EXCEPTION
WHEN wrong_object_type THEN
END;
$$ LANGUAGE plpgsql;
-- etldoc: osm_border_linestring_gen_z10 -> osm_border_linestring_gen_z9 -- etldoc: osm_border_linestring_gen_z10 -> osm_border_linestring_gen_z9
DROP MATERIALIZED VIEW IF EXISTS osm_border_linestring_gen_z9 CASCADE; DROP MATERIALIZED VIEW IF EXISTS osm_border_linestring_gen_z9 CASCADE;
CREATE MATERIALIZED VIEW osm_border_linestring_gen_z9 AS CREATE MATERIALIZED VIEW osm_border_linestring_gen_z9 AS
( (
SELECT ST_Simplify(geometry, ZRes(10)) AS geometry, adm0_l, adm0_r, admin_level, disputed, name, claimed_by, maritime SELECT ST_Simplify(geometry, ZRes(10)) AS geometry, adm0_l, adm0_r, admin_level, disputed, maritime
FROM osm_border_linestring_gen_z10 FROM osm_border_linestring_gen_z10
-- WHERE admin_level BETWEEN 2 AND 6 WHERE admin_level BETWEEN 2 AND 6
) /* DELAY_MATERIALIZED_VIEW_CREATION */ ; ) /* DELAY_MATERIALIZED_VIEW_CREATION */ ;
CREATE INDEX IF NOT EXISTS osm_border_linestring_gen_z9_idx ON osm_border_linestring_gen_z9 USING gist (geometry); CREATE INDEX IF NOT EXISTS osm_border_linestring_gen_z9_idx ON osm_border_linestring_gen_z9 USING gist (geometry);
-- This statement can be deleted after the border importer image stops creating this object as a table
DO
$$
BEGIN
DROP TABLE IF EXISTS osm_border_linestring_gen_z8 CASCADE;
EXCEPTION
WHEN wrong_object_type THEN
END;
$$ LANGUAGE plpgsql;
-- etldoc: osm_border_linestring_gen_z9 -> osm_border_linestring_gen_z8 -- etldoc: osm_border_linestring_gen_z9 -> osm_border_linestring_gen_z8
DROP MATERIALIZED VIEW IF EXISTS osm_border_linestring_gen_z8 CASCADE; DROP MATERIALIZED VIEW IF EXISTS osm_border_linestring_gen_z8 CASCADE;
CREATE MATERIALIZED VIEW osm_border_linestring_gen_z8 AS CREATE MATERIALIZED VIEW osm_border_linestring_gen_z8 AS
( (
SELECT ST_Simplify(geometry, ZRes(9)) AS geometry, adm0_l, adm0_r, admin_level, disputed, name, claimed_by, maritime SELECT ST_Simplify(geometry, ZRes(9)) AS geometry, adm0_l, adm0_r, admin_level, disputed, maritime
FROM osm_border_linestring_gen_z9 FROM osm_border_linestring_gen_z9
WHERE admin_level BETWEEN 2 AND 4 WHERE admin_level BETWEEN 2 AND 4
) /* DELAY_MATERIALIZED_VIEW_CREATION */ ; ) /* DELAY_MATERIALIZED_VIEW_CREATION */ ;
CREATE INDEX IF NOT EXISTS osm_border_linestring_gen_z8_idx ON osm_border_linestring_gen_z8 USING gist (geometry); CREATE INDEX IF NOT EXISTS osm_border_linestring_gen_z8_idx ON osm_border_linestring_gen_z8 USING gist (geometry);
-- This statement can be deleted after the border importer image stops creating this object as a table
DO
$$
BEGIN
DROP TABLE IF EXISTS osm_border_linestring_gen_z7 CASCADE;
EXCEPTION
WHEN wrong_object_type THEN
END;
$$ LANGUAGE plpgsql;
-- etldoc: osm_border_linestring_gen_z8 -> osm_border_linestring_gen_z7 -- etldoc: osm_border_linestring_gen_z8 -> osm_border_linestring_gen_z7
DROP MATERIALIZED VIEW IF EXISTS osm_border_linestring_gen_z7 CASCADE; DROP MATERIALIZED VIEW IF EXISTS osm_border_linestring_gen_z7 CASCADE;
CREATE MATERIALIZED VIEW osm_border_linestring_gen_z7 AS CREATE MATERIALIZED VIEW osm_border_linestring_gen_z7 AS
( (
SELECT ST_Simplify(geometry, ZRes(8)) AS geometry, adm0_l, adm0_r, admin_level, disputed, name, claimed_by, maritime SELECT ST_Simplify(geometry, ZRes(8)) AS geometry, adm0_l, adm0_r, admin_level, disputed, maritime
FROM osm_border_linestring_gen_z8 FROM osm_border_linestring_gen_z8
-- WHERE admin_level BETWEEN 2 AND 4 WHERE admin_level BETWEEN 2 AND 4
) /* DELAY_MATERIALIZED_VIEW_CREATION */ ; ) /* DELAY_MATERIALIZED_VIEW_CREATION */ ;
CREATE INDEX IF NOT EXISTS osm_border_linestring_gen_z7_idx ON osm_border_linestring_gen_z7 USING gist (geometry); CREATE INDEX IF NOT EXISTS osm_border_linestring_gen_z7_idx ON osm_border_linestring_gen_z7 USING gist (geometry);
-- This statement can be deleted after the border importer image stops creating this object as a table
DO
$$
BEGIN
DROP TABLE IF EXISTS osm_border_linestring_gen_z6 CASCADE;
EXCEPTION
WHEN wrong_object_type THEN
END;
$$ LANGUAGE plpgsql;
-- etldoc: osm_border_linestring_gen_z7 -> osm_border_linestring_gen_z6 -- etldoc: osm_border_linestring_gen_z7 -> osm_border_linestring_gen_z6
DROP MATERIALIZED VIEW IF EXISTS osm_border_linestring_gen_z6 CASCADE; DROP MATERIALIZED VIEW IF EXISTS osm_border_linestring_gen_z6 CASCADE;
CREATE MATERIALIZED VIEW osm_border_linestring_gen_z6 AS CREATE MATERIALIZED VIEW osm_border_linestring_gen_z6 AS
( (
SELECT ST_Simplify(geometry, ZRes(7)) AS geometry, adm0_l, adm0_r, admin_level, disputed, name, claimed_by, maritime SELECT ST_Simplify(geometry, ZRes(7)) AS geometry, adm0_l, adm0_r, admin_level, disputed, maritime
FROM osm_border_linestring_gen_z7 FROM osm_border_linestring_gen_z7
-- WHERE admin_level BETWEEN 2 AND 4 WHERE admin_level BETWEEN 2 AND 4
) /* DELAY_MATERIALIZED_VIEW_CREATION */ ; ) /* DELAY_MATERIALIZED_VIEW_CREATION */ ;
CREATE INDEX IF NOT EXISTS osm_border_linestring_gen_z6_idx ON osm_border_linestring_gen_z6 USING gist (geometry); CREATE INDEX IF NOT EXISTS osm_border_linestring_gen_z6_idx ON osm_border_linestring_gen_z6 USING gist (geometry);
-- This statement can be deleted after the border importer image stops creating this object as a table
DO
$$
BEGIN
DROP TABLE IF EXISTS osm_border_linestring_gen_z5 CASCADE;
EXCEPTION
WHEN wrong_object_type THEN
END;
$$ LANGUAGE plpgsql;
-- etldoc: osm_border_linestring_gen_z6 -> osm_border_linestring_gen_z5 -- etldoc: osm_border_linestring_gen_z6 -> osm_border_linestring_gen_z5
DROP MATERIALIZED VIEW IF EXISTS osm_border_linestring_gen_z5 CASCADE; DROP MATERIALIZED VIEW IF EXISTS osm_border_linestring_gen_z5 CASCADE;
CREATE MATERIALIZED VIEW osm_border_linestring_gen_z5 AS CREATE MATERIALIZED VIEW osm_border_linestring_gen_z5 AS
( (
SELECT ST_Simplify(geometry, ZRes(6)) AS geometry, adm0_l, adm0_r, admin_level, disputed, name, claimed_by, maritime SELECT ST_Simplify(geometry, ZRes(6)) AS geometry, adm0_l, adm0_r, admin_level, disputed, maritime
FROM osm_border_linestring_gen_z6 FROM osm_border_linestring_gen_z6
-- WHERE admin_level BETWEEN 2 AND 4 WHERE admin_level BETWEEN 2 AND 4
) /* DELAY_MATERIALIZED_VIEW_CREATION */ ; ) /* DELAY_MATERIALIZED_VIEW_CREATION */ ;
CREATE INDEX IF NOT EXISTS osm_border_linestring_gen_z5_idx ON osm_border_linestring_gen_z5 USING gist (geometry); CREATE INDEX IF NOT EXISTS osm_border_linestring_gen_z5_idx ON osm_border_linestring_gen_z5 USING gist (geometry);
-- This statement can be deleted after the border importer image stops creating this object as a table
DO
$$
BEGIN
DROP TABLE IF EXISTS osm_border_linestring_gen_z4 CASCADE;
EXCEPTION
WHEN wrong_object_type THEN
END;
$$ LANGUAGE plpgsql;
-- etldoc: osm_border_linestring_gen_z5 -> osm_border_linestring_gen_z4 -- etldoc: osm_border_linestring_gen_z5 -> osm_border_linestring_gen_z4
DROP MATERIALIZED VIEW IF EXISTS osm_border_linestring_gen_z4 CASCADE; DROP MATERIALIZED VIEW IF EXISTS osm_border_linestring_gen_z4 CASCADE;
CREATE MATERIALIZED VIEW osm_border_linestring_gen_z4 AS CREATE MATERIALIZED VIEW osm_border_linestring_gen_z4 AS
( (
SELECT ST_Simplify(geometry, ZRes(5)) AS geometry, adm0_l, adm0_r, admin_level, disputed, name, claimed_by, maritime SELECT ST_Simplify(geometry, ZRes(5)) AS geometry, adm0_l, adm0_r, admin_level, disputed, maritime
FROM osm_border_linestring_gen_z5 FROM osm_border_linestring_gen_z5
WHERE admin_level = 2 AND maritime WHERE admin_level = 2
) /* DELAY_MATERIALIZED_VIEW_CREATION */ ; ) /* DELAY_MATERIALIZED_VIEW_CREATION */ ;
CREATE INDEX IF NOT EXISTS osm_border_linestring_gen_z4_idx ON osm_border_linestring_gen_z4 USING gist (geometry); CREATE INDEX IF NOT EXISTS osm_border_linestring_gen_z4_idx ON osm_border_linestring_gen_z4 USING gist (geometry);
@ -162,7 +210,7 @@ CREATE MATERIALIZED VIEW ne_10m_admin_0_boundary_lines_land_gen_z4 AS
SELECT ST_Simplify(geometry, ZRes(6)) as geometry, SELECT ST_Simplify(geometry, ZRes(6)) as geometry,
2 AS admin_level, 2 AS admin_level,
(CASE WHEN featurecla LIKE 'Disputed%' THEN TRUE ELSE FALSE END) AS disputed, (CASE WHEN featurecla LIKE 'Disputed%' THEN TRUE ELSE FALSE END) AS disputed,
NULL::text AS disputed_name, (CASE WHEN featurecla LIKE 'Disputed%' THEN 'ne10m_' || ogc_fid ELSE NULL::text END) AS disputed_name,
NULL::text AS claimed_by, NULL::text AS claimed_by,
FALSE AS maritime FALSE AS maritime
FROM ne_10m_admin_0_boundary_lines_land FROM ne_10m_admin_0_boundary_lines_land
@ -170,21 +218,6 @@ WHERE featurecla <> 'Lease limit'
) /* DELAY_MATERIALIZED_VIEW_CREATION */ ; ) /* DELAY_MATERIALIZED_VIEW_CREATION */ ;
CREATE INDEX IF NOT EXISTS ne_10m_admin_0_boundary_lines_land_gen_z4_idx ON ne_10m_admin_0_boundary_lines_land_gen_z4 USING gist (geometry); CREATE INDEX IF NOT EXISTS ne_10m_admin_0_boundary_lines_land_gen_z4_idx ON ne_10m_admin_0_boundary_lines_land_gen_z4 USING gist (geometry);
-- etldoc: ne_10m_admin_0_boundary_lines_land -> ne_10m_admin_0_boundary_lines_land_disputed
DROP MATERIALIZED VIEW IF EXISTS ne_10m_admin_0_boundary_lines_land_disputed CASCADE;
CREATE MATERIALIZED VIEW ne_10m_admin_0_boundary_lines_land_disputed AS
(
SELECT geometry,
2 AS admin_level,
(CASE WHEN featurecla LIKE 'Disputed%' THEN TRUE ELSE FALSE END) AS disputed,
NULL::text AS disputed_name,
NULL::text AS claimed_by,
FALSE AS maritime
FROM ne_10m_admin_0_boundary_lines_land
WHERE featurecla LIKE 'Disputed%' AND adm0_left = 'South Sudan' AND adm0_right = 'Kenya'
) /* DELAY_MATERIALIZED_VIEW_CREATION */ ;
CREATE INDEX IF NOT EXISTS ne_10m_admin_0_boundary_lines_land_disputed_idx ON ne_10m_admin_0_boundary_lines_land_disputed USING gist (geometry);
-- ne_10m_admin_1_states_provinces_lines -- ne_10m_admin_1_states_provinces_lines
-- etldoc: ne_10m_admin_1_states_provinces_lines -> ne_10m_admin_1_states_provinces_lines_gen_z4 -- etldoc: ne_10m_admin_1_states_provinces_lines -> ne_10m_admin_1_states_provinces_lines_gen_z4
DROP MATERIALIZED VIEW IF EXISTS ne_10m_admin_1_states_provinces_lines_gen_z4 CASCADE; DROP MATERIALIZED VIEW IF EXISTS ne_10m_admin_1_states_provinces_lines_gen_z4 CASCADE;
@ -195,10 +228,9 @@ SELECT ST_Simplify(geometry, ZRes(6)) as geometry,
FALSE AS disputed, FALSE AS disputed,
NULL::text AS disputed_name, NULL::text AS disputed_name,
NULL::text AS claimed_by, NULL::text AS claimed_by,
FALSE AS maritime, FALSE AS maritime
min_zoom
FROM ne_10m_admin_1_states_provinces_lines FROM ne_10m_admin_1_states_provinces_lines
WHERE min_zoom <= 7.7 WHERE min_zoom <= 7
) /* DELAY_MATERIALIZED_VIEW_CREATION */ ; ) /* DELAY_MATERIALIZED_VIEW_CREATION */ ;
CREATE INDEX IF NOT EXISTS ne_10m_admin_1_states_provinces_lines_gen_z4_idx ON ne_10m_admin_1_states_provinces_lines_gen_z4 USING gist (geometry); CREATE INDEX IF NOT EXISTS ne_10m_admin_1_states_provinces_lines_gen_z4_idx ON ne_10m_admin_1_states_provinces_lines_gen_z4 USING gist (geometry);
@ -214,7 +246,6 @@ SELECT ST_Simplify(geometry, ZRes(5)) as geometry,
claimed_by, claimed_by,
maritime maritime
FROM ne_10m_admin_1_states_provinces_lines_gen_z4 FROM ne_10m_admin_1_states_provinces_lines_gen_z4
WHERE min_zoom <= 7
) /* DELAY_MATERIALIZED_VIEW_CREATION */ ; ) /* DELAY_MATERIALIZED_VIEW_CREATION */ ;
CREATE INDEX IF NOT EXISTS ne_10m_admin_1_states_provinces_lines_gen_z3_idx ON ne_10m_admin_1_states_provinces_lines_gen_z3 USING gist (geometry); CREATE INDEX IF NOT EXISTS ne_10m_admin_1_states_provinces_lines_gen_z3_idx ON ne_10m_admin_1_states_provinces_lines_gen_z3 USING gist (geometry);
@ -254,7 +285,7 @@ CREATE MATERIALIZED VIEW ne_50m_admin_0_boundary_lines_land_gen_z3 AS
SELECT ST_Simplify(geometry, ZRes(5)) as geometry, SELECT ST_Simplify(geometry, ZRes(5)) as geometry,
2 AS admin_level, 2 AS admin_level,
(CASE WHEN featurecla LIKE 'Disputed%' THEN TRUE ELSE FALSE END) AS disputed, (CASE WHEN featurecla LIKE 'Disputed%' THEN TRUE ELSE FALSE END) AS disputed,
NULL::text AS disputed_name, (CASE WHEN featurecla LIKE 'Disputed%' THEN 'ne50m_' || ogc_fid ELSE NULL::text END) AS disputed_name,
NULL::text AS claimed_by, NULL::text AS claimed_by,
FALSE AS maritime FALSE AS maritime
FROM ne_50m_admin_0_boundary_lines_land FROM ne_50m_admin_0_boundary_lines_land
@ -297,7 +328,7 @@ CREATE MATERIALIZED VIEW ne_110m_admin_0_boundary_lines_land_gen_z0 AS
SELECT ST_Simplify(geometry, ZRes(2)) as geometry, SELECT ST_Simplify(geometry, ZRes(2)) as geometry,
2 AS admin_level, 2 AS admin_level,
(CASE WHEN featurecla LIKE 'Disputed%' THEN TRUE ELSE FALSE END) AS disputed, (CASE WHEN featurecla LIKE 'Disputed%' THEN TRUE ELSE FALSE END) AS disputed,
NULL::text AS disputed_name, (CASE WHEN featurecla LIKE 'Disputed%' THEN 'ne110m_' || ogc_fid ELSE NULL::text END) AS disputed_name,
NULL::text AS claimed_by, NULL::text AS claimed_by,
FALSE AS maritime FALSE AS maritime
FROM ne_110m_admin_0_boundary_lines_land FROM ne_110m_admin_0_boundary_lines_land
@ -334,10 +365,8 @@ FROM ne_110m_admin_0_boundary_lines_land_gen_z0
-- etldoc: ne_50m_admin_0_boundary_lines_land_gen_z1 -> boundary_z1 -- etldoc: ne_50m_admin_0_boundary_lines_land_gen_z1 -> boundary_z1
-- etldoc: ne_10m_admin_1_states_provinces_lines_gen_z1 -> boundary_z1 -- etldoc: ne_10m_admin_1_states_provinces_lines_gen_z1 -> boundary_z1
-- etldoc: ne_10m_admin_0_boundary_lines_land_disputed -> boundary_z1
-- etldoc: osm_border_disp_linestring_gen_z1 -> boundary_z1 -- etldoc: osm_border_disp_linestring_gen_z1 -> boundary_z1
DROP MATERIALIZED VIEW IF EXISTS boundary_z1 CASCADE; CREATE OR REPLACE VIEW boundary_z1 AS
CREATE MATERIALIZED VIEW boundary_z1 AS
( (
SELECT geometry, SELECT geometry,
admin_level, admin_level,
@ -363,21 +392,18 @@ SELECT geometry,
admin_level, admin_level,
NULL::text AS adm0_l, NULL::text AS adm0_l,
NULL::text AS adm0_r, NULL::text AS adm0_r,
disputed, TRUE AS disputed,
disputed_name, edit_name(name) AS disputed_name,
claimed_by, claimed_by,
maritime maritime
FROM ne_10m_admin_0_boundary_lines_land_disputed FROM osm_border_disp_linestring_gen_z1
); );
CREATE INDEX IF NOT EXISTS boundary_z1_idx ON boundary_z1 USING gist (geometry);
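
Editor's note: where boundary_z1..boundary_z3 are materialized views with their own GiST indexes (one side of this diff), tile-time query cost is traded for an explicit refresh step; the plain-view variant always reflects its sources. If the low-zoom source tables change, the materialized variant has to be refreshed by hand, for example:

-- Only needed for the materialized-view variant; plain views need no refresh.
-- REFRESH ... CONCURRENTLY would additionally require a unique index on each view.
REFRESH MATERIALIZED VIEW boundary_z1;
REFRESH MATERIALIZED VIEW boundary_z2;
REFRESH MATERIALIZED VIEW boundary_z3;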
-- etldoc: ne_50m_admin_0_boundary_lines_land_gen_z2 -> boundary_z2 -- etldoc: ne_50m_admin_0_boundary_lines_land_gen_z2 -> boundary_z2
-- etldoc: ne_10m_admin_1_states_provinces_lines_gen_z2 -> boundary_z2 -- etldoc: ne_10m_admin_1_states_provinces_lines_gen_z2 -> boundary_z2
-- etldoc: ne_10m_admin_0_boundary_lines_land_disputed -> boundary_z2
-- etldoc: osm_border_disp_linestring_gen_z2 -> boundary_z2 -- etldoc: osm_border_disp_linestring_gen_z2 -> boundary_z2
DROP MATERIALIZED VIEW IF EXISTS boundary_z2 CASCADE; CREATE OR REPLACE VIEW boundary_z2 AS
CREATE MATERIALIZED VIEW boundary_z2 AS
( (
SELECT geometry, SELECT geometry,
admin_level, admin_level,
@ -403,20 +429,17 @@ SELECT geometry,
admin_level, admin_level,
NULL::text AS adm0_l, NULL::text AS adm0_l,
NULL::text AS adm0_r, NULL::text AS adm0_r,
disputed, TRUE AS disputed,
disputed_name, edit_name(name) AS disputed_name,
claimed_by, claimed_by,
maritime maritime
FROM ne_10m_admin_0_boundary_lines_land_disputed FROM osm_border_disp_linestring_gen_z2
); );
CREATE INDEX IF NOT EXISTS boundary_z2_idx ON boundary_z2 USING gist (geometry);
-- etldoc: ne_50m_admin_0_boundary_lines_land_gen_z3 -> boundary_z3 -- etldoc: ne_50m_admin_0_boundary_lines_land_gen_z3 -> boundary_z3
-- etldoc: ne_10m_admin_1_states_provinces_lines_gen_z3 -> boundary_z3 -- etldoc: ne_10m_admin_1_states_provinces_lines_gen_z3 -> boundary_z3
-- etldoc: ne_10m_admin_0_boundary_lines_land_disputed -> boundary_z3
-- etldoc: osm_border_disp_linestring_gen_z3 -> boundary_z3 -- etldoc: osm_border_disp_linestring_gen_z3 -> boundary_z3
DROP MATERIALIZED VIEW IF EXISTS boundary_z3 CASCADE; CREATE OR REPLACE VIEW boundary_z3 AS
CREATE MATERIALIZED VIEW boundary_z3 AS
( (
SELECT geometry, SELECT geometry,
admin_level, admin_level,
@ -442,19 +465,18 @@ SELECT geometry,
admin_level, admin_level,
NULL::text AS adm0_l, NULL::text AS adm0_l,
NULL::text AS adm0_r, NULL::text AS adm0_r,
disputed, TRUE AS disputed,
disputed_name, edit_name(name) AS disputed_name,
claimed_by, claimed_by,
maritime maritime
FROM ne_10m_admin_0_boundary_lines_land_disputed FROM osm_border_disp_linestring_gen_z3
); );
CREATE INDEX IF NOT EXISTS boundary_z3_idx ON boundary_z3 USING gist (geometry);
-- etldoc: ne_10m_admin_0_boundary_lines_land_gen_z4 -> boundary_z4 -- etldoc: ne_10m_admin_0_boundary_lines_land_gen_z4 -> boundary_z4
-- etldoc: ne_10m_admin_1_states_provinces_lines_gen_z4 -> boundary_z4 -- etldoc: ne_10m_admin_1_states_provinces_lines_gen_z4 -> boundary_z4
-- etldoc: osm_border_linestring_gen_z4 -> boundary_z4 -- etldoc: osm_border_linestring_gen_z4 -> boundary_z4
DROP MATERIALIZED VIEW IF EXISTS boundary_z4 CASCADE; -- etldoc: osm_border_disp_linestring_gen_z4 -> boundary_z4
CREATE MATERIALIZED VIEW boundary_z4 AS CREATE OR REPLACE VIEW boundary_z4 AS
( (
SELECT geometry, SELECT geometry,
admin_level, admin_level,
@ -481,14 +503,26 @@ SELECT geometry,
adm0_l, adm0_l,
adm0_r, adm0_r,
disputed, disputed,
CASE WHEN disputed THEN edit_name(name) END AS disputed_name, NULL::text AS disputed_name,
claimed_by, NULL::text AS claimed_by,
maritime maritime
FROM osm_border_linestring_gen_z4 FROM osm_border_linestring_gen_z4
WHERE maritime = TRUE
AND admin_level <= 2
UNION ALL
SELECT geometry,
admin_level,
NULL::text AS adm0_l,
NULL::text AS adm0_r,
TRUE AS disputed,
edit_name(name) AS disputed_name,
claimed_by,
maritime
FROM osm_border_disp_linestring_gen_z4
); );
CREATE INDEX IF NOT EXISTS boundary_z4_idx ON boundary_z4 USING gist (geometry);
-- etldoc: osm_border_linestring_gen_z5 -> boundary_z5 -- etldoc: osm_border_linestring_gen_z5 -> boundary_z5
-- etldoc: osm_border_disp_linestring_gen_z5 -> boundary_z5
CREATE OR REPLACE VIEW boundary_z5 AS CREATE OR REPLACE VIEW boundary_z5 AS
( (
SELECT geometry, SELECT geometry,
@ -496,14 +530,27 @@ SELECT geometry,
adm0_l, adm0_l,
adm0_r, adm0_r,
disputed, disputed,
CASE WHEN disputed THEN edit_name(name) END AS disputed_name, NULL::text AS disputed_name,
claimed_by, NULL::text AS claimed_by,
maritime maritime
FROM osm_border_linestring_gen_z5 FROM osm_border_linestring_gen_z5
WHERE admin_level <= 4 WHERE admin_level <= 4
-- already not included in osm_border_linestring_adm
-- AND osm_id NOT IN (SELECT DISTINCT osm_id FROM osm_border_disp_linestring_gen_z5)
UNION ALL
SELECT geometry,
admin_level,
NULL::text AS adm0_l,
NULL::text AS adm0_r,
TRUE AS disputed,
edit_name(name) AS disputed_name,
claimed_by,
maritime
FROM osm_border_disp_linestring_gen_z5
); );
-- etldoc: osm_border_linestring_gen_z6 -> boundary_z6 -- etldoc: osm_border_linestring_gen_z6 -> boundary_z6
-- etldoc: osm_border_disp_linestring_gen_z6 -> boundary_z6
CREATE OR REPLACE VIEW boundary_z6 AS CREATE OR REPLACE VIEW boundary_z6 AS
( (
SELECT geometry, SELECT geometry,
@ -511,14 +558,26 @@ SELECT geometry,
adm0_l, adm0_l,
adm0_r, adm0_r,
disputed, disputed,
CASE WHEN disputed THEN edit_name(name) END AS disputed_name, NULL::text AS disputed_name,
claimed_by, NULL::text AS claimed_by,
maritime maritime
FROM osm_border_linestring_gen_z6 FROM osm_border_linestring_gen_z6
WHERE admin_level <= 4 WHERE admin_level <= 4
-- AND osm_id NOT IN (SELECT DISTINCT osm_id FROM osm_border_disp_linestring_gen_z6)
UNION ALL
SELECT geometry,
admin_level,
NULL::text AS adm0_l,
NULL::text AS adm0_r,
TRUE AS disputed,
edit_name(name) AS disputed_name,
claimed_by,
maritime
FROM osm_border_disp_linestring_gen_z6
); );
-- etldoc: osm_border_linestring_gen_z7 -> boundary_z7 -- etldoc: osm_border_linestring_gen_z7 -> boundary_z7
-- etldoc: osm_border_disp_linestring_gen_z7 -> boundary_z7
CREATE OR REPLACE VIEW boundary_z7 AS CREATE OR REPLACE VIEW boundary_z7 AS
( (
SELECT geometry, SELECT geometry,
@ -526,14 +585,26 @@ SELECT geometry,
adm0_l, adm0_l,
adm0_r, adm0_r,
disputed, disputed,
CASE WHEN disputed THEN edit_name(name) END AS disputed_name, NULL::text AS disputed_name,
claimed_by, NULL::text AS claimed_by,
maritime maritime
FROM osm_border_linestring_gen_z7 FROM osm_border_linestring_gen_z7
WHERE admin_level <= 6 WHERE admin_level <= 6
-- AND osm_id NOT IN (SELECT DISTINCT osm_id FROM osm_border_disp_linestring_gen_z7)
UNION ALL
SELECT geometry,
admin_level,
NULL::text AS adm0_l,
NULL::text AS adm0_r,
TRUE AS disputed,
edit_name(name) AS disputed_name,
claimed_by,
maritime
FROM osm_border_disp_linestring_gen_z7
); );
-- etldoc: osm_border_linestring_gen_z8 -> boundary_z8 -- etldoc: osm_border_linestring_gen_z8 -> boundary_z8
-- etldoc: osm_border_disp_linestring_gen_z8 -> boundary_z8
CREATE OR REPLACE VIEW boundary_z8 AS CREATE OR REPLACE VIEW boundary_z8 AS
( (
SELECT geometry, SELECT geometry,
@ -541,14 +612,26 @@ SELECT geometry,
adm0_l, adm0_l,
adm0_r, adm0_r,
disputed, disputed,
CASE WHEN disputed THEN edit_name(name) END AS disputed_name, NULL::text AS disputed_name,
claimed_by, NULL::text AS claimed_by,
maritime maritime
FROM osm_border_linestring_gen_z8 FROM osm_border_linestring_gen_z8
WHERE admin_level <= 6 WHERE admin_level <= 6
-- AND osm_id NOT IN (SELECT DISTINCT osm_id FROM osm_border_disp_linestring_gen_z8)
UNION ALL
SELECT geometry,
admin_level,
NULL::text AS adm0_l,
NULL::text AS adm0_r,
TRUE AS disputed,
edit_name(name) AS disputed_name,
claimed_by,
maritime
FROM osm_border_disp_linestring_gen_z8
); );
-- etldoc: osm_border_linestring_gen_z9 -> boundary_z9 -- etldoc: osm_border_linestring_gen_z9 -> boundary_z9
-- etldoc: osm_border_disp_linestring_gen_z9 -> boundary_z9
CREATE OR REPLACE VIEW boundary_z9 AS CREATE OR REPLACE VIEW boundary_z9 AS
( (
SELECT geometry, SELECT geometry,
@ -556,14 +639,26 @@ SELECT geometry,
adm0_l, adm0_l,
adm0_r, adm0_r,
disputed, disputed,
CASE WHEN disputed THEN edit_name(name) END AS disputed_name, NULL::text AS disputed_name,
claimed_by, NULL::text AS claimed_by,
maritime maritime
FROM osm_border_linestring_gen_z9 FROM osm_border_linestring_gen_z9
WHERE admin_level <= 6 WHERE admin_level <= 6
-- AND osm_id NOT IN (SELECT DISTINCT osm_id FROM osm_border_disp_linestring_gen_z9)
UNION ALL
SELECT geometry,
admin_level,
NULL::text AS adm0_l,
NULL::text AS adm0_r,
TRUE AS disputed,
edit_name(name) AS disputed_name,
claimed_by,
maritime
FROM osm_border_disp_linestring_gen_z9
); );
-- etldoc: osm_border_linestring_gen_z10 -> boundary_z10 -- etldoc: osm_border_linestring_gen_z10 -> boundary_z10
-- etldoc: osm_border_disp_linestring_gen_z10 -> boundary_z10
CREATE OR REPLACE VIEW boundary_z10 AS CREATE OR REPLACE VIEW boundary_z10 AS
( (
SELECT geometry, SELECT geometry,
@ -571,14 +666,26 @@ SELECT geometry,
adm0_l, adm0_l,
adm0_r, adm0_r,
disputed, disputed,
CASE WHEN disputed THEN edit_name(name) END AS disputed_name, NULL::text AS disputed_name,
claimed_by, NULL::text AS claimed_by,
maritime maritime
FROM osm_border_linestring_gen_z10 FROM osm_border_linestring_gen_z10
WHERE admin_level <= 6 WHERE admin_level <= 6
-- AND osm_id NOT IN (SELECT DISTINCT osm_id FROM osm_border_disp_linestring_gen_z10)
UNION ALL
SELECT geometry,
admin_level,
NULL::text AS adm0_l,
NULL::text AS adm0_r,
TRUE AS disputed,
edit_name(name) AS disputed_name,
claimed_by,
maritime
FROM osm_border_disp_linestring_gen_z10
); );
-- etldoc: osm_border_linestring_gen_z11 -> boundary_z11 -- etldoc: osm_border_linestring_gen_z11 -> boundary_z11
-- etldoc: osm_border_disp_linestring_gen_z11 -> boundary_z11
CREATE OR REPLACE VIEW boundary_z11 AS CREATE OR REPLACE VIEW boundary_z11 AS
( (
SELECT geometry, SELECT geometry,
@ -586,14 +693,26 @@ SELECT geometry,
adm0_l, adm0_l,
adm0_r, adm0_r,
disputed, disputed,
CASE WHEN disputed THEN edit_name(name) END AS disputed_name, NULL::text AS disputed_name,
claimed_by, NULL::text AS claimed_by,
maritime maritime
FROM osm_border_linestring_gen_z11 FROM osm_border_linestring_gen_z11
WHERE admin_level <= 8 WHERE admin_level <= 8
-- AND osm_id NOT IN (SELECT DISTINCT osm_id FROM osm_border_disp_linestring_gen_z11)
UNION ALL
SELECT geometry,
admin_level,
NULL::text AS adm0_l,
NULL::text AS adm0_r,
TRUE AS disputed,
edit_name(name) AS disputed_name,
claimed_by,
maritime
FROM osm_border_disp_linestring_gen_z11
); );
-- etldoc: osm_border_linestring_gen_z12 -> boundary_z12 -- etldoc: osm_border_linestring_gen_z12 -> boundary_z12
-- etldoc: osm_border_disp_linestring_gen_z12 -> boundary_z12
CREATE OR REPLACE VIEW boundary_z12 AS CREATE OR REPLACE VIEW boundary_z12 AS
( (
SELECT geometry, SELECT geometry,
@ -601,13 +720,25 @@ SELECT geometry,
adm0_l, adm0_l,
adm0_r, adm0_r,
disputed, disputed,
CASE WHEN disputed THEN edit_name(name) END AS disputed_name, NULL::text AS disputed_name,
claimed_by, NULL::text AS claimed_by,
maritime maritime
FROM osm_border_linestring_gen_z12 FROM osm_border_linestring_gen_z12
--WHERE osm_id NOT IN (SELECT DISTINCT osm_id FROM osm_border_disp_linestring_gen_z12)
UNION ALL
SELECT geometry,
admin_level,
NULL::text AS adm0_l,
NULL::text AS adm0_r,
TRUE AS disputed,
edit_name(name) AS disputed_name,
claimed_by,
maritime
FROM osm_border_disp_linestring_gen_z12
); );
-- etldoc: osm_border_linestring_gen_z13 -> boundary_z13 -- etldoc: osm_border_linestring_gen_z13 -> boundary_z13
-- etldoc: osm_border_disp_linestring_gen_z13 -> boundary_z13
CREATE OR REPLACE VIEW boundary_z13 AS CREATE OR REPLACE VIEW boundary_z13 AS
( (
SELECT geometry, SELECT geometry,
@ -615,14 +746,25 @@ SELECT geometry,
adm0_l, adm0_l,
adm0_r, adm0_r,
disputed, disputed,
CASE WHEN disputed THEN edit_name(name) END AS disputed_name, NULL::text AS disputed_name,
claimed_by, NULL::text AS claimed_by,
maritime maritime
FROM osm_border_linestring_gen_z13 FROM osm_border_linestring_gen_z13
--WHERE osm_id NOT IN (SELECT DISTINCT osm_id FROM osm_border_disp_linestring_gen_z13)
UNION ALL
SELECT geometry,
admin_level,
NULL::text AS adm0_l,
NULL::text AS adm0_r,
TRUE AS disputed,
edit_name(name) AS disputed_name,
claimed_by,
maritime
FROM osm_border_disp_linestring_gen_z13
); );
-- etldoc: layer_boundary[shape=record fillcolor=lightpink, style="rounded,filled", -- etldoc: layer_boundary[shape=record fillcolor=lightpink, style="rounded,filled",
-- etldoc: label="<sql> layer_boundary |<z0> z0 |<z1> z1 |<z2> z2 | <z3> z3 | <z4> z4 | <z5> z5 | <z6> z6 | <z7> z7 | <z8> z8 | <z9> z9 |<z10> z10 |<z11> z11 |<z12> z12|<z13> z13|<z14> z14+"] -- etldoc: label="<sql> layer_boundary |<z0> z0 |<z1> z1 |<z2> z2 | <z3> z3 | <z4> z4 | <z5> z5 | <z6> z6 | <z7> z7 | <z8> z8 | <z9> z9 |<z10> z10 |<z11> z11 |<z12> z12|<z13> z13+"]
CREATE OR REPLACE FUNCTION layer_boundary(bbox geometry, zoom_level int) CREATE OR REPLACE FUNCTION layer_boundary(bbox geometry, zoom_level int)
RETURNS TABLE RETURNS TABLE
( (
@ -633,14 +775,11 @@ CREATE OR REPLACE FUNCTION layer_boundary(bbox geometry, zoom_level int)
disputed int, disputed int,
disputed_name text, disputed_name text,
claimed_by text, claimed_by text,
maritime int, maritime int
class text,
name text,
tags hstore
) )
AS AS
$$ $$
SELECT geometry, admin_level, adm0_l, adm0_r, disputed::int, disputed_name, claimed_by, maritime::int, NULL::text, NULL::text, NULL::hstore SELECT geometry, admin_level, adm0_l, adm0_r, disputed::int, disputed_name, claimed_by, maritime::int
FROM ( FROM (
-- etldoc: boundary_z0 -> layer_boundary:z0 -- etldoc: boundary_z0 -> layer_boundary:z0
SELECT * SELECT *
@ -725,134 +864,7 @@ FROM (
FROM boundary_z13 FROM boundary_z13
WHERE geometry && bbox WHERE geometry && bbox
AND zoom_level >= 13 AND zoom_level >= 13
) AS segment_zoom_levels ) AS zoom_levels;
UNION ALL
SELECT geometry, NULL::int, NULL::text, NULL::text, NULL::int, NULL::text, NULL::text, NULL::int, class, name, tags
FROM (
-- etldoc: osm_boundary_polygon_gen_z4 -> layer_boundary:z4
SELECT geometry,
boundary AS class,
name,
tags
FROM osm_boundary_polygon_gen_z4
WHERE zoom_level = 4
AND geometry && bbox
UNION ALL
-- etldoc: osm_boundary_polygon_gen_z5 -> layer_boundary:z5
SELECT geometry,
boundary AS class,
name,
tags
FROM osm_boundary_polygon_gen_z5
WHERE zoom_level = 5
AND geometry && bbox
UNION ALL
-- etldoc: osm_boundary_polygon_gen_z6 -> layer_boundary:z6
SELECT geometry,
boundary AS class,
name,
tags
FROM osm_boundary_polygon_gen_z6
WHERE zoom_level = 6
AND geometry && bbox
UNION ALL
-- etldoc: osm_boundary_polygon_gen_z7 -> layer_boundary:z7
SELECT geometry,
boundary AS class,
name,
tags
FROM osm_boundary_polygon_gen_z7
WHERE zoom_level = 7
AND geometry && bbox
UNION ALL
-- etldoc: osm_boundary_polygon_gen_z8 -> layer_boundary:z8
SELECT geometry,
boundary AS class,
name,
tags
FROM osm_boundary_polygon_gen_z8
WHERE zoom_level = 8
AND geometry && bbox
UNION ALL
-- etldoc: osm_boundary_polygon_gen_z9 -> layer_boundary:z9
SELECT geometry,
boundary AS class,
name,
tags
FROM osm_boundary_polygon_gen_z9
WHERE zoom_level = 9
AND geometry && bbox
UNION ALL
-- etldoc: osm_boundary_polygon_gen_z10 -> layer_boundary:z10
SELECT geometry,
boundary AS class,
name,
tags
FROM osm_boundary_polygon_gen_z10
WHERE zoom_level = 10
AND geometry && bbox
UNION ALL
-- etldoc: osm_boundary_polygon_gen_z11 -> layer_boundary:z11
SELECT geometry,
boundary AS class,
name,
tags
FROM osm_boundary_polygon_gen_z11
WHERE zoom_level = 11
AND geometry && bbox
UNION ALL
-- etldoc: osm_boundary_polygon_gen_z12 -> layer_boundary:z12
SELECT geometry,
boundary AS class,
name,
tags
FROM osm_boundary_polygon_gen_z12
WHERE zoom_level = 12
AND geometry && bbox
UNION ALL
-- etldoc: osm_boundary_polygon_gen_z13 -> layer_boundary:z13
SELECT geometry,
boundary AS class,
name,
tags
FROM osm_boundary_polygon_gen_z13
WHERE zoom_level = 13
AND geometry && bbox
UNION ALL
-- etldoc: osm_boundary_polygon -> layer_boundary:z14
SELECT geometry,
boundary AS class,
name,
tags
FROM osm_boundary_polygon
WHERE zoom_level = 14
AND geometry && bbox
) AS area_zoom_levels
$$ LANGUAGE SQL STABLE $$ LANGUAGE SQL STABLE
-- STRICT -- STRICT
PARALLEL SAFE; PARALLEL SAFE;
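
Editor's note: for manual testing, the layer function can be called directly with a Web Mercator tile envelope and a zoom level. A minimal check is sketched below, assuming PostGIS 3.0+ for ST_TileEnvelope; on older installations the bbox can be built with ST_MakeEnvelope(..., 3857) instead.

-- Boundary features that would land in tile z4/x8/y5, grouped by attributes:
SELECT admin_level, adm0_l, adm0_r, disputed, maritime, count(*)
FROM layer_boundary(ST_TileEnvelope(4, 8, 5), 4)
GROUP BY 1, 2, 3, 4, 5
ORDER BY 1;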

View File

@ -1,25 +1,13 @@
layer: layer:
id: "boundary" id: "boundary"
requires:
tables:
- osm_border_linestring
- ne_10m_admin_0_countries
- ne_10m_admin_0_boundary_lines_land
- ne_10m_admin_1_states_provinces_lines
- ne_50m_admin_0_boundary_lines_land
- ne_110m_admin_0_boundary_lines_land
description: | description: |
Contains administrative boundaries as linestrings and aboriginal lands as polygons. Contains administrative boundaries as linestrings.
Until z4 [Natural Earth data](http://www.naturalearthdata.com/downloads/) is used after which Until z4 [Natural Earth data](http://www.naturalearthdata.com/downloads/) is used after which
OSM boundaries ([`boundary=administrative`](http://wiki.openstreetmap.org/wiki/Tag:boundary%3Dadministrative)) OSM boundaries ([`boundary=administrative`](http://wiki.openstreetmap.org/wiki/Tag:boundary%3Dadministrative))
are present from z5 to z14 (also for maritime boundaries with `admin_level <= 2` at z4). are present from z5 to z14 (also for maritime boundaries with `admin_level <= 2` at z4).
OSM data contains several [`admin_level`](http://wiki.openstreetmap.org/wiki/Tag:boundary%3Dadministrative#admin_level) OSM data contains several [`admin_level`](http://wiki.openstreetmap.org/wiki/Tag:boundary%3Dadministrative#admin_level)
but for most styles it makes sense to just style `admin_level=2` and `admin_level=4`. but for most styles it makes sense to just style `admin_level=2` and `admin_level=4`.
fields: fields:
class:
description: |
Use the **class** to differentiate between different kinds of boundaries. The class for `boundary=aboriginal_lands` is `aboriginal_lands`.
name: The OSM [`name`](http://wiki.openstreetmap.org/wiki/Key:name) value (area features only).
admin_level: | admin_level: |
OSM [admin_level](http://wiki.openstreetmap.org/wiki/Tag:boundary%3Dadministrative#admin_level) OSM [admin_level](http://wiki.openstreetmap.org/wiki/Tag:boundary%3Dadministrative#admin_level)
indicating the level of importance of this boundary. indicating the level of importance of this boundary.
@ -62,9 +50,8 @@ layer:
buffer_size: 4 buffer_size: 4
datasource: datasource:
geometry_field: geometry geometry_field: geometry
query: (SELECT geometry, admin_level, adm0_l, adm0_r, disputed, disputed_name, claimed_by, maritime, class, name, {name_languages} FROM layer_boundary(!bbox!, z(!scale_denominator!))) AS t query: (SELECT geometry, admin_level, adm0_l, adm0_r, disputed, disputed_name, claimed_by, maritime FROM layer_boundary(!bbox!, z(!scale_denominator!))) AS t
schema: schema:
- ./update_boundary_polygon.sql
- ./boundary_name.sql - ./boundary_name.sql
- ./boundary.sql - ./boundary.sql
datasources: datasources:

View File

@ -1,25 +1,23 @@
DROP TABLE IF EXISTS osm_border_linestring_adm CASCADE; DROP TABLE IF EXISTS osm_border_linestring_adm CASCADE;
-- etldoc: osm_border_linestring -> osm_border_linestring_adm -- etldoc: osm_border_linestring -> osm_border_linestring_adm
-- etldoc: osm_border_disp_linestring -> osm_border_linestring_adm
-- etldoc: ne_10m_admin_0_countries -> osm_border_linestring_adm
CREATE TABLE IF NOT EXISTS osm_border_linestring_adm AS ( CREATE TABLE IF NOT EXISTS osm_border_linestring_adm AS (
WITH WITH
-- Prepare lines from osm to be merged -- Prepare lines from osm to be merged
multiline AS ( multiline AS (
SELECT osm_id, SELECT ST_Node(ST_Collect(geometry)) AS geometry,
ST_Node(ST_Collect(geometry)) AS geometry, maritime,
BOOL_OR(maritime) AS maritime, disputed
FALSE AS disputed
FROM osm_border_linestring FROM osm_border_linestring
WHERE admin_level = 2 AND ST_Dimension(geometry) = 1 WHERE admin_level = 2
AND osm_id NOT IN (SELECT DISTINCT osm_id FROM osm_border_disp_linestring) AND osm_id NOT IN (SELECT DISTINCT osm_id FROM osm_border_disp_linestring)
GROUP BY osm_id GROUP BY maritime,
disputed
), ),
mergedline AS ( mergedline AS (
SELECT osm_id, SELECT (ST_Dump(
(ST_Dump(ST_LineMerge(geometry))).geom AS geometry, ST_LineMerge(geometry))).geom AS geometry,
maritime, maritime,
disputed disputed
FROM multiline FROM multiline
@ -34,7 +32,7 @@ CREATE TABLE IF NOT EXISTS osm_border_linestring_adm AS (
FROM (SELECT ST_Node( FROM (SELECT ST_Node(
ST_Collect(geometry)) AS geometry ST_Collect(geometry)) AS geometry
FROM osm_border_linestring FROM osm_border_linestring
WHERE admin_level = 2 AND ST_Dimension(geometry) = 1 WHERE admin_level = 2
) nodes ) nodes
) linemerge ) linemerge
), ),
@ -57,14 +55,12 @@ CREATE TABLE IF NOT EXISTS osm_border_linestring_adm AS (
), ),
rights AS ( rights AS (
SELECT osm_id, SELECT adm0_r,
adm0_r,
geometry, geometry,
maritime, maritime,
disputed disputed
FROM ( FROM (
SELECT a.osm_id AS osm_id, SELECT b.adm0_a3 AS adm0_r,
b.adm0_a3 AS adm0_r,
a.geometry, a.geometry,
a.maritime, a.maritime,
a.disputed a.disputed
@ -77,16 +73,14 @@ CREATE TABLE IF NOT EXISTS osm_border_linestring_adm AS (
) line_rights ) line_rights
) )
SELECT osm_id, SELECT adm0_l,
adm0_l,
adm0_r, adm0_r,
geometry, geometry,
maritime, maritime,
2::integer AS admin_level, 2::integer AS admin_level,
disputed disputed
FROM ( FROM (
SELECT r.osm_id AS osm_id, SELECT b.adm0_a3 AS adm0_l,
b.adm0_a3 AS adm0_l,
r.adm0_r AS adm0_r, r.adm0_r AS adm0_r,
r.geometry, r.geometry,
r.maritime, r.maritime,
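
Editor's note: the ST_Collect → ST_Node → ST_LineMerge → ST_Dump chain used for osm_border_linestring_adm is what splits admin-2 borders into segments that can each be attributed a left and a right country. A small self-contained illustration of that chain, on toy coordinates with no OpenMapTiles tables involved:

-- Two borders that cross: node them at intersections, merge what can be merged,
-- and dump the result into individual linestrings.
WITH src(geom) AS (
    VALUES (ST_GeomFromText('LINESTRING(0 0, 10 0)')),
           (ST_GeomFromText('LINESTRING(5 -5, 5 5)'))
)
SELECT (ST_Dump(ST_LineMerge(ST_Node(ST_Collect(geom))))).geom AS segment
FROM src;
-- yields 4 segments, split at the shared intersection point (5 0)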

Binary image file not shown: 750 KiB before → 668 KiB after.

View File

@ -1,171 +1,90 @@
generalized_tables: generalized_tables:
# etldoc: osm_border_linestring -> osm_border_disp_linestring
border_disp_linestring:
source: border_linestring
sql_filter: ST_GeometryType(geometry) = 'ST_LineString' AND (disputed OR dispute OR border_status = 'disputed' OR disputed_by <> '') AND admin_level = 2
# etldoc: osm_boundary_polygon_gen_z5 -> osm_boundary_polygon_gen_z4 # etldoc: osm_border_disp_linestring_gen_z2 -> osm_border_disp_linestring_gen_z1
boundary_polygon_gen_z4: border_disp_linestring_gen_z1:
source: boundary_polygon_gen_z5 source: border_disp_linestring_gen_z2
sql_filter: area>power(ZRES3,2) sql_filter: admin_level = 2
tolerance: ZRES2
# etldoc: osm_border_disp_linestring_gen_z3 -> osm_border_disp_linestring_gen_z2
border_disp_linestring_gen_z2:
source: border_disp_linestring_gen_z3
sql_filter: admin_level = 2
tolerance: ZRES3
# etldoc: osm_border_disp_linestring_gen_z4 -> osm_border_disp_linestring_gen_z3
border_disp_linestring_gen_z3:
source: border_disp_linestring_gen_z4
sql_filter: admin_level = 2
tolerance: ZRES4 tolerance: ZRES4
# etldoc: osm_boundary_polygon_gen_z6 -> osm_boundary_polygon_gen_z5 # etldoc: osm_border_disp_linestring_gen_z5 -> osm_border_disp_linestring_gen_z4
boundary_polygon_gen_z5: border_disp_linestring_gen_z4:
source: boundary_polygon_gen_z6 source: border_disp_linestring_gen_z5
sql_filter: area>power(ZRES4,2) sql_filter: admin_level = 2
tolerance: ZRES5 tolerance: ZRES5
# etldoc: osm_boundary_polygon_gen_z7 -> osm_boundary_polygon_gen_z6 # etldoc: osm_border_disp_linestring_gen_z6 -> osm_border_disp_linestring_gen_z5
boundary_polygon_gen_z6: border_disp_linestring_gen_z5:
source: boundary_polygon_gen_z7 source: border_disp_linestring_gen_z6
sql_filter: area>power(ZRES5,2) sql_filter: admin_level = 2
tolerance: ZRES6 tolerance: ZRES6
# etldoc: osm_boundary_polygon_gen_z8 -> osm_boundary_polygon_gen_z7 # etldoc: osm_border_disp_linestring_gen_z7 -> osm_border_disp_linestring_gen_z6
boundary_polygon_gen_z7: border_disp_linestring_gen_z6:
source: boundary_polygon_gen_z8 source: border_disp_linestring_gen_z7
sql_filter: area>power(ZRES6,2) sql_filter: admin_level = 2
tolerance: ZRES7 tolerance: ZRES7
# etldoc: osm_boundary_polygon_gen_z9 -> osm_boundary_polygon_gen_z8 # etldoc: osm_border_disp_linestring_gen_z8 -> osm_border_disp_linestring_gen_z7
boundary_polygon_gen_z8: border_disp_linestring_gen_z7:
source: boundary_polygon_gen_z9 source: border_disp_linestring_gen_z8
sql_filter: area>power(ZRES7,2) sql_filter: admin_level = 2
tolerance: ZRES8 tolerance: ZRES8
# etldoc: osm_boundary_polygon_gen_z10 -> osm_boundary_polygon_gen_z9 # etldoc: osm_border_disp_linestring_gen_z9 -> osm_border_disp_linestring_gen_z8
boundary_polygon_gen_z9: border_disp_linestring_gen_z8:
source: boundary_polygon_gen_z10 source: border_disp_linestring_gen_z9
sql_filter: area>power(ZRES8,2) sql_filter: admin_level = 2
tolerance: ZRES9 tolerance: ZRES9
# etldoc: osm_boundary_polygon_gen_z11 -> osm_boundary_polygon_gen_z10 # etldoc: osm_border_disp_linestring_gen_z10 -> osm_border_disp_linestring_gen_z9
boundary_polygon_gen_z10: border_disp_linestring_gen_z9:
source: boundary_polygon_gen_z11 source: border_disp_linestring_gen_z10
sql_filter: area>power(ZRES9,2) sql_filter: admin_level = 2
tolerance: ZRES10 tolerance: ZRES10
# etldoc: osm_boundary_polygon_gen_z12 -> osm_boundary_polygon_gen_z11 # etldoc: osm_border_disp_linestring_gen_z11 -> osm_border_disp_linestring_gen_z10
boundary_polygon_gen_z11: border_disp_linestring_gen_z10:
source: boundary_polygon_gen_z12 source: border_disp_linestring_gen_z11
sql_filter: area>power(ZRES10,2) sql_filter: admin_level = 2
tolerance: ZRES11 tolerance: ZRES11
# etldoc: osm_boundary_polygon_gen_z13 -> osm_boundary_polygon_gen_z12 # etldoc: osm_border_disp_linestring_gen_z12 -> osm_border_disp_linestring_gen_z11
boundary_polygon_gen_z12: border_disp_linestring_gen_z11:
source: boundary_polygon_gen_z13 source: border_disp_linestring_gen_z12
sql_filter: area>power(ZRES11,2) sql_filter: admin_level = 2
tolerance: ZRES12 tolerance: ZRES12
# etldoc: osm_boundary_polygon -> osm_boundary_polygon_gen_z13 # etldoc: osm_border_disp_linestring_gen_z13 -> osm_border_disp_linestring_gen_z12
boundary_polygon_gen_z13: border_disp_linestring_gen_z12:
source: boundary_polygon source: border_disp_linestring_gen_z13
sql_filter: area>power(ZRES12,2) AND ST_IsValid(geometry) sql_filter: admin_level = 2
tolerance: ZRES13 tolerance: ZRES13
# etldoc: osm_border_disp_linestring -> osm_border_disp_linestring_gen_z13
border_disp_linestring_gen_z13:
source: border_disp_linestring
sql_filter: admin_level = 2
tolerance: ZRES14
# etldoc: osm_border_disp_relation -> osm_border_disp_linestring
border_disp_linestring:
source: border_disp_relation
sql_filter: ST_GeometryType(geometry) = 'ST_LineString'
tables: tables:
# etldoc: imposm3 -> osm_border_linestring
border_linestring:
type: relation_member
filters:
require:
admin_level: [__any__]
boundary: [administrative]
columns:
- name: relation_id
type: id
- name: osm_id
type: id
from_member: true
- name: member
type: member_id
- name: type
type: member_type
- name: geometry
type: geometry
- key: name
name: name
type: string
# Used for disputed boundary, e.g. "Line of actual control"
from_member: true
- key: admin_level
name: admin_level
type: integer
- key: claimed_by
name: claimed_by
type: string
- key: disputed_by
name: disputed_by
type: string
from_member: true
- key: dispute
name: dispute
type: bool
from_member: true
- key: disputed
name: disputed
type: bool
from_member: true
- key: border_status
name: border_status
type: string
from_member: true
- key: maritime
name: maritime
type: bool
from_member: true
- name: index
type: member_index
- name: role
type: member_role
# - name: type
# type: member_type
- key: boundary_type
name: boundary_type
type: string
from_member: true
- key: natural
name: natural
type: string
from_member: true
relation_types: [boundary]
mapping:
boundary:
- administrative
border_status:
- dispute
boundary_type:
- maritime
# etldoc: imposm3 -> osm_boundary_polygon
boundary_polygon:
type: polygon
filters:
require:
type: [boundary]
boundary: [aboriginal_lands]
columns:
- name: osm_id
type: id
- name: geometry
type: validated_geometry
- name: name
key: name
type: string
- name: tags
type: hstore_tags
- name: boundary
key: boundary
type: string
- name: area
type: area
mapping:
boundary:
- aboriginal_lands
# etldoc: imposm3 -> osm_border_disp_relation # etldoc: imposm3 -> osm_border_disp_relation
border_disp_relation: border_disp_relation:
type: relation_member type: relation_member
@ -196,96 +115,10 @@ tables:
name: maritime name: maritime
type: bool type: bool
from_member: true from_member: true
- name: index
type: member_index
- name: role
type: member_role
- name: type
type: member_type
mapping: mapping:
type: [boundary] type: [boundary]
filters: filters:
require: require:
#admin_level: ['2'] # this used to be specified, re-enable if bugs show up with country borders #admin_level: ['2']
admin_level: [__any__] admin_level: [__any__]
boundary: ['administrative'] # Filters out boundary administrative_fraction and religious_administration claimed_by: [__any__]
# For NUTS in linestring version
administrative_relation:
type: relation
columns:
- name: osm_id
type: id
- key: name
name: name
type: string
- name: name_en
key: name:en
type: string
- name: name_nl
key: name:nl
type: string
- name: name_de
key: name:de
type: string
- name: name_fr
key: name:fr
type: string
- key: boundary
name: boundary
type: string
- key: admin_level
name: admin_level
type: integer
mapping:
boundary: [ 'administrative' ]
filters:
require:
admin_level: [ __any__ ]
administrative_member:
type: relation_member
columns:
- name: relation_id
type: id
- name: boundary_id
type: id
from_member: true
- key: admin_level
name: admin_level
type: integer
- key: maritime
name: maritime
type: bool
from_member: true
- name: index
type: member_index
- name: role
type: member_role
- name: type
type: member_type
mapping:
type: [boundary]
filters:
require:
admin_level: [__any__]
boundary: ['administrative']
administrative_boundary:
type: linestring
columns:
- name: osm_id
type: id
- name: geometry
type: geometry
- key: admin_level
name: admin_level
type: integer
- key: maritime
name: maritime
type: bool
mapping:
boundary: [ 'administrative' ]
filters:
require:
admin_level: [ __any__ ]

Binary image file not shown: 22 KiB before → 4.4 KiB after.

View File

@ -1,193 +0,0 @@
-- This is very crude and not fine-tuned yet
-- This statement can be deleted after the border importer image stops creating this object as a table
DO
$$
BEGIN
DROP TABLE IF EXISTS osm_boundary_polygon_nuts CASCADE;
EXCEPTION
WHEN wrong_object_type THEN
END;
$$ LANGUAGE plpgsql;
-- etldoc: osm_border_linestring -> osm_border_linestring_gen_z13
-- etldoc: osm_border_linestring_adm -> osm_border_linestring_gen_z13
DROP MATERIALIZED VIEW IF EXISTS osm_boundary_polygon_nuts CASCADE;
CREATE MATERIALIZED VIEW osm_boundary_polygon_nuts AS
(
SELECT r.osm_id as relation_id,
r.name,
r.name_en,
r.name_nl,
r.name_de,
r.name_fr,
r.admin_level,
p.geometry
FROM (
SELECT relation_id,
ST_BuildArea(ST_Node(ST_Collect(geometry))) as geometry
FROM osm_border_disp_relation
WHERE (role = 'outer' or role = 'inner')
AND ST_GeometryType(geometry) = 'ST_LineString'
GROUP BY relation_id
) as p
LEFT JOIN osm_administrative_relation as r on r.osm_id = p.relation_id
) /* DELAY_MATERIALIZED_VIEW_CREATION */ ;
CREATE INDEX IF NOT EXISTS osm_boundary_polygon_idx ON osm_boundary_polygon_nuts USING gist (geometry);
-- etldoc: osm_border_linestring -> osm_border_linestring_gen_z13
-- etldoc: osm_border_linestring_adm -> osm_border_linestring_gen_z13
DROP MATERIALIZED VIEW IF EXISTS osm_boundary_linestring CASCADE;
CREATE MATERIALIZED VIEW osm_boundary_linestring AS
SELECT osm_id,
geometry,
nuts_level,
nuts ->> 'l_nuts_0_name' as l_nuts_0_name,
nuts ->> 'l_nuts_1_name' as l_nuts_1_name,
nuts ->> 'l_nuts_2_name' as l_nuts_2_name,
nuts ->> 'l_nuts_3_name' as l_nuts_3_name,
nuts ->> 'l_nuts_4_name' as l_nuts_4_name,
nuts ->> 'l_nuts_5_name' as l_nuts_5_name,
nuts ->> 'r_nuts_0_name' as r_nuts_0_name,
nuts ->> 'r_nuts_1_name' as r_nuts_1_name,
nuts ->> 'r_nuts_2_name' as r_nuts_2_name,
nuts ->> 'r_nuts_3_name' as r_nuts_3_name,
nuts ->> 'r_nuts_4_name' as r_nuts_4_name,
nuts ->> 'r_nuts_5_name' as r_nuts_5_name
-- Shouldn't be needed for the map
-- nuts->'l_nuts_1_id' as l_nuts_1_id,
-- nuts->'l_nuts_2_id' as l_nuts_2_id,
-- nuts->'l_nuts_3_id' as l_nuts_3_id,
-- nuts->'l_nuts_4_id' as l_nuts_4_id,
-- nuts->'l_nuts_5_id' as l_nuts_5_id,
-- nuts->'r_nuts_1_id' as r_nuts_1_id,
-- nuts->'r_nuts_2_id' as r_nuts_2_id,
-- nuts->'r_nuts_3_id' as r_nuts_3_id,
-- nuts->'r_nuts_4_id' as r_nuts_4_id,
-- nuts->'r_nuts_5_id' as r_nuts_5_id
FROM (
SELECT osm_id,
geometry,
MIN(nuts_level) as nuts_level,
jsonb_object_agg(
CONCAT(side, '_nuts_', nuts_level, '_name'), name
)
|| jsonb_object_agg(
CONCAT(side, '_nuts_', nuts_level, '_id'), -relation_id
) as nuts
FROM (
SELECT b.osm_id,
b.geometry,
CASE
WHEN r.admin_level = 10 THEN 6
WHEN r.admin_level = 9 THEN 5
WHEN r.admin_level = 8 THEN 4
WHEN r.admin_level = 7 THEN 3
WHEN r.admin_level = 6 THEN 2
WHEN r.admin_level = 4 THEN 1
-- No admin_level =3?
WHEN r.admin_level = 2 THEN 0
-- All others are stored as low-priority NUTS, for future reference
ELSE 1000 + r.admin_level
END as nuts_level,
COALESCE(NULLIF(r.name_en,''), NULLIF(r.name,''), NULL) as name,
r.relation_id,
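                   -- Determine on which side of the boundary line the relation polygon lies:
                   -- take a short segment around the midpoint of the line, offset it 10 m to
                   -- each side, and test which offset falls within the relation geometry.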
CASE
WHEN
ST_Within(
ST_OffsetCurve(
(ST_LineSubString(b.geometry, 0.499, 0.501)), 10,
'quad_segs=4 join=mitre'
),
r.geometry
)
THEN 'r'
WHEN
ST_Within(
ST_OffsetCurve(
(ST_LineSubString(b.geometry, 0.499, 0.501)), -10,
'quad_segs=4 join=mitre'
),
r.geometry
)
THEN 'l'
                   ELSE 'unknown' -- TODO: debug if this ever happens; if so, our method isn't foolproof
END as side
FROM osm_administrative_boundary as b
INNER JOIN osm_administrative_member as m
ON b.osm_id = m.boundary_id
INNER JOIN osm_boundary_polygon_nuts as r
ON m.relation_id = r.relation_id
) as g
GROUP BY osm_id, geometry
) as p /* DELAY_MATERIALIZED_VIEW_CREATION */ ;
CREATE INDEX IF NOT EXISTS osm_boundary_linestring_idx ON osm_boundary_linestring USING gist (geometry);
-- etldoc: layer_boundary[shape=record fillcolor=lightpink, style="rounded,filled",
-- etldoc: label="<sql> layer_boundary |<z0> z0 |<z1> z1 |<z2> z2 | <z3> z3 | <z4> z4 | <z5> z5 | <z6> z6 | <z7> z7 | <z8> z8 | <z9> z9 |<z10> z10 |<z11> z11 |<z12> z12|<z13> z13+"]
CREATE OR REPLACE FUNCTION layer_nuts(bbox geometry, zoom_level int)
RETURNS TABLE
(
geometry geometry,
nuts_level int,
l_nuts_0_name text,
l_nuts_1_name text,
l_nuts_2_name text,
l_nuts_3_name text,
l_nuts_4_name text,
l_nuts_5_name text,
r_nuts_0_name text,
r_nuts_1_name text,
r_nuts_2_name text,
r_nuts_3_name text,
r_nuts_4_name text,
r_nuts_5_name text
)
AS
$$
SELECT geometry,
nuts_level,
l_nuts_0_name,
l_nuts_1_name,
l_nuts_2_name,
l_nuts_3_name,
l_nuts_4_name,
l_nuts_5_name,
r_nuts_0_name,
r_nuts_1_name,
r_nuts_2_name,
r_nuts_3_name,
r_nuts_4_name,
r_nuts_5_name
FROM osm_boundary_linestring
WHERE geometry && bbox
AND zoom_level >
(CASE
WHEN nuts_level = 0 THEN 2
WHEN nuts_level = 1 THEN 4
WHEN nuts_level = 2 THEN 6
WHEN nuts_level = 3 THEN 6
WHEN nuts_level = 4 THEN 8
WHEN nuts_level = 5 THEN 10
END)
$$ LANGUAGE SQL STABLE
-- STRICT
PARALLEL SAFE;
/*
r.name,
CASE
WHEN r.admin_level = 10 THEN 6
WHEN r.admin_level = 9 THEN 5
WHEN r.admin_level = 8 THEN 4
WHEN r.admin_level = 7 THEN 3
WHEN r.admin_level = 6 THEN 2
WHEN r.admin_level = 4 THEN 1
-- No admin_level =3?
WHEN r.admin_level = 2 THEN 0
ELSE null
END as nuts_level,
*/
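
A minimal usage sketch of layer_nuts (illustrative only; assumes PostGIS 3.0+ for ST_TileEnvelope, Web Mercator geometries, and made-up tile coordinates):

-- Example: left/right province-level names along boundaries inside tile z8/131/85
SELECT nuts_level, l_nuts_2_name, r_nuts_2_name
FROM layer_nuts(ST_TileEnvelope(8, 131, 85), 8)
WHERE l_nuts_2_name IS DISTINCT FROM r_nuts_2_name;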

View File

@ -1,49 +0,0 @@
layer:
id: "nuts"
description: |
Contains administrative boundaries as linestrings (municipalities, counties, provinces, ...).
Administrative regions are translated to their equivalent NUTS/LAU classification.
Fields indicate which NUTS region lies to the left and to the right of each linestring.
fields:
nuts_level:
The minimum NUTS/LAU classification this linestring is part of.
NUTS only goes down to level 3, so LAU 1 & 2 are mapped as NUTS 4 & 5.
NUTS 0 = Countries
NUTS 1 = Regions (e.g. Vlaams-brabant)
NUTS 2 = Provinces (e.g. Limburg)
NUTS 3 = Administrative arrondissements (e.g. Antwerpen, best to ignore these)
NUTS 4 = Municipalities (e.g. Lummen)
NUTS 5 = Villages/Suburbs (e.g. Linkhout)
l_nuts_0_name: |
Country on the left side of the linestring
l_nuts_1_name: |
Region on the left side of the linestring
l_nuts_2_name: |
Province on the left side of the linestring
l_nuts_3_name: |
Administrative arrondissement on the left side of the linestring
l_nuts_4_name: |
Municipality on the left side of the linestring
l_nuts_5_name: |
Village/suburb on the left side of the linestring
r_nuts_0_name: |
Country on the right side of the linestring
r_nuts_1_name: |
Region on the right side of the linestring
r_nuts_2_name: |
Province on the right side of the linestring
r_nuts_3_name: |
Administrative arrondissement on the right side of the linestring
r_nuts_4_name: |
Municipality on the right side of the linestring
r_nuts_5_name: |
Village/suburb on the right side of the linestring
buffer_size: 4
datasource:
geometry_field: geometry
query: (SELECT geometry, nuts_level, l_nuts_0_name, l_nuts_1_name, l_nuts_2_name, l_nuts_3_name, l_nuts_4_name, l_nuts_5_name, r_nuts_0_name, r_nuts_1_name, r_nuts_2_name, r_nuts_3_name, r_nuts_4_name, r_nuts_5_name FROM layer_nuts(!bbox!, z(!scale_denominator!))) AS t
schema:
- ./nuts.sql
datasources:
- type: imposm3
mapping_file: ./mapping.yaml

View File

@ -1,26 +0,0 @@
layer:
id: "nuts"
description: |
Contains administrative boundaries as polygons (municipalities, counties, provinces, ...).
Administrative regions are translated to their equivalent NUTS/LAU classification.
fields:
nuts_level:
The NUTS/LAU classification this polygon falls into.
NUTS only goes down to level 3, so LAU 1 & 2 are mapped as NUTS 4 & 5.
NUTS 0 = Countries
NUTS 1 = Regions (e.g. Vlaams-brabant)
NUTS 2 = Provinces (e.g. Limburg)
NUTS 3 = Administrative arrondissements (e.g. Antwerpen, best to ignore these)
NUTS 4 = Municipalities (e.g. Lummen)
NUTS 5 = Villages/Suburbs (e.g. Linkhout)
name: |
Name of the region
buffer_size: 4
datasource:
geometry_field: geometry
query: (SELECT geometry, nuts_level, name FROM osm_boundary_polygon) AS t
schema:
- ./nuts.sql
datasources:
- type: imposm3
mapping_file: ./mapping.yaml

View File

@ -1,3 +0,0 @@
{
"layers": []
}

View File

@ -1,170 +0,0 @@
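-- Maintain a label point for every boundary polygon: add a geometry_point column to each
-- generalized table, fill it with ST_PointOnSurface, and keep it (and tags) up to date via
-- per-row triggers.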
ALTER TABLE osm_boundary_polygon
ADD COLUMN IF NOT EXISTS geometry_point geometry;
ALTER TABLE osm_boundary_polygon_gen_z13
ADD COLUMN IF NOT EXISTS geometry_point geometry;
ALTER TABLE osm_boundary_polygon_gen_z12
ADD COLUMN IF NOT EXISTS geometry_point geometry;
ALTER TABLE osm_boundary_polygon_gen_z11
ADD COLUMN IF NOT EXISTS geometry_point geometry;
ALTER TABLE osm_boundary_polygon_gen_z10
ADD COLUMN IF NOT EXISTS geometry_point geometry;
ALTER TABLE osm_boundary_polygon_gen_z9
ADD COLUMN IF NOT EXISTS geometry_point geometry;
ALTER TABLE osm_boundary_polygon_gen_z8
ADD COLUMN IF NOT EXISTS geometry_point geometry;
ALTER TABLE osm_boundary_polygon_gen_z7
ADD COLUMN IF NOT EXISTS geometry_point geometry;
ALTER TABLE osm_boundary_polygon_gen_z6
ADD COLUMN IF NOT EXISTS geometry_point geometry;
ALTER TABLE osm_boundary_polygon_gen_z5
ADD COLUMN IF NOT EXISTS geometry_point geometry;
DROP TRIGGER IF EXISTS update_row ON osm_boundary_polygon;
DROP TRIGGER IF EXISTS update_row ON osm_boundary_polygon_gen_z13;
DROP TRIGGER IF EXISTS update_row ON osm_boundary_polygon_gen_z12;
DROP TRIGGER IF EXISTS update_row ON osm_boundary_polygon_gen_z11;
DROP TRIGGER IF EXISTS update_row ON osm_boundary_polygon_gen_z10;
DROP TRIGGER IF EXISTS update_row ON osm_boundary_polygon_gen_z9;
DROP TRIGGER IF EXISTS update_row ON osm_boundary_polygon_gen_z8;
DROP TRIGGER IF EXISTS update_row ON osm_boundary_polygon_gen_z7;
DROP TRIGGER IF EXISTS update_row ON osm_boundary_polygon_gen_z6;
DROP TRIGGER IF EXISTS update_row ON osm_boundary_polygon_gen_z5;
-- etldoc: osm_boundary_polygon -> osm_boundary_polygon
-- etldoc: osm_boundary_polygon_gen_z13 -> osm_boundary_polygon_gen_z13
-- etldoc: osm_boundary_polygon_gen_z12 -> osm_boundary_polygon_gen_z12
-- etldoc: osm_boundary_polygon_gen_z11 -> osm_boundary_polygon_gen_z11
-- etldoc: osm_boundary_polygon_gen_z10 -> osm_boundary_polygon_gen_z10
-- etldoc: osm_boundary_polygon_gen_z9 -> osm_boundary_polygon_gen_z9
-- etldoc: osm_boundary_polygon_gen_z8 -> osm_boundary_polygon_gen_z8
-- etldoc: osm_boundary_polygon_gen_z7 -> osm_boundary_polygon_gen_z7
-- etldoc: osm_boundary_polygon_gen_z6 -> osm_boundary_polygon_gen_z6
-- etldoc: osm_boundary_polygon_gen_z5 -> osm_boundary_polygon_gen_z5
CREATE OR REPLACE FUNCTION update_osm_boundary_polygon() RETURNS void AS
$$
BEGIN
UPDATE osm_boundary_polygon
SET tags = update_tags(tags, geometry),
geometry_point = ST_PointOnSurface(geometry);
UPDATE osm_boundary_polygon_gen_z13
SET tags = update_tags(tags, geometry),
geometry_point = ST_PointOnSurface(geometry);
UPDATE osm_boundary_polygon_gen_z12
SET tags = update_tags(tags, geometry),
geometry_point = ST_PointOnSurface(geometry);
UPDATE osm_boundary_polygon_gen_z11
SET tags = update_tags(tags, geometry),
geometry_point = ST_PointOnSurface(geometry);
UPDATE osm_boundary_polygon_gen_z10
SET tags = update_tags(tags, geometry),
geometry_point = ST_PointOnSurface(geometry);
UPDATE osm_boundary_polygon_gen_z9
SET tags = update_tags(tags, geometry),
geometry_point = ST_PointOnSurface(geometry);
UPDATE osm_boundary_polygon_gen_z8
SET tags = update_tags(tags, geometry),
geometry_point = ST_PointOnSurface(geometry);
UPDATE osm_boundary_polygon_gen_z7
SET tags = update_tags(tags, geometry),
geometry_point = ST_PointOnSurface(geometry);
UPDATE osm_boundary_polygon_gen_z6
SET tags = update_tags(tags, geometry),
geometry_point = ST_PointOnSurface(geometry);
UPDATE osm_boundary_polygon_gen_z5
SET tags = update_tags(tags, geometry),
geometry_point = ST_PointOnSurface(geometry);
END;
$$ LANGUAGE plpgsql;
SELECT update_osm_boundary_polygon();
CREATE INDEX IF NOT EXISTS osm_boundary_polygon_point_geom_idx ON osm_boundary_polygon USING gist (geometry_point);
CREATE INDEX IF NOT EXISTS osm_boundary_polygon_gen_z13_point_geom_idx ON osm_boundary_polygon_gen_z13 USING gist (geometry_point);
CREATE INDEX IF NOT EXISTS osm_boundary_polygon_gen_z12_point_geom_idx ON osm_boundary_polygon_gen_z12 USING gist (geometry_point);
CREATE INDEX IF NOT EXISTS osm_boundary_polygon_gen_z11_point_geom_idx ON osm_boundary_polygon_gen_z11 USING gist (geometry_point);
CREATE INDEX IF NOT EXISTS osm_boundary_polygon_gen_z10_point_geom_idx ON osm_boundary_polygon_gen_z10 USING gist (geometry_point);
CREATE INDEX IF NOT EXISTS osm_boundary_polygon_gen_z9_point_geom_idx ON osm_boundary_polygon_gen_z9 USING gist (geometry_point);
CREATE INDEX IF NOT EXISTS osm_boundary_polygon_gen_z8_point_geom_idx ON osm_boundary_polygon_gen_z8 USING gist (geometry_point);
CREATE INDEX IF NOT EXISTS osm_boundary_polygon_gen_z7_point_geom_idx ON osm_boundary_polygon_gen_z7 USING gist (geometry_point);
CREATE INDEX IF NOT EXISTS osm_boundary_polygon_gen_z6_point_geom_idx ON osm_boundary_polygon_gen_z6 USING gist (geometry_point);
CREATE INDEX IF NOT EXISTS osm_boundary_polygon_gen_z5_point_geom_idx ON osm_boundary_polygon_gen_z5 USING gist (geometry_point);
CREATE OR REPLACE FUNCTION update_osm_boundary_polygon_row()
RETURNS trigger
AS
$$
BEGIN
NEW.tags = update_tags(NEW.tags, NEW.geometry);
NEW.geometry_point = ST_PointOnSurface(NEW.geometry);
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER update_row
BEFORE INSERT OR UPDATE
ON osm_boundary_polygon
FOR EACH ROW
EXECUTE PROCEDURE update_osm_boundary_polygon_row();
CREATE TRIGGER update_row
BEFORE INSERT OR UPDATE
ON osm_boundary_polygon_gen_z13
FOR EACH ROW
EXECUTE PROCEDURE update_osm_boundary_polygon_row();
CREATE TRIGGER update_row
BEFORE INSERT OR UPDATE
ON osm_boundary_polygon_gen_z12
FOR EACH ROW
EXECUTE PROCEDURE update_osm_boundary_polygon_row();
CREATE TRIGGER update_row
BEFORE INSERT OR UPDATE
ON osm_boundary_polygon_gen_z11
FOR EACH ROW
EXECUTE PROCEDURE update_osm_boundary_polygon_row();
CREATE TRIGGER update_row
BEFORE INSERT OR UPDATE
ON osm_boundary_polygon_gen_z10
FOR EACH ROW
EXECUTE PROCEDURE update_osm_boundary_polygon_row();
CREATE TRIGGER update_row
BEFORE INSERT OR UPDATE
ON osm_boundary_polygon_gen_z9
FOR EACH ROW
EXECUTE PROCEDURE update_osm_boundary_polygon_row();
CREATE TRIGGER update_row
BEFORE INSERT OR UPDATE
ON osm_boundary_polygon_gen_z8
FOR EACH ROW
EXECUTE PROCEDURE update_osm_boundary_polygon_row();
CREATE TRIGGER update_row
BEFORE INSERT OR UPDATE
ON osm_boundary_polygon_gen_z7
FOR EACH ROW
EXECUTE PROCEDURE update_osm_boundary_polygon_row();
CREATE TRIGGER update_row
BEFORE INSERT OR UPDATE
ON osm_boundary_polygon_gen_z6
FOR EACH ROW
EXECUTE PROCEDURE update_osm_boundary_polygon_row();
CREATE TRIGGER update_row
BEFORE INSERT OR UPDATE
ON osm_boundary_polygon_gen_z5
FOR EACH ROW
EXECUTE PROCEDURE update_osm_boundary_polygon_row();

View File

@ -1,8 +1,8 @@
layer: layer:
id: "building" id: "building"
description: | description: |
All [OSM Buildings](http://wiki.openstreetmap.org/wiki/Buildings). All building tags are imported ([`building=*`](http://wiki.openstreetmap.org/wiki/Key:building)). All [OSM Buildings](http://wiki.openstreetmap.org/wiki/Buildings). All building tags are imported ([`building=*`](http://wiki.openstreetmap.org/wiki/Key:building)). The buildings are not yet ready for 3D rendering support and any help to improve
Only buildings with tag location:underground are excluded. this is welcomed.
buffer_size: 4 buffer_size: 4
datasource: datasource:
geometry_field: geometry geometry_field: geometry
@ -12,9 +12,9 @@ layer:
query: (SELECT osm_id, geometry, render_height, render_min_height, colour, hide_3d FROM layer_building(!bbox!, z(!scale_denominator!))) AS t query: (SELECT osm_id, geometry, render_height, render_min_height, colour, hide_3d FROM layer_building(!bbox!, z(!scale_denominator!))) AS t
fields: fields:
render_height: | render_height: |
An approximated height from levels and height of the building or building:part. An approximated height from levels and height of the building or building:part after the method of Paul Norman in [OSM Clear](https://github.com/ClearTables/osm-clear). For future 3D rendering of buildings.
render_min_height: | render_min_height: |
An approximated height from minimum levels or minimum height of the bottom of the building or building:part. An approximated height from levels and height of the bottom of the building or building:part after the method of Paul Norman in [OSM Clear](https://github.com/ClearTables/osm-clear). For future 3D rendering of buildings.
colour: | colour: |
Colour Colour
hide_3d: | hide_3d: |

Binary image not shown (36 KiB before, 32 KiB after).

View File

@ -60,14 +60,11 @@ tables:
aeroway: aeroway:
- terminal - terminal
- hangar - hangar
location:
- underground
filters: filters:
reject: reject:
building: ["no","none","No"] building: ["no","none","No"]
building:part: ["no","none","No"] building:part: ["no","none","No"]
man_made: ["bridge"] man_made: ["bridge"]
location: ["underground"]
type: polygon type: polygon
# etldoc: imposm3 -> osm_building_relation # etldoc: imposm3 -> osm_building_relation

Binary image not shown (28 KiB before, 19 KiB after).

View File

@ -1,44 +0,0 @@
{
"layers": [
{
"id": "building",
"type": "fill",
"source": "openmaptiles",
"source-layer": "building",
"minzoom": 12,
"maxzoom": 24,
"layout": {
"visibility": "visible"
},
"paint": {
"fill-color": {
"stops": [
[
13,
"rgba(222, 213, 207, 1)"
],
[
16,
"#d9d0c9"
]
]
},
"fill-outline-color": {
"base": 1,
"stops": [
[
13,
"#9A918A"
],
[
16,
"rgba(166, 157, 150, 1)"
]
]
}
},
"metadata": {},
"order": 19
}
]
}

Binary image not shown (16 KiB before and after).

View File

@ -14,20 +14,10 @@ SELECT
-- etldoc: osm_housenumber_point -> layer_housenumber:z14_ -- etldoc: osm_housenumber_point -> layer_housenumber:z14_
osm_id, osm_id,
geometry, geometry,
display_housenumber(housenumber) housenumber
FROM ( FROM osm_housenumber_point
SELECT WHERE zoom_level >= 14
osm_id, AND geometry && bbox;
geometry,
housenumber,
row_number() OVER(PARTITION BY concat(street, block_number, housenumber) ORDER BY has_name ASC) as rn
FROM osm_housenumber_point
WHERE 1=1
AND zoom_level >= 14
AND geometry && bbox
) t
WHERE rn = 1;
$$ LANGUAGE SQL STABLE $$ LANGUAGE SQL STABLE
-- STRICT -- STRICT
PARALLEL SAFE; PARALLEL SAFE;

View File

@ -3,18 +3,15 @@ layer:
description: | description: |
Everything in OpenStreetMap which contains a `addr:housenumber` tag useful for labelling housenumbers on a map. Everything in OpenStreetMap which contains a `addr:housenumber` tag useful for labelling housenumbers on a map.
This adds significant size to *z14*. For buildings the centroid of the building is used as housenumber. This adds significant size to *z14*. For buildings the centroid of the building is used as housenumber.
Duplicates within a tile are dropped if they have the same street/block_number (records without name tag are prioritized for preservation).
buffer_size: 8 buffer_size: 8
srs: +proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over srs: +proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over
fields: fields:
housenumber: Value of the [`addr:housenumber`](http://wiki.openstreetmap.org/wiki/Key:addr) tag. housenumber: Value of the [`addr:housenumber`](http://wiki.openstreetmap.org/wiki/Key:addr) tag.
If there are multiple values separated by semi-colons, the first and last value separated by a dash.
datasource: datasource:
geometry_field: geometry geometry_field: geometry
srid: 900913 srid: 900913
query: (SELECT geometry, housenumber FROM layer_housenumber(!bbox!, z(!scale_denominator!))) AS t query: (SELECT geometry, housenumber FROM layer_housenumber(!bbox!, z(!scale_denominator!))) AS t
schema: schema:
- ./housenumber_display.sql
- ./housenumber_centroid.sql - ./housenumber_centroid.sql
- ./housenumber.sql - ./housenumber.sql
datasources: datasources:

View File

@ -6,7 +6,7 @@ CREATE SCHEMA IF NOT EXISTS housenumber;
CREATE TABLE IF NOT EXISTS housenumber.osm_ids CREATE TABLE IF NOT EXISTS housenumber.osm_ids
( (
osm_id bigint PRIMARY KEY osm_id bigint
); );
-- etldoc: osm_housenumber_point -> osm_housenumber_point -- etldoc: osm_housenumber_point -> osm_housenumber_point
@ -22,16 +22,6 @@ $$
WHERE (full_update OR osm_id IN (SELECT osm_id FROM housenumber.osm_ids)) WHERE (full_update OR osm_id IN (SELECT osm_id FROM housenumber.osm_ids))
AND ST_GeometryType(geometry) <> 'ST_Point' AND ST_GeometryType(geometry) <> 'ST_Point'
AND ST_IsValid(geometry); AND ST_IsValid(geometry);
-- we don't need the exact name, just to know whether one is present
UPDATE osm_housenumber_point
SET has_name =
CASE
WHEN has_name = '' THEN '0'
ELSE '1'
END
WHERE (full_update OR osm_id IN (SELECT osm_id FROM housenumber.osm_ids));
$$ LANGUAGE SQL; $$ LANGUAGE SQL;
SELECT convert_housenumber_point(true); SELECT convert_housenumber_point(true);
@ -41,7 +31,11 @@ SELECT convert_housenumber_point(true);
CREATE OR REPLACE FUNCTION housenumber.store() RETURNS trigger AS CREATE OR REPLACE FUNCTION housenumber.store() RETURNS trigger AS
$$ $$
BEGIN BEGIN
INSERT INTO housenumber.osm_ids VALUES (NEW.osm_id) ON CONFLICT (osm_id) DO NOTHING; IF (tg_op = 'DELETE') THEN
INSERT INTO housenumber.osm_ids VALUES (OLD.osm_id);
ELSE
INSERT INTO housenumber.osm_ids VALUES (NEW.osm_id);
END IF;
RETURN NULL; RETURN NULL;
END; END;
$$ LANGUAGE plpgsql; $$ LANGUAGE plpgsql;
@ -66,11 +60,6 @@ DECLARE
t TIMESTAMP WITH TIME ZONE := clock_timestamp(); t TIMESTAMP WITH TIME ZONE := clock_timestamp();
BEGIN BEGIN
RAISE LOG 'Refresh housenumber'; RAISE LOG 'Refresh housenumber';
-- Analyze tracking and source tables before performing update
ANALYZE housenumber.osm_ids;
ANALYZE osm_housenumber_point;
PERFORM convert_housenumber_point(false); PERFORM convert_housenumber_point(false);
-- noinspection SqlWithoutWhere -- noinspection SqlWithoutWhere
DELETE FROM housenumber.osm_ids; DELETE FROM housenumber.osm_ids;
@ -83,17 +72,15 @@ END;
$$ LANGUAGE plpgsql; $$ LANGUAGE plpgsql;
CREATE TRIGGER trigger_store CREATE TRIGGER trigger_store
AFTER INSERT OR UPDATE AFTER INSERT OR UPDATE OR DELETE
ON osm_housenumber_point ON osm_housenumber_point
FOR EACH ROW FOR EACH ROW
WHEN (pg_trigger_depth() < 1)
EXECUTE PROCEDURE housenumber.store(); EXECUTE PROCEDURE housenumber.store();
CREATE TRIGGER trigger_flag CREATE TRIGGER trigger_flag
AFTER INSERT OR UPDATE AFTER INSERT OR UPDATE OR DELETE
ON osm_housenumber_point ON osm_housenumber_point
FOR EACH STATEMENT FOR EACH STATEMENT
WHEN (pg_trigger_depth() < 1)
EXECUTE PROCEDURE housenumber.flag(); EXECUTE PROCEDURE housenumber.flag();
CREATE CONSTRAINT TRIGGER trigger_refresh CREATE CONSTRAINT TRIGGER trigger_refresh

View File

@ -1,20 +0,0 @@
CREATE OR REPLACE FUNCTION display_housenumber_nonnumeric(raw_housenumber text)
RETURNS text AS $$
-- Find the position of the semicolon in the input string
-- and extract the first and last value
SELECT substring(raw_housenumber from 1 for position(';' in raw_housenumber) - 1)
|| '–'
|| substring(raw_housenumber from position(';' in raw_housenumber) + 1);
$$ LANGUAGE SQL IMMUTABLE;
CREATE OR REPLACE FUNCTION display_housenumber(raw_housenumber text)
RETURNS text AS $$
SELECT CASE
WHEN raw_housenumber !~ ';' THEN raw_housenumber
WHEN raw_housenumber ~ '[^0-9;]' THEN display_housenumber_nonnumeric(raw_housenumber)
ELSE
(SELECT min(value)::text || '–' || max(value)::text
FROM unnest(array_remove(string_to_array(raw_housenumber, ';'), '')::bigint[]) AS value)
END
$$ LANGUAGE SQL IMMUTABLE;
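
Illustrative calls of the helper above (assuming the en-dash separator; outputs shown as comments, not part of the original file):

SELECT display_housenumber('7');        -- '7' (no semicolon, returned as-is)
SELECT display_housenumber('1;2;3;4');  -- '1–4' (numeric list collapsed to min–max)
SELECT display_housenumber('12a;12c');  -- '12a–12c' (non-numeric: first and last value)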

View File

@ -12,15 +12,6 @@ tables:
- name: housenumber - name: housenumber
key: addr:housenumber key: addr:housenumber
type: string type: string
- name: street
key: addr:street
type: string
- name: block_number
key: addr:block_number
type: string
- name: has_name
key: name
type: string
type_mappings: type_mappings:
points: points:
addr:housenumber: addr:housenumber:

Binary image not shown (12 KiB before, 11 KiB after).

View File

@ -1,40 +0,0 @@
{
"layers": [
{
"id": "housenumber",
"type": "symbol",
"source": "openmaptiles",
"source-layer": "housenumber",
"minzoom": 17,
"layout": {
"text-font": [
"Noto Sans Regular"
],
"text-size": {
"stops": [
[
17,
9
],
[
22,
11
]
]
},
"text-field": "{housenumber}",
"text-padding": 3,
"text-line-height": -0.15,
"symbol-avoid-edges": false,
"text-allow-overlap": false,
"text-ignore-placement": false
},
"paint": {
"text-color": "rgba(102, 102, 102, 1)",
"text-halo-color": "rgba(255,255,255,0.8)",
"text-halo-width": 1
},
"order": 149
}
]
}

Binary image not shown (415 KiB before, 366 KiB after).

View File

@ -13,7 +13,7 @@ DROP TABLE IF EXISTS simplify_vw_z11 CASCADE;
DROP TABLE IF EXISTS simplify_vw_z12 CASCADE; DROP TABLE IF EXISTS simplify_vw_z12 CASCADE;
DROP TABLE IF EXISTS simplify_vw_z13 CASCADE; DROP TABLE IF EXISTS simplify_vw_z13 CASCADE;
-- etldoc: osm_landcover_polygon -> simplify_vw_z13 -- etldoc: osm_landcover_polygon -> osm_landcover_gen_z13
CREATE TABLE simplify_vw_z13 AS CREATE TABLE simplify_vw_z13 AS
( (
SELECT subclass, SELECT subclass,
@ -22,19 +22,32 @@ CREATE TABLE simplify_vw_z13 AS
ST_SimplifyVW(geometry, power(zres(13),2)), ST_SimplifyVW(geometry, power(zres(13),2)),
0.001)) AS geometry 0.001)) AS geometry
FROM osm_landcover_polygon FROM osm_landcover_polygon
WHERE ST_Area(geometry) > power(zres(12),2) WHERE ST_Area(geometry) > power(zres(10),2)
); );
CREATE INDEX ON simplify_vw_z13 USING GIST (geometry); CREATE INDEX ON simplify_vw_z13 USING GIST (geometry);
-- etldoc: simplify_vw_z13 -> osm_landcover_gen_z13
CREATE TABLE osm_landcover_gen_z13 AS CREATE TABLE osm_landcover_gen_z13 AS
( (
SELECT subclass,
ST_MakeValid(
(ST_dump(
ST_Union(geometry))).geom) AS geometry
FROM (
SELECT subclass,
ST_ClusterDBSCAN(geometry, eps := 0, minpoints := 1) over () AS cid, geometry
FROM simplify_vw_z13
WHERE ST_NPoints(geometry) < 50
AND subclass IN ('wood', 'forest')) union_geom50
GROUP BY subclass,
cid
UNION ALL
SELECT subclass, ST_MakeValid((ST_dump(ST_Union(geometry))).geom) AS geometry SELECT subclass, ST_MakeValid((ST_dump(ST_Union(geometry))).geom) AS geometry
FROM ( FROM (
SELECT subclass, SELECT subclass,
ST_ClusterDBSCAN(geometry, eps := 0, minpoints := 1) over () AS cid, geometry ST_ClusterDBSCAN(geometry, eps := 0, minpoints := 1) over () AS cid, geometry
FROM simplify_vw_z13 FROM simplify_vw_z13
WHERE ST_NPoints(geometry) < 300 WHERE ST_NPoints(geometry) >= 50
AND ST_NPoints(geometry) < 300
AND subclass IN ('wood', 'forest')) union_geom300 AND subclass IN ('wood', 'forest')) union_geom300
GROUP BY subclass, GROUP BY subclass,
cid cid
@ -49,7 +62,7 @@ CREATE TABLE osm_landcover_gen_z13 AS
CREATE INDEX ON osm_landcover_gen_z13 USING GIST (geometry); CREATE INDEX ON osm_landcover_gen_z13 USING GIST (geometry);
-- etldoc: simplify_vw_z13 -> simplify_vw_z12 -- etldoc: osm_landcover_gen_z13 -> osm_landcover_gen_z12
CREATE TABLE simplify_vw_z12 AS CREATE TABLE simplify_vw_z12 AS
( (
SELECT subclass, SELECT subclass,
@ -58,19 +71,32 @@ CREATE TABLE simplify_vw_z12 AS
ST_SimplifyVW(geometry, power(zres(12),2)), ST_SimplifyVW(geometry, power(zres(12),2)),
0.001)) AS geometry 0.001)) AS geometry
FROM simplify_vw_z13 FROM simplify_vw_z13
WHERE ST_Area(geometry) > power(zres(11),2) WHERE ST_Area(geometry) > power(zres(9),2)
); );
CREATE INDEX ON simplify_vw_z12 USING GIST (geometry); CREATE INDEX ON simplify_vw_z12 USING GIST (geometry);
-- etldoc: simplify_vw_z12 -> osm_landcover_gen_z12
CREATE TABLE osm_landcover_gen_z12 AS CREATE TABLE osm_landcover_gen_z12 AS
( (
SELECT subclass,
ST_MakeValid(
(ST_dump(
ST_Union(geometry))).geom) AS geometry
FROM (
SELECT subclass,
ST_ClusterDBSCAN(geometry, eps := 0, minpoints := 1) over () AS cid, geometry
FROM simplify_vw_z12
WHERE ST_NPoints(geometry) < 50
AND subclass IN ('wood', 'forest')) union_geom50
GROUP BY subclass,
cid
UNION ALL
SELECT subclass, ST_MakeValid((ST_dump(ST_Union(geometry))).geom) AS geometry SELECT subclass, ST_MakeValid((ST_dump(ST_Union(geometry))).geom) AS geometry
FROM ( FROM (
SELECT subclass, SELECT subclass,
ST_ClusterDBSCAN(geometry, eps := 0, minpoints := 1) over () AS cid, geometry ST_ClusterDBSCAN(geometry, eps := 0, minpoints := 1) over () AS cid, geometry
FROM simplify_vw_z12 FROM simplify_vw_z12
WHERE ST_NPoints(geometry) < 300 WHERE ST_NPoints(geometry) >= 50
AND ST_NPoints(geometry) < 300
AND subclass IN ('wood', 'forest')) union_geom300 AND subclass IN ('wood', 'forest')) union_geom300
GROUP BY subclass, GROUP BY subclass,
cid cid
@ -85,7 +111,7 @@ CREATE TABLE osm_landcover_gen_z12 AS
CREATE INDEX ON osm_landcover_gen_z12 USING GIST (geometry); CREATE INDEX ON osm_landcover_gen_z12 USING GIST (geometry);
-- etldoc: simplify_vw_z12 -> simplify_vw_z11 -- etldoc: osm_landcover_gen_z12 -> osm_landcover_gen_z11
CREATE TABLE simplify_vw_z11 AS CREATE TABLE simplify_vw_z11 AS
( (
SELECT subclass, SELECT subclass,
@ -94,19 +120,32 @@ CREATE TABLE simplify_vw_z11 AS
ST_SimplifyVW(geometry, power(zres(11),2)), ST_SimplifyVW(geometry, power(zres(11),2)),
0.001)) AS geometry 0.001)) AS geometry
FROM simplify_vw_z12 FROM simplify_vw_z12
WHERE ST_Area(geometry) > power(zres(10),2) WHERE ST_Area(geometry) > power(zres(8),2)
); );
CREATE INDEX ON simplify_vw_z11 USING GIST (geometry); CREATE INDEX ON simplify_vw_z11 USING GIST (geometry);
-- etldoc: simplify_vw_z11 -> osm_landcover_gen_z11
CREATE TABLE osm_landcover_gen_z11 AS CREATE TABLE osm_landcover_gen_z11 AS
( (
SELECT subclass,
ST_MakeValid(
(ST_dump(
ST_Union(geometry))).geom) AS geometry
FROM (
SELECT subclass,
ST_ClusterDBSCAN(geometry, eps := 0, minpoints := 1) over () AS cid, geometry
FROM simplify_vw_z11
WHERE ST_NPoints(geometry) < 50
AND subclass IN ('wood', 'forest')) union_geom50
GROUP BY subclass,
cid
UNION ALL
SELECT subclass, ST_MakeValid((ST_dump(ST_Union(geometry))).geom) AS geometry SELECT subclass, ST_MakeValid((ST_dump(ST_Union(geometry))).geom) AS geometry
FROM ( FROM (
SELECT subclass, SELECT subclass,
ST_ClusterDBSCAN(geometry, eps := 0, minpoints := 1) over () AS cid, geometry ST_ClusterDBSCAN(geometry, eps := 0, minpoints := 1) over () AS cid, geometry
FROM simplify_vw_z11 FROM simplify_vw_z11
WHERE ST_NPoints(geometry) < 300 WHERE ST_NPoints(geometry) >= 50
AND ST_NPoints(geometry) < 300
AND subclass IN ('wood', 'forest')) union_geom300 AND subclass IN ('wood', 'forest')) union_geom300
GROUP BY subclass, GROUP BY subclass,
cid cid
@ -121,7 +160,7 @@ CREATE TABLE osm_landcover_gen_z11 AS
CREATE INDEX ON osm_landcover_gen_z11 USING GIST (geometry); CREATE INDEX ON osm_landcover_gen_z11 USING GIST (geometry);
-- etldoc: simplify_vw_z11 -> simplify_vw_z10 -- etldoc: osm_landcover_gen_z11 -> osm_landcover_gen_z10
CREATE TABLE simplify_vw_z10 AS CREATE TABLE simplify_vw_z10 AS
( (
SELECT subclass, SELECT subclass,
@ -130,19 +169,32 @@ CREATE TABLE simplify_vw_z10 AS
ST_SimplifyVW(geometry, power(zres(10),2)), ST_SimplifyVW(geometry, power(zres(10),2)),
0.001)) AS geometry 0.001)) AS geometry
FROM simplify_vw_z11 FROM simplify_vw_z11
WHERE ST_Area(geometry) > power(zres(9),2) WHERE ST_Area(geometry) > power(zres(8),2)
); );
CREATE INDEX ON simplify_vw_z10 USING GIST (geometry); CREATE INDEX ON simplify_vw_z10 USING GIST (geometry);
-- etldoc: simplify_vw_z10 -> osm_landcover_gen_z10
CREATE TABLE osm_landcover_gen_z10 AS CREATE TABLE osm_landcover_gen_z10 AS
( (
SELECT subclass,
ST_MakeValid(
(ST_dump(
ST_Union(geometry))).geom) AS geometry
FROM (
SELECT subclass,
ST_ClusterDBSCAN(geometry, eps := 0, minpoints := 1) over () AS cid, geometry
FROM simplify_vw_z10
WHERE ST_NPoints(geometry) < 50
AND subclass IN ('wood', 'forest')) union_geom50
GROUP BY subclass,
cid
UNION ALL
SELECT subclass, ST_MakeValid((ST_dump(ST_Union(geometry))).geom) AS geometry SELECT subclass, ST_MakeValid((ST_dump(ST_Union(geometry))).geom) AS geometry
FROM ( FROM (
SELECT subclass, SELECT subclass,
ST_ClusterDBSCAN(geometry, eps := 0, minpoints := 1) over () AS cid, geometry ST_ClusterDBSCAN(geometry, eps := 0, minpoints := 1) over () AS cid, geometry
FROM simplify_vw_z10 FROM simplify_vw_z10
WHERE ST_NPoints(geometry) < 300 WHERE ST_NPoints(geometry) >= 50
AND ST_NPoints(geometry) < 300
AND subclass IN ('wood', 'forest')) union_geom300 AND subclass IN ('wood', 'forest')) union_geom300
GROUP BY subclass, GROUP BY subclass,
cid cid
@ -157,7 +209,7 @@ CREATE TABLE osm_landcover_gen_z10 AS
CREATE INDEX ON osm_landcover_gen_z10 USING GIST (geometry); CREATE INDEX ON osm_landcover_gen_z10 USING GIST (geometry);
-- etldoc: simplify_vw_z10 -> simplify_vw_z9 -- etldoc: osm_landcover_gen_z10 -> osm_landcover_gen_z9
CREATE TABLE simplify_vw_z9 AS CREATE TABLE simplify_vw_z9 AS
( (
SELECT subclass, SELECT subclass,
@ -166,19 +218,32 @@ CREATE TABLE simplify_vw_z9 AS
ST_SimplifyVW(geometry, power(zres(9),2)), ST_SimplifyVW(geometry, power(zres(9),2)),
0.001)) AS geometry 0.001)) AS geometry
FROM simplify_vw_z10 FROM simplify_vw_z10
WHERE ST_Area(geometry) > power(zres(8),2) WHERE ST_Area(geometry) > power(zres(7),2)
); );
CREATE INDEX ON simplify_vw_z9 USING GIST (geometry); CREATE INDEX ON simplify_vw_z9 USING GIST (geometry);
-- etldoc: simplify_vw_z9 -> osm_landcover_gen_z9
CREATE TABLE osm_landcover_gen_z9 AS CREATE TABLE osm_landcover_gen_z9 AS
( (
SELECT subclass,
ST_MakeValid(
(ST_dump(
ST_Union(geometry))).geom) AS geometry
FROM (
SELECT subclass,
ST_ClusterDBSCAN(geometry, eps := 0, minpoints := 1) over () AS cid, geometry
FROM simplify_vw_z9
WHERE ST_NPoints(geometry) < 50
AND subclass IN ('wood', 'forest')) union_geom50
GROUP BY subclass,
cid
UNION ALL
SELECT subclass, ST_MakeValid((ST_dump(ST_Union(geometry))).geom) AS geometry SELECT subclass, ST_MakeValid((ST_dump(ST_Union(geometry))).geom) AS geometry
FROM ( FROM (
SELECT subclass, SELECT subclass,
ST_ClusterDBSCAN(geometry, eps := 0, minpoints := 1) over () AS cid, geometry ST_ClusterDBSCAN(geometry, eps := 0, minpoints := 1) over () AS cid, geometry
FROM simplify_vw_z9 FROM simplify_vw_z9
WHERE ST_NPoints(geometry) < 300 WHERE ST_NPoints(geometry) >= 50
AND ST_NPoints(geometry) < 300
AND subclass IN ('wood', 'forest')) union_geom300 AND subclass IN ('wood', 'forest')) union_geom300
GROUP BY subclass, GROUP BY subclass,
cid cid
@ -205,7 +270,7 @@ CREATE TABLE osm_landcover_gen_z9 AS
CREATE INDEX ON osm_landcover_gen_z9 USING GIST (geometry); CREATE INDEX ON osm_landcover_gen_z9 USING GIST (geometry);
-- etldoc: simplify_vw_z9 -> simplify_vw_z8 -- etldoc: osm_landcover_gen_z9 -> osm_landcover_gen_z8
CREATE TABLE simplify_vw_z8 AS CREATE TABLE simplify_vw_z8 AS
( (
SELECT subclass, SELECT subclass,
@ -214,11 +279,10 @@ CREATE TABLE simplify_vw_z8 AS
ST_SimplifyVW(geometry, power(zres(8),2)), ST_SimplifyVW(geometry, power(zres(8),2)),
0.001)) AS geometry 0.001)) AS geometry
FROM simplify_vw_z9 FROM simplify_vw_z9
WHERE ST_Area(geometry) > power(zres(7),2) WHERE ST_Area(geometry) > power(zres(6),2)
); );
CREATE INDEX ON simplify_vw_z8 USING GIST (geometry); CREATE INDEX ON simplify_vw_z8 USING GIST (geometry);
-- etldoc: simplify_vw_z8 -> osm_landcover_gen_z8
CREATE TABLE osm_landcover_gen_z8 AS CREATE TABLE osm_landcover_gen_z8 AS
( (
SELECT subclass, SELECT subclass,
@ -231,7 +295,6 @@ SELECT subclass,
ST_ClusterDBSCAN(geometry, eps := 0, minpoints := 1) OVER () AS cid, ST_ClusterDBSCAN(geometry, eps := 0, minpoints := 1) OVER () AS cid,
geometry geometry
FROM simplify_vw_z8 FROM simplify_vw_z8
WHERE subclass IN ('wood', 'forest')
) union_geom ) union_geom
GROUP BY subclass, GROUP BY subclass,
cid cid
@ -245,7 +308,7 @@ SELECT subclass,
CREATE INDEX ON osm_landcover_gen_z8 USING GIST (geometry); CREATE INDEX ON osm_landcover_gen_z8 USING GIST (geometry);
-- etldoc: simplify_vw_z8 -> simplify_vw_z7 -- etldoc: osm_landcover_gen_z8 -> osm_landcover_gen_z7
CREATE TABLE simplify_vw_z7 AS CREATE TABLE simplify_vw_z7 AS
( (
SELECT subclass, SELECT subclass,
@ -254,11 +317,10 @@ CREATE TABLE simplify_vw_z7 AS
ST_SimplifyVW(geometry, power(zres(7),2)), ST_SimplifyVW(geometry, power(zres(7),2)),
0.001)) AS geometry 0.001)) AS geometry
FROM simplify_vw_z8 FROM simplify_vw_z8
WHERE ST_Area(geometry) > power(zres(6),2) WHERE ST_Area(geometry) > power(zres(5),2)
); );
CREATE INDEX ON simplify_vw_z7 USING GIST (geometry); CREATE INDEX ON simplify_vw_z7 USING GIST (geometry);
-- etldoc: simplify_vw_z7 -> osm_landcover_gen_z7
CREATE TABLE osm_landcover_gen_z7 AS CREATE TABLE osm_landcover_gen_z7 AS
( (
SELECT subclass, SELECT subclass,

View File

@ -1,12 +1,5 @@
layer: layer:
id: "landcover" id: "landcover"
requires:
tables:
- ne_10m_antarctic_ice_shelves_polys
- ne_10m_glaciated_areas
- ne_50m_antarctic_ice_shelves_polys
- ne_50m_glaciated_areas
- ne_110m_glaciated_areas
description: | description: |
Landcover is used to describe the physical material at the surface of the earth. At lower zoom levels this is Landcover is used to describe the physical material at the surface of the earth. At lower zoom levels this is
from Natural Earth data for glaciers and ice shelves and at higher zoom levels the landcover is [implied by OSM tags](http://wiki.openstreetmap.org/wiki/Landcover). The most common use case for this layer from Natural Earth data for glaciers and ice shelves and at higher zoom levels the landcover is [implied by OSM tags](http://wiki.openstreetmap.org/wiki/Landcover). The most common use case for this layer
@ -26,7 +19,7 @@ layer:
rock: rock:
subclass: ['bare_rock', 'scree'] subclass: ['bare_rock', 'scree']
grass: grass:
subclass: ['fell', 'flowerbed', 'grassland', 'heath', 'scrub', 'shrubbery', 'tundra', 'grass', 'meadow', 'allotments', 'park', 'village_green', 'recreation_ground', 'garden', 'golf_course'] subclass: ['fell', 'grassland', 'heath', 'scrub', 'tundra', 'grass', 'meadow', 'allotments', 'park', 'village_green', 'recreation_ground', 'garden', 'golf_course']
wetland: wetland:
subclass: ['wetland', 'bog', 'swamp', 'wet_meadow', 'marsh', 'reedbed', 'saltern', 'tidalflat', 'saltmarsh', 'mangrove'] subclass: ['wetland', 'bog', 'swamp', 'wet_meadow', 'marsh', 'reedbed', 'saltern', 'tidalflat', 'saltmarsh', 'mangrove']
sand: sand:
@ -46,11 +39,9 @@ layer:
- bog - bog
- dune - dune
- scrub - scrub
- shrubbery
- farm - farm
- farmland - farmland
- fell - fell
- flowerbed
- forest - forest
- garden - garden
- glacier - glacier

View File

@ -18,7 +18,6 @@ tables:
- farm - farm
- farmland - farmland
- orchard - orchard
- flowerbed
- plant_nursery - plant_nursery
- vineyard - vineyard
- grass - grass
@ -27,6 +26,8 @@ tables:
- forest - forest
- village_green - village_green
- recreation_ground - recreation_ground
# There are 600 parks tagged with landuse=park instead of leisure=park
- park
natural: natural:
- wood - wood
- wetland - wetland
@ -34,7 +35,6 @@ tables:
- grassland - grassland
- heath - heath
- scrub - scrub
- shrubbery
- tundra - tundra
- glacier - glacier
- bare_rock - bare_rock

Binary image not shown (68 KiB before, 66 KiB after).

View File

@ -1,482 +0,0 @@
{
"layers": [
{
"id": "landcover_classes",
"type": "fill",
"source": "openmaptiles",
"source-layer": "landcover",
"maxzoom": 13,
"layout": {
"visibility": "visible"
},
"paint": {
"fill-color": [
"match",
[
"get",
"class"
],
"farmland",
"#eef0d5",
"wood",
"#add19e",
"rock",
"#eee5dc",
"grass",
"#cdebb0",
"sand",
"#f5e9c6",
"wetland",
"#add19e",
"#000"
],
"fill-opacity": {
"stops": [
[
7,
0.5
],
[
10,
1
]
]
},
"fill-antialias": false
},
"metadata": {},
"filter": [
"all",
[
"in",
"class",
"farmland",
"wood",
"rock",
"grass",
"wetland",
"sand"
]
],
"order": 4
},
{
"id": "landcover_class_outline",
"type": "line",
"source": "openmaptiles",
"source-layer": "landcover",
"layout": {
"visibility": "visible"
},
"paint": {
"line-color": "#c7c9ae",
"line-width": 0.5
},
"filter": [
"all",
[
"in",
"class",
"farmland"
]
],
"order": 5
},
{
"id": "landcover_park",
"type": "fill",
"source": "openmaptiles",
"source-layer": "landcover",
"minzoom": 13,
"layout": {
"visibility": "visible"
},
"paint": {
"fill-color": "#c8facc",
"fill-antialias": true
},
"filter": [
"all",
[
"==",
"subclass",
"park"
]
],
"order": 6
},
{
"id": "landcover_subclasses",
"type": "fill",
"source": "openmaptiles",
"source-layer": "landcover",
"minzoom": 13,
"layout": {
"visibility": "visible"
},
"paint": {
"fill-color": [
"match",
[
"get",
"subclass"
],
"allotments",
"#c9e1bf",
"bare_rock",
"#eee5dc",
"beach",
"#fff1ba",
"bog",
"#d6d99f",
"dune",
"#f5e9c6",
"scrub",
"#c8d7ab",
"farm",
"#f5dcba",
"farmland",
"#eef0d5",
"flowerbed",
"#cdebb0",
"forest",
"#add19e",
"grass",
"#cdebb0",
"grassland",
"#cdebb0",
"golf_course",
"#def6c0",
"heath",
"#d6d99f",
"mangrove",
"#c8d7ab",
"meadow",
"#cdebb0",
"orchard",
"#aedfa3",
"park",
"#c8facc",
"garden",
"#cdebb0",
"plant_nursery",
"#aedfa3",
"recreation_ground",
"#d5ffd9",
"reedbed",
"#cdebb0",
"saltmarsh",
"#cdebb0",
"sand",
"#f5e9c6",
"scree",
"#eee5dc",
"swamp",
"#add19e",
"tidalflat",
"#DED6CF",
"village_green",
"#cdebb0",
"vineyard",
"#aedfa3",
"wet_meadow",
"#cdebb0",
"wetland",
"#add19e",
"wood",
"#add19e",
"marsh",
"#ff0",
"#FFFFFF"
],
"fill-antialias": true
},
"filter": [
"all",
[
"in",
"subclass",
"allotments",
"bare_rock",
"beach",
"dune",
"scrub",
"farm",
"farmland",
"flowerbed",
"forest",
"garden",
"grass",
"grassland",
"golf_course",
"heath",
"meadow",
"orchard",
"plant_nursery",
"recreation_ground",
"reedbed",
"saltmarsh",
"sand",
"scree",
"swamp",
"tidalflat",
"tundra",
"village_green",
"vineyard",
"wet_meadow",
"wetland",
"wood"
]
],
"order": 7
},
{
"id": "landcover_subclass_patterns",
"type": "fill",
"source": "openmaptiles",
"source-layer": "landcover",
"minzoom": 13,
"layout": {
"visibility": "visible"
},
"paint": {
"fill-opacity": [
"match",
[
"get",
"subclass"
],
"beach",
0.4,
"forest",
0.4,
"bare_rock",
0.3,
"scrub",
0.6,
"garden",
0.6,
"scree",
0.3,
"wood",
0.4,
1
],
"fill-pattern": [
"match",
[
"get",
"subclass"
],
"allotments",
"allotments",
"bare_rock",
"rock_overlay",
"beach",
"beach",
"bog",
"wetland_bog",
"scrub",
"scrub",
"flowerbed",
"flowerbed_high_zoom",
"forest",
"leaftype_unknown",
"garden",
"plant_nursery",
"mangrove",
"wetland_mangrove",
"marsh",
"wetland_marsh",
"orchard",
"orchard",
"plant_nursery",
"plant_nursery",
"reedbed",
"wetland_reed",
"saltmarsh",
"wetland_marsh",
"scree",
"scree_overlay",
"swamp",
"wetland_swamp",
"vineyard",
"vineyard",
"wet_meadow",
"wetland_marsh",
"wetland",
"wetland",
"wood",
"leaftype_unknown",
""
]
},
"metadata": {},
"filter": [
"all",
[
"in",
"subclass",
"allotments",
"bare_rock",
"beach",
"bog",
"dune",
"scrub",
"farm",
"farmland",
"flowerbed",
"forest",
"garden",
"grass",
"grassland",
"golf_course",
"heath",
"mangrove",
"marsh",
"meadow",
"orchard",
"park",
"plant_nursery",
"recreation_ground",
"reedbed",
"saltern",
"saltmarsh",
"sand",
"scree",
"swamp",
"village_green",
"vineyard",
"wet_meadow",
"wetland",
"wood"
]
],
"order": 8
},
{
"id": "landcover_subclass_outline",
"type": "line",
"source": "openmaptiles",
"source-layer": "landcover",
"minzoom": 15,
"layout": {
"visibility": "visible"
},
"paint": {
"line-color": [
"match",
[
"get",
"subclass"
],
"allotments",
"#B1C6A8",
"farm",
"#d1b48c",
"farmland",
"#c7c9ae",
"recreation_ground",
"#3c6640",
"#000"
],
"line-width": [
"match",
[
"get",
"subclass"
],
"recreation_ground",
0.3,
0.5
],
"line-opacity": 1
},
"filter": [
"all",
[
"in",
"subclass",
"allotments",
"farm",
"farmland",
"recreation_ground"
]
],
"order": 9
},
{
"id": "landcover_ice",
"type": "fill",
"source": "openmaptiles",
"source-layer": "landcover",
"minzoom": 5,
"paint": {
"fill-color": "#ddecec",
"fill-antialias": false
},
"metadata": {},
"filter": [
"all",
[
"in",
"class",
"ice"
]
],
"order": 10
},
{
"id": "landcover_ice_outline",
"type": "line",
"source": "openmaptiles",
"source-layer": "landcover",
"minzoom": 5,
"layout": {
"visibility": "visible"
},
"paint": {
"line-color": "#9cf",
"line-width": {
"stops": [
[
5,
1
],
[
10,
1.5
]
]
},
"line-dasharray": {
"stops": [
[
5,
[
1,
0
]
],
[
10,
[
4,
2
]
]
]
}
},
"filter": [
"all",
[
"in",
"class",
"ice"
]
],
"order": 11
}
]
}

View File

@ -1,3 +0,0 @@
{
"layers": []
}

View File

@ -1,10 +0,0 @@
-- Unify class names that represent the same type of feature
CREATE OR REPLACE FUNCTION landuse_unify(class text) RETURNS text LANGUAGE plpgsql
AS
$$
BEGIN
RETURN CASE
WHEN class='grave_yard' THEN 'cemetery'
ELSE class END;
END;
$$;
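-- Example: landuse_unify('grave_yard') returns 'cemetery'; any other class is returned unchanged.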

Binary image not shown (333 KiB before, 176 KiB after).

View File

@ -37,202 +37,6 @@ WHERE scalerank <= 2
) /* DELAY_MATERIALIZED_VIEW_CREATION */ ; ) /* DELAY_MATERIALIZED_VIEW_CREATION */ ;
CREATE INDEX IF NOT EXISTS ne_50m_urban_areas_gen_z4_idx ON ne_50m_urban_areas_gen_z4 USING gist (geometry); CREATE INDEX IF NOT EXISTS ne_50m_urban_areas_gen_z4_idx ON ne_50m_urban_areas_gen_z4 USING gist (geometry);
-- etldoc: osm_landuse_polygon_gen_z6 -> osm_landuse_polygon_gen_z6_union
-- etldoc: osm_residential_gen_z6 -> osm_landuse_polygon_gen_z6_union
CREATE OR REPLACE VIEW osm_landuse_polygon_gen_z6_union AS
(
SELECT osm_id,
geometry,
landuse,
amenity,
leisure,
tourism,
place,
waterway,
man_made
FROM osm_landuse_polygon_gen_z6
WHERE landuse <> 'residential'
UNION ALL
SELECT NULL::bigint AS osm_id,
geometry,
'residential' AS landuse,
'' AS amenity,
'' AS leisure,
'' AS tourism,
'' AS place,
'' AS waterway,
'' AS man_made
FROM osm_residential_gen_z6
);
-- etldoc: osm_landuse_polygon_gen_z7 -> osm_landuse_polygon_gen_z7_union
-- etldoc: osm_residential_gen_z7 -> osm_landuse_polygon_gen_z7_union
CREATE OR REPLACE VIEW osm_landuse_polygon_gen_z7_union AS
(
SELECT osm_id,
geometry,
landuse,
amenity,
leisure,
tourism,
place,
waterway,
man_made
FROM osm_landuse_polygon_gen_z7
WHERE landuse <> 'residential'
UNION ALL
SELECT NULL::bigint AS osm_id,
geometry,
'residential' AS landuse,
'' AS amenity,
'' AS leisure,
'' AS tourism,
'' AS place,
'' AS waterway,
'' AS man_made
FROM osm_residential_gen_z7
);
-- etldoc: osm_landuse_polygon_gen_z8 -> osm_landuse_polygon_gen_z8_union
-- etldoc: osm_residential_gen_z8 -> osm_landuse_polygon_gen_z8_union
CREATE OR REPLACE VIEW osm_landuse_polygon_gen_z8_union AS
(
SELECT osm_id,
geometry,
landuse,
amenity,
leisure,
tourism,
place,
waterway,
man_made
FROM osm_landuse_polygon_gen_z8
WHERE landuse <> 'residential'
UNION ALL
SELECT NULL::bigint AS osm_id,
geometry,
'residential' AS landuse,
'' AS amenity,
'' AS leisure,
'' AS tourism,
'' AS place,
'' AS waterway,
'' AS man_made
FROM osm_residential_gen_z8
);
-- etldoc: osm_landuse_polygon_gen_z9 -> osm_landuse_polygon_gen_z9_union
-- etldoc: osm_residential_gen_z9 -> osm_landuse_polygon_gen_z9_union
CREATE OR REPLACE VIEW osm_landuse_polygon_gen_z9_union AS
(
SELECT osm_id,
geometry,
landuse,
amenity,
leisure,
tourism,
place,
waterway,
man_made
FROM osm_landuse_polygon_gen_z9
WHERE landuse <> 'residential'
UNION ALL
SELECT NULL::bigint AS osm_id,
geometry,
'residential' AS landuse,
'' AS amenity,
'' AS leisure,
'' AS tourism,
'' AS place,
'' AS waterway,
'' AS man_made
FROM osm_residential_gen_z9
);
-- etldoc: osm_landuse_polygon_gen_z10 -> osm_landuse_polygon_gen_z10_union
-- etldoc: osm_residential_gen_z10 -> osm_landuse_polygon_gen_z10_union
CREATE OR REPLACE VIEW osm_landuse_polygon_gen_z10_union AS
(
SELECT osm_id,
geometry,
landuse,
amenity,
leisure,
tourism,
place,
waterway,
man_made
FROM osm_landuse_polygon_gen_z10
WHERE landuse <> 'residential'
UNION ALL
SELECT NULL::bigint AS osm_id,
geometry,
'residential' AS landuse,
'' AS amenity,
'' AS leisure,
'' AS tourism,
'' AS place,
'' AS waterway,
'' AS man_made
FROM osm_residential_gen_z10
);
-- etldoc: osm_landuse_polygon_gen_z11 -> osm_landuse_polygon_gen_z11_union
-- etldoc: osm_residential_gen_z11 -> osm_landuse_polygon_gen_z11_union
CREATE OR REPLACE VIEW osm_landuse_polygon_gen_z11_union AS
(
SELECT osm_id,
geometry,
landuse,
amenity,
leisure,
tourism,
place,
waterway,
man_made
FROM osm_landuse_polygon_gen_z11
WHERE landuse <> 'residential'
UNION ALL
SELECT NULL::bigint AS osm_id,
geometry,
'residential' AS landuse,
'' AS amenity,
'' AS leisure,
'' AS tourism,
'' AS place,
'' AS waterway,
'' AS man_made
FROM osm_residential_gen_z11
);
-- etldoc: osm_landuse_polygon_gen_z12 -> osm_landuse_polygon_gen_z12_union
-- etldoc: osm_residential_gen_z12 -> osm_landuse_polygon_gen_z12_union
CREATE OR REPLACE VIEW osm_landuse_polygon_gen_z12_union AS
(
SELECT osm_id,
geometry,
landuse,
amenity,
leisure,
tourism,
place,
waterway,
man_made
FROM osm_landuse_polygon_gen_z12
WHERE landuse <> 'residential'
UNION ALL
SELECT NULL::bigint AS osm_id,
geometry,
'residential' AS landuse,
'' AS amenity,
'' AS leisure,
'' AS tourism,
'' AS place,
'' AS waterway,
'' AS man_made
FROM osm_residential_gen_z12
);
-- etldoc: layer_landuse[shape=record fillcolor=lightpink, style="rounded,filled", -- etldoc: layer_landuse[shape=record fillcolor=lightpink, style="rounded,filled",
-- etldoc: label="layer_landuse |<z4> z4|<z5> z5|<z6> z6|<z7> z7|<z8> z8|<z9> z9|<z10> z10|<z11> z11|<z12> z12|<z13> z13|<z14> z14+" ] ; -- etldoc: label="layer_landuse |<z4> z4|<z5> z5|<z6> z6|<z7> z7|<z8> z8|<z9> z9|<z10> z10|<z11> z11|<z12> z12|<z13> z13|<z14> z14+" ] ;
@ -247,7 +51,6 @@ AS
$$ $$
SELECT osm_id, SELECT osm_id,
geometry, geometry,
landuse_unify(
COALESCE( COALESCE(
NULLIF(landuse, ''), NULLIF(landuse, ''),
NULLIF(amenity, ''), NULLIF(amenity, ''),
@ -256,7 +59,7 @@ SELECT osm_id,
NULLIF(place, ''), NULLIF(place, ''),
NULLIF(waterway, ''), NULLIF(waterway, ''),
NULLIF(man_made, '') NULLIF(man_made, '')
)) AS class ) AS class
FROM ( FROM (
-- etldoc: ne_50m_urban_areas_gen_z4 -> layer_landuse:z4 -- etldoc: ne_50m_urban_areas_gen_z4 -> layer_landuse:z4
SELECT osm_id, SELECT osm_id,
@ -284,7 +87,7 @@ FROM (
FROM ne_50m_urban_areas_gen_z5 FROM ne_50m_urban_areas_gen_z5
WHERE zoom_level = 5 WHERE zoom_level = 5
UNION ALL UNION ALL
-- etldoc: osm_landuse_polygon_gen_z6_union -> layer_landuse:z6 -- etldoc: osm_landuse_polygon_gen_z6 -> layer_landuse:z6
SELECT osm_id, SELECT osm_id,
geometry, geometry,
landuse, landuse,
@ -294,10 +97,10 @@ FROM (
place, place,
waterway, waterway,
man_made man_made
FROM osm_landuse_polygon_gen_z6_union FROM osm_landuse_polygon_gen_z6
WHERE zoom_level = 6 WHERE zoom_level = 6
UNION ALL UNION ALL
-- etldoc: osm_landuse_polygon_gen_z7_union -> layer_landuse:z7 -- etldoc: osm_landuse_polygon_gen_z7 -> layer_landuse:z7
SELECT osm_id, SELECT osm_id,
geometry, geometry,
landuse, landuse,
@ -307,10 +110,10 @@ FROM (
place, place,
waterway, waterway,
man_made man_made
FROM osm_landuse_polygon_gen_z7_union FROM osm_landuse_polygon_gen_z7
WHERE zoom_level = 7 WHERE zoom_level = 7
UNION ALL UNION ALL
-- etldoc: osm_landuse_polygon_gen_z8_union -> layer_landuse:z8 -- etldoc: osm_landuse_polygon_gen_z8 -> layer_landuse:z8
SELECT osm_id, SELECT osm_id,
geometry, geometry,
landuse, landuse,
@ -320,10 +123,10 @@ FROM (
place, place,
waterway, waterway,
man_made man_made
FROM osm_landuse_polygon_gen_z8_union FROM osm_landuse_polygon_gen_z8
WHERE zoom_level = 8 WHERE zoom_level = 8
UNION ALL UNION ALL
-- etldoc: osm_landuse_polygon_gen_z9_union -> layer_landuse:z9 -- etldoc: osm_landuse_polygon_gen_z9 -> layer_landuse:z9
SELECT osm_id, SELECT osm_id,
geometry, geometry,
landuse, landuse,
@ -333,10 +136,10 @@ FROM (
place, place,
waterway, waterway,
man_made man_made
FROM osm_landuse_polygon_gen_z9_union FROM osm_landuse_polygon_gen_z9
WHERE zoom_level = 9 WHERE zoom_level = 9
UNION ALL UNION ALL
-- etldoc: osm_landuse_polygon_gen_z10_union -> layer_landuse:z10 -- etldoc: osm_landuse_polygon_gen_z10 -> layer_landuse:z10
SELECT osm_id, SELECT osm_id,
geometry, geometry,
landuse, landuse,
@ -346,10 +149,10 @@ FROM (
place, place,
waterway, waterway,
man_made man_made
FROM osm_landuse_polygon_gen_z10_union FROM osm_landuse_polygon_gen_z10
WHERE zoom_level = 10 WHERE zoom_level = 10
UNION ALL UNION ALL
-- etldoc: osm_landuse_polygon_gen_z11_union -> layer_landuse:z11 -- etldoc: osm_landuse_polygon_gen_z11 -> layer_landuse:z11
SELECT osm_id, SELECT osm_id,
geometry, geometry,
landuse, landuse,
@ -359,10 +162,10 @@ FROM (
place, place,
waterway, waterway,
man_made man_made
FROM osm_landuse_polygon_gen_z11_union FROM osm_landuse_polygon_gen_z11
WHERE zoom_level = 11 WHERE zoom_level = 11
UNION ALL UNION ALL
-- etldoc: osm_landuse_polygon_gen_z12_union -> layer_landuse:z12 -- etldoc: osm_landuse_polygon_gen_z12 -> layer_landuse:z12
SELECT osm_id, SELECT osm_id,
geometry, geometry,
landuse, landuse,
@ -372,7 +175,7 @@ FROM (
place, place,
waterway, waterway,
man_made man_made
FROM osm_landuse_polygon_gen_z12_union FROM osm_landuse_polygon_gen_z12
WHERE zoom_level = 12 WHERE zoom_level = 12
UNION ALL UNION ALL
-- etldoc: osm_landuse_polygon_gen_z13 -> layer_landuse:z13 -- etldoc: osm_landuse_polygon_gen_z13 -> layer_landuse:z13

View File

@ -1,8 +1,5 @@
layer: layer:
id: "landuse" id: "landuse"
requires:
tables:
- ne_50m_urban_areas
description: | description: |
Landuse is used to describe use of land by humans. At lower zoom levels this is Landuse is used to describe use of land by humans. At lower zoom levels this is
from Natural Earth data for residential (urban) areas and at higher zoom levels mostly OSM `landuse` tags. from Natural Earth data for residential (urban) areas and at higher zoom levels mostly OSM `landuse` tags.
@ -52,13 +49,10 @@ layer:
- prison - prison
- wastewater_plant - wastewater_plant
- water_works - water_works
- quarry
datasource: datasource:
geometry_field: geometry geometry_field: geometry
query: (SELECT geometry, class FROM layer_landuse(!bbox!, z(!scale_denominator!))) AS t query: (SELECT geometry, class FROM layer_landuse(!bbox!, z(!scale_denominator!))) AS t
schema: schema:
- ./class.sql
- ./prep_landuse.sql
- ./landuse.sql - ./landuse.sql
datasources: datasources:
- type: imposm3 - type: imposm3

View File

@ -77,7 +77,6 @@ tables:
- railway - railway
- cemetery - cemetery
- military - military
- quarry
# zoning # zoning
- residential - residential
- commercial - commercial
@ -99,7 +98,6 @@ tables:
- motorcycle_parking - motorcycle_parking
- bicycle_parking - bicycle_parking
- animal_training - animal_training
- grave_yard
leisure: leisure:
- stadium - stadium
- pitch - pitch

Binary image not shown (63 KiB before, 69 KiB after).

View File

@ -1,176 +0,0 @@
DROP TABLE IF EXISTS cluster_zres14;
CREATE TABLE cluster_zres14 AS
(
WITH single_geom AS (
SELECT (ST_Dump(geometry)).geom AS geometry
FROM osm_landuse_polygon
WHERE landuse='residential'
)
SELECT ST_ClusterDBSCAN(geometry, eps := zres(14), minpoints := 1) over () AS cid,
geometry
FROM single_geom
);
CREATE INDEX ON cluster_zres14 USING gist(geometry);
DROP TABLE IF EXISTS cluster_zres14_union;
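-- Merge each DBSCAN cluster into a single shape: snap to a grid, buffer outwards by zres(14),
-- union, then buffer back inwards by the same amount, closing small gaps between nearby
-- residential polygons.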
CREATE TABLE cluster_zres14_union AS (
SELECT ST_Buffer(
ST_Union(
ST_Buffer(
ST_SnapToGrid(geometry, 0.01)
, zres(14), 'join=mitre'
)
),-zres(14), 'join=mitre'
) AS geometry
FROM cluster_zres14
GROUP BY cid
);
CREATE INDEX ON cluster_zres14_union USING gist(geometry);
DROP TABLE IF EXISTS cluster_zres12;
CREATE TABLE cluster_zres12 AS
(
WITH single_geom AS (
SELECT (ST_Dump(geometry)).geom AS geometry
FROM osm_landuse_polygon
WHERE landuse='residential'
)
SELECT ST_ClusterDBSCAN(geometry, eps := zres(12), minpoints := 1) over () AS cid,
geometry
FROM single_geom
);
CREATE INDEX ON cluster_zres12 USING gist(geometry);
DROP TABLE IF EXISTS cluster_zres12_union;
CREATE TABLE cluster_zres12_union AS
(
SELECT ST_Buffer(
ST_Union(
ST_Buffer(
ST_SnapToGrid(geometry, 1)
, zres(12), 'join=mitre'
)
), -zres(12), 'join=mitre'
) AS geometry
FROM cluster_zres12
GROUP BY cid
);
CREATE INDEX ON cluster_zres12_union USING gist(geometry);
DROP TABLE IF EXISTS cluster_zres9;
CREATE TABLE cluster_zres9 AS
(
WITH single_geom AS (
SELECT (ST_Dump(geometry)).geom AS geometry
FROM osm_landuse_polygon
WHERE landuse='residential'
)
SELECT ST_ClusterDBSCAN(geometry, eps := zres(9), minpoints := 1) over () AS cid,
geometry
FROM single_geom
);
CREATE INDEX ON cluster_zres9 USING gist(geometry);
DROP TABLE IF EXISTS cluster_zres9_union;
CREATE TABLE cluster_zres9_union AS
(
SELECT ST_Buffer(
ST_Union(
ST_Buffer(
ST_SnapToGrid(geometry, 1)
, zres(9), 'join=mitre'
)
), -zres(9), 'join=mitre'
) AS geometry
FROM cluster_zres9
GROUP BY cid
);
CREATE INDEX ON cluster_zres9_union USING gist(geometry);
-- For z6
-- etldoc: osm_landuse_polygon -> osm_residential_gen_z6
DROP TABLE IF EXISTS osm_residential_gen_z6 CASCADE;
CREATE TABLE osm_residential_gen_z6 AS
(
SELECT ST_SimplifyVW(geometry, power(zres(6), 2)) AS geometry
FROM cluster_zres9_union
WHERE ST_Area(geometry) > power(zres(6), 2)
);
CREATE INDEX ON osm_residential_gen_z6 USING gist(geometry);
-- For z7
-- etldoc: osm_landuse_polygon -> osm_residential_gen_z7
DROP TABLE IF EXISTS osm_residential_gen_z7 CASCADE;
CREATE TABLE osm_residential_gen_z7 AS
(
SELECT ST_SimplifyVW(geometry, power(zres(7), 2)) AS geometry
FROM cluster_zres12_union
WHERE ST_Area(geometry) > power(zres(6), 2)
);
CREATE INDEX ON osm_residential_gen_z7 USING gist(geometry);
-- For z8
-- etldoc: osm_landuse_polygon -> osm_residential_gen_z8
DROP TABLE IF EXISTS osm_residential_gen_z8 CASCADE;
CREATE TABLE osm_residential_gen_z8 AS
(
SELECT ST_SimplifyVW(geometry, power(zres(8), 2)) AS geometry
FROM cluster_zres12_union
WHERE ST_Area(geometry) > power(zres(7), 2)
);
CREATE INDEX ON osm_residential_gen_z8 USING gist(geometry);
-- For z9
-- etldoc: osm_landuse_polygon -> osm_residential_gen_z9
DROP TABLE IF EXISTS osm_residential_gen_z9 CASCADE;
CREATE TABLE osm_residential_gen_z9 AS
(
SELECT ST_SimplifyVW(geometry, power(zres(9), 2)) AS geometry
FROM cluster_zres12_union
WHERE ST_Area(geometry) > power(zres(9), 2)
);
CREATE INDEX ON osm_residential_gen_z9 USING gist(geometry);
-- For z10
-- etldoc: osm_landuse_polygon -> osm_residential_gen_z10
DROP TABLE IF EXISTS osm_residential_gen_z10 CASCADE;
CREATE TABLE osm_residential_gen_z10 AS
(
SELECT ST_SimplifyVW(geometry, power(zres(10), 2)) AS geometry
FROM cluster_zres14_union
WHERE ST_Area(geometry) > power(zres(10), 2)
);
CREATE INDEX ON osm_residential_gen_z10 USING gist(geometry);
-- For z11
-- etldoc: osm_landuse_polygon -> osm_residential_gen_z11
DROP TABLE IF EXISTS osm_residential_gen_z11 CASCADE;
CREATE TABLE osm_residential_gen_z11 AS
(
SELECT ST_SimplifyVW(geometry, power(zres(11), 2)) AS geometry
FROM cluster_zres14_union
WHERE ST_Area(geometry) > power(zres(11), 2)
);
CREATE INDEX ON osm_residential_gen_z11 USING gist(geometry);
-- For z12
-- etldoc: osm_landuse_polygon -> osm_residential_gen_z12
DROP TABLE IF EXISTS osm_residential_gen_z12 CASCADE;
CREATE TABLE osm_residential_gen_z12 AS
(
SELECT ST_SimplifyVW(geometry, power(zres(12), 2)) AS geometry
FROM cluster_zres14_union
WHERE ST_Area(geometry) > power(zres(12), 2)
);
CREATE INDEX ON osm_residential_gen_z12 USING gist(geometry);

View File

@ -1,369 +0,0 @@
{
"layers": [
{
"id": "landuse_classes",
"type": "fill",
"source": "openmaptiles",
"source-layer": "landuse",
"minzoom": 7,
"layout": {
"visibility": "visible"
},
"paint": {
"fill-color": [
"match",
[
"get",
"class"
],
"railway",
"#ebdbe8",
"residential",
"#e0dfdf",
"cemetery",
"#aacbaf",
"military",
"#fceaea",
"commercial",
"#f2dad9",
"industrial",
"#ebdbe8",
"garages",
"#dfddce",
"retail",
"#ffd6d1",
"bus_station",
"#e9e7e2",
"school",
"#ffffe5",
"university",
"#ffffe5",
"kindergarten",
"#ffffe5",
"college",
"#ffffe5",
"hospital",
"#ffffe5",
"stadium",
"#d5ffd9",
"pitch",
"#aae0cb",
"playground",
"#d5ffd9",
"track",
"#aae0cb",
"dam",
"#adadad",
"#000"
],
"fill-opacity": 1
},
"metadata": {},
"filter": [
"all",
[
"in",
"class",
"railway",
"cemetery",
"military",
"residential",
"commercial",
"industrial",
"garages",
"retail",
"bus_station",
"school",
"university",
"kindergarten",
"college",
"hospital",
"stadium",
"pitch",
"playground",
"track",
"dam"
],
[
"==",
"$type",
"Polygon"
]
],
"order": 1
},
{
"id": "landuse_residential",
"type": "fill",
"source": "openmaptiles",
"source-layer": "landuse",
"minzoom": 6,
"maxzoom": 24,
"layout": {
"visibility": "visible"
},
"paint": {
"fill-color": {
"stops": [
[
7,
"#d0d0d0"
],
[
11,
"#dddddd"
],
[
12,
"#e0dfdf"
]
]
}
},
"metadata": {},
"filter": [
"all",
[
"in",
"class",
"residential",
"suburbs",
"neighbourhood"
]
],
"order": 2
},
{
"id": "landuse_class_pattern",
"type": "fill",
"source": "openmaptiles",
"source-layer": "landuse",
"layout": {
"visibility": "visible"
},
"paint": {
"fill-color": "#000000",
"fill-opacity": 1,
"fill-pattern": [
"match",
[
"get",
"class"
],
"military",
"military_red_hatch",
"cemetery",
"grave_yard_generic",
""
]
},
"metadata": {},
"filter": [
"all",
[
"in",
"class",
"military",
"cemetery"
]
],
"order": 25
},
{
"id": "landuse_class_outline",
"type": "line",
"source": "openmaptiles",
"source-layer": "landuse",
"minzoom": 13,
"layout": {
"visibility": "visible"
},
"paint": {
"line-color": [
"match",
[
"get",
"class"
],
"railway",
"#c6b3c3",
"military",
"#ff5555",
"residential",
"#b9b9b9",
"commercial",
"#f2dad9",
"industrial",
"#c6b3c3",
"retail",
"#d99c95",
"school",
"#A6A68C",
"university",
"#A6A68C",
"kindergarten",
"#A6A68C",
"college",
"#A6A68C",
"hospital",
"#A6A68C",
"stadium",
"#7ca680",
"pitch",
"#7aaa97",
"playground",
"#3c6640",
"track",
"#7aaa96",
"theme_park",
"#660033",
"zoo",
"#660033",
"dam",
"#444444",
"#000"
],
"line-width": [
"match",
[
"get",
"class"
],
"railway",
0.7,
"military",
2,
"residential",
0.5,
"commercial",
0.5,
"industrial",
0.5,
"retail",
0.5,
"school",
0.3,
"university",
0.3,
"kindergarten",
0.3,
"college",
0.3,
"hospital",
0.3,
"stadium",
0.3,
"pitch",
0.5,
"playground",
0.3,
"track",
0.5,
"theme_park",
1,
"zoo",
1,
"dam",
2,
1
],
"line-offset": [
"match",
[
"get",
"class"
],
"military",
1,
0
],
"line-opacity": [
"match",
[
"get",
"class"
],
"military",
0.24,
1
]
},
"filter": [
"all",
[
"in",
"class",
"railway",
"military",
"residential",
"commercial",
"industrial",
"retail",
"school",
"university",
"kindergarten",
"college",
"hospital",
"stadium",
"pitch",
"playground",
"track",
"theme_park",
"zoo",
"dam"
]
],
"order": 26
},
{
"id": "landuse_class_themepark",
"type": "line",
"source": "openmaptiles",
"source-layer": "landuse",
"minzoom": 13,
"layout": {
"line-cap": "square",
"line-join": "round",
"visibility": "visible"
},
"paint": {
"line-color": "#660033",
"line-width": {
"stops": [
[
9,
3.5
],
[
14,
5.5
]
]
},
"line-offset": 2,
"line-opacity": {
"stops": [
[
9,
0.1
],
[
12,
0.3
]
]
}
},
"filter": [
"all",
[
"in",
"class",
"theme_park",
"zoo"
]
],
"order": 27
}
]
}

Binary file not shown (before: 46 KiB, after: 16 KiB).

View File

@ -29,32 +29,3 @@ tables:
natural: natural:
- peak - peak
- volcano - volcano
- saddle
# etldoc: imposm3 -> osm_mountain_linestring
mountain_linestring:
type: linestring
columns:
- name: osm_id
type: id
- name: geometry
type: geometry
- name: name
key: name
type: string
- name: name_en
key: name:en
type: string
- name: name_de
key: name:de
type: string
- name: tags
type: hstore_tags
- name: wikipedia
key: wikipedia
type: string
mapping:
natural:
- ridge
- cliff
- arete

Binary file not shown (before: 14 KiB, after: 4.7 KiB).

View File

@ -1,24 +1,5 @@
-- etldoc: osm_peak_point -> peak_point
-- etldoc: ne_10m_admin_0_countries -> peak_point
CREATE OR REPLACE VIEW peak_point AS
(
SELECT pp.osm_id,
pp.geometry,
pp.name,
pp.name_en,
pp.name_de,
pp.tags,
pp.ele,
ne.iso_a2,
pp.wikipedia
FROM osm_peak_point pp, ne_10m_admin_0_countries ne
WHERE ST_Intersects(pp.geometry, ne.geometry)
);
-- etldoc: layer_mountain_peak[shape=record fillcolor=lightpink, -- etldoc: layer_mountain_peak[shape=record fillcolor=lightpink,
-- etldoc: style="rounded,filled", label="layer_mountain_peak | <z7_> z7+ | <z13_> z13+" ] ; -- etldoc: style="rounded,filled", label="layer_mountain_peak | <z7_> z7+" ] ;
CREATE OR REPLACE FUNCTION layer_mountain_peak(bbox geometry, CREATE OR REPLACE FUNCTION layer_mountain_peak(bbox geometry,
zoom_level integer, zoom_level integer,
@ -34,13 +15,12 @@ CREATE OR REPLACE FUNCTION layer_mountain_peak(bbox geometry,
tags hstore, tags hstore,
ele int, ele int,
ele_ft int, ele_ft int,
customary_ft int,
"rank" int "rank" int
) )
AS AS
$$ $$
SELECT SELECT
-- etldoc: peak_point -> layer_mountain_peak:z7_ -- etldoc: osm_peak_point -> layer_mountain_peak:z7_
osm_id, osm_id,
geometry, geometry,
name, name,
@ -50,50 +30,6 @@ SELECT
tags, tags,
ele::int, ele::int,
ele_ft::int, ele_ft::int,
customary_ft,
rank::int
FROM (
SELECT osm_id,
geometry,
NULLIF(name, '') as name,
COALESCE(NULLIF(name_en, ''), NULLIF(name, '')) AS name_en,
COALESCE(NULLIF(name_de, ''), NULLIF(name, ''), NULLIF(name_en, '')) AS name_de,
tags,
substring(ele FROM E'^(-?\\d+)(\\D|$)')::int AS ele,
round(substring(ele FROM E'^(-?\\d+)(\\D|$)')::int * 3.2808399)::int AS ele_ft,
CASE WHEN iso_a2 = 'US' THEN 1 END AS customary_ft,
row_number() OVER (
PARTITION BY LabelGrid(geometry, 100 * pixel_width)
ORDER BY (
(CASE WHEN ele <> '' THEN substring(ele FROM E'^(-?\\d+)(\\D|$)')::int ELSE 0 END) +
(CASE WHEN wikipedia <> '' THEN 10000 ELSE 0 END) +
(CASE WHEN name <> '' THEN 10000 ELSE 0 END)
) DESC
)::int AS "rank"
FROM peak_point
WHERE geometry && bbox
AND (
(ele <> '' AND ele ~ E'^-?\\d{1,4}(\\D|$)')
OR name <> ''
)
) AS ranked_peaks
WHERE zoom_level >= 7
AND (rank <= 5 OR zoom_level >= 14)
UNION ALL
SELECT
-- etldoc: osm_mountain_linestring -> layer_mountain_peak:z13_
osm_id,
geometry,
name,
name_en,
name_de,
tags->'natural' AS class,
tags,
NULL AS ele,
NULL AS ele_ft,
NULL AS customary_ft,
rank::int rank::int
FROM ( FROM (
SELECT osm_id, SELECT osm_id,
@ -102,17 +38,23 @@ FROM (
COALESCE(NULLIF(name_en, ''), name) AS name_en, COALESCE(NULLIF(name_en, ''), name) AS name_en,
COALESCE(NULLIF(name_de, ''), name, name_en) AS name_de, COALESCE(NULLIF(name_de, ''), name, name_en) AS name_de,
tags, tags,
substring(ele FROM E'^(-?\\d+)(\\D|$)')::int AS ele,
round(substring(ele FROM E'^(-?\\d+)(\\D|$)')::int * 3.2808399)::int AS ele_ft,
row_number() OVER ( row_number() OVER (
PARTITION BY LabelGrid(geometry, 100 * pixel_width) PARTITION BY LabelGrid(geometry, 100 * pixel_width)
ORDER BY ( ORDER BY (
(CASE WHEN wikipedia <> '' THEN 10000 ELSE 0 END) + substring(ele FROM E'^(-?\\d+)(\\D|$)')::int +
(CASE WHEN name <> '' THEN 10000 ELSE 0 END) (CASE WHEN NULLIF(wikipedia, '') IS NOT NULL THEN 10000 ELSE 0 END) +
(CASE WHEN NULLIF(name, '') IS NOT NULL THEN 10000 ELSE 0 END)
) DESC ) DESC
)::int AS "rank" )::int AS "rank"
FROM osm_mountain_linestring FROM osm_peak_point
WHERE geometry && bbox WHERE geometry && bbox
) AS ranked_mountain_linestring AND ele IS NOT NULL
WHERE zoom_level >= 13 AND ele ~ E'^-?\\d{1,4}(\\D|$)'
) AS ranked_peaks
WHERE zoom_level >= 7
AND (rank <= 5 OR zoom_level >= 14)
ORDER BY "rank" ASC; ORDER BY "rank" ASC;
$$ LANGUAGE SQL STABLE $$ LANGUAGE SQL STABLE
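Both versions of the function parse the free-form `ele` value with the same regular expression and derive feet with a fixed conversion factor. A small illustration of those expressions (the sample value '1042 m' is made up):
SELECT substring('1042 m' FROM E'^(-?\\d+)(\\D|$)')::int AS ele;                             -- 1042
SELECT round(substring('1042 m' FROM E'^(-?\\d+)(\\D|$)')::int * 3.2808399)::int AS ele_ft;  -- 3419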

View File

@ -1,44 +1,30 @@
layer: layer:
id: "mountain_peak" id: "mountain_peak"
requires:
tables:
- ne_10m_admin_0_countries
description: | description: |
[Natural peaks](http://wiki.openstreetmap.org/wiki/Tag:natural%3Dpeak) [Natural peaks](http://wiki.openstreetmap.org/wiki/Tag:natural%3Dpeak)
buffer_size: 64 buffer_size: 64
srs: +proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over srs: +proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over
fields: fields:
name: The OSM [`name`](http://wiki.openstreetmap.org/wiki/Key:name) value of the peak. Language-specific values are in `name:xx`. name: The OSM [`name`](http://wiki.openstreetmap.org/wiki/Key:name) value of the peak.
name_en: English name `name:en` if available, otherwise `name`. This is deprecated and will be removed in a future release in favor of `name:en`. name_en: English name `name:en` if available, otherwise `name`.
name_de: German name `name:de` if available, otherwise `name` or `name:en`. This is deprecated and will be removed in a future release in favor of `name:de`. name_de: German name `name:de` if available, otherwise `name` or `name:en`.
class: class:
description: | description: |
Use the **class** to differentiate between natural objects. Use the **class** to differentiate between mountain peak and volcano.
values: values:
- peak - peak
- volcano - volcano
- saddle
- ridge
- cliff
- arete
ele: Elevation (`ele`) in meters. ele: Elevation (`ele`) in meters.
ele_ft: Elevation (`ele`) in feet. ele_ft: Elevation (`ele`) in feet.
customary_ft:
description: |
Value 1 for peaks in location where feet is used as customary unit (USA).
values:
- 1
- NULL
rank: Rank of the peak within one tile (starting at 1 that is the most important peak). rank: Rank of the peak within one tile (starting at 1 that is the most important peak).
datasource: datasource:
geometry_field: geometry geometry_field: geometry
key_field: osm_id key_field: osm_id
key_field_as_attribute: no key_field_as_attribute: no
srid: 900913 srid: 900913
query: (SELECT osm_id, geometry, name, name_en, name_de, {name_languages}, class, ele, ele_ft, customary_ft, rank FROM layer_mountain_peak(!bbox!, z(!scale_denominator!), !pixel_width!)) AS t query: (SELECT osm_id, geometry, name, name_en, name_de, {name_languages}, class, ele, ele_ft, rank FROM layer_mountain_peak(!bbox!, z(!scale_denominator!), !pixel_width!)) AS t
schema: schema:
- ./update_peak_point.sql - ./update_peak_point.sql
- ./update_mountain_linestring.sql
- ./mountain_peak.sql - ./mountain_peak.sql
datasources: datasources:
- type: imposm3 - type: imposm3

View File

@ -1,101 +0,0 @@
{
"layers": [
{
"id": "mountain_peak",
"type": "symbol",
"source": "openmaptiles",
"source-layer": "mountain_peak",
"maxzoom": 16,
"filter": [
"all",
[
"!in",
"class",
"cliff",
"volcano"
]
],
"layout": {
"text-size": 10,
"icon-image": "peak",
"text-field": {
"stops": [
[
6,
" "
],
[
12,
"{name} {ele}m"
]
]
},
"text-anchor": "top",
"text-offset": [
0,
0.5
],
"text-max-width": 6,
"text-line-height": 1.1,
"text-font": [
"Noto Sans Regular",
"Noto Sans Italic"
]
},
"paint": {
"text-color": "#6e441e",
"text-halo-color": "rgba(255, 255, 255, .8)",
"text-halo-width": 1
},
"order": 192
},
{
"id": "mountain_peak_volcano",
"type": "symbol",
"source": "openmaptiles",
"source-layer": "mountain_peak",
"maxzoom": 16,
"filter": [
"all",
[
"==",
"class",
"volcano"
]
],
"layout": {
"text-size": 10,
"icon-image": "volcano",
"text-field": {
"stops": [
[
6,
" "
],
[
12,
"{name} {ele}m"
]
]
},
"text-anchor": "top",
"text-offset": [
0,
0.5
],
"text-max-width": 6,
"text-line-height": 1.1,
"text-font": [
"Noto Sans Regular",
"Noto Sans Italic"
]
},
"paint": {
"text-color": "#d40000",
"text-halo-color": "rgba(255, 255, 255, .8)",
"text-halo-width": 1
},
"order": 193
}
]
}

View File

@ -1,89 +0,0 @@
DROP TRIGGER IF EXISTS trigger_flag ON osm_mountain_linestring;
DROP TRIGGER IF EXISTS trigger_store ON osm_mountain_linestring;
DROP TRIGGER IF EXISTS trigger_refresh ON mountain_linestring.updates;
CREATE SCHEMA IF NOT EXISTS mountain_linestring;
CREATE TABLE IF NOT EXISTS mountain_linestring.osm_ids
(
osm_id bigint PRIMARY KEY
);
-- etldoc: osm_mountain_linestring -> osm_mountain_linestring
CREATE OR REPLACE FUNCTION update_osm_mountain_linestring(full_update boolean) RETURNS void AS
$$
UPDATE osm_mountain_linestring
SET tags = update_tags(tags, geometry)
WHERE (full_update OR osm_id IN (SELECT osm_id FROM mountain_linestring.osm_ids))
AND COALESCE(tags -> 'name:latin', tags -> 'name:nonlatin', tags -> 'name_int') IS NULL
AND tags != update_tags(tags, geometry)
$$ LANGUAGE SQL;
SELECT update_osm_mountain_linestring(true);
-- Handle updates
CREATE OR REPLACE FUNCTION mountain_linestring.store() RETURNS trigger AS
$$
BEGIN
INSERT INTO mountain_linestring.osm_ids VALUES (NEW.osm_id) ON CONFLICT (osm_id) DO NOTHING;
RETURN NULL;
END;
$$ LANGUAGE plpgsql;
CREATE TABLE IF NOT EXISTS mountain_linestring.updates
(
id serial PRIMARY KEY,
t text,
UNIQUE (t)
);
CREATE OR REPLACE FUNCTION mountain_linestring.flag() RETURNS trigger AS
$$
BEGIN
INSERT INTO mountain_linestring.updates(t) VALUES ('y') ON CONFLICT(t) DO NOTHING;
RETURN NULL;
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION mountain_linestring.refresh() RETURNS trigger AS
$$
DECLARE
t TIMESTAMP WITH TIME ZONE := clock_timestamp();
BEGIN
RAISE LOG 'Refresh mountain_linestring';
-- Analyze tracking and source tables before performing update
ANALYZE mountain_linestring.osm_ids;
ANALYZE osm_mountain_linestring;
PERFORM update_osm_mountain_linestring(false);
-- noinspection SqlWithoutWhere
DELETE FROM mountain_linestring.osm_ids;
-- noinspection SqlWithoutWhere
DELETE FROM mountain_linestring.updates;
RAISE LOG 'Refresh mountain_linestring done in %', age(clock_timestamp(), t);
RETURN NULL;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER trigger_store
AFTER INSERT OR UPDATE
ON osm_mountain_linestring
FOR EACH ROW
WHEN (pg_trigger_depth() < 1)
EXECUTE PROCEDURE mountain_linestring.store();
CREATE TRIGGER trigger_flag
AFTER INSERT OR UPDATE
ON osm_mountain_linestring
FOR EACH STATEMENT
WHEN (pg_trigger_depth() < 1)
EXECUTE PROCEDURE mountain_linestring.flag();
CREATE CONSTRAINT TRIGGER trigger_refresh
AFTER INSERT
ON mountain_linestring.updates
INITIALLY DEFERRED
FOR EACH ROW
EXECUTE PROCEDURE mountain_linestring.refresh();
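The trigger chain removed above works in three steps: the row-level trigger_store records changed osm_ids, the statement-level trigger_flag inserts a single marker row into mountain_linestring.updates, and the deferred constraint trigger runs refresh() once at commit. A hypothetical illustration of that flow (osm_id 12345 and the tag value are made up):
BEGIN;
UPDATE osm_mountain_linestring
   SET tags = tags || hstore('name:en', 'Example Ridge')
 WHERE osm_id = 12345;
COMMIT;  -- the deferred trigger_refresh fires here, runs update_osm_mountain_linestring(false)
         -- for the recorded ids, then clears mountain_linestring.osm_ids and .updates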

View File

@ -6,7 +6,7 @@ CREATE SCHEMA IF NOT EXISTS mountain_peak_point;
CREATE TABLE IF NOT EXISTS mountain_peak_point.osm_ids CREATE TABLE IF NOT EXISTS mountain_peak_point.osm_ids
( (
osm_id bigint PRIMARY KEY osm_id bigint
); );
-- etldoc: osm_peak_point -> osm_peak_point -- etldoc: osm_peak_point -> osm_peak_point
@ -26,7 +26,11 @@ SELECT update_osm_peak_point(true);
CREATE OR REPLACE FUNCTION mountain_peak_point.store() RETURNS trigger AS CREATE OR REPLACE FUNCTION mountain_peak_point.store() RETURNS trigger AS
$$ $$
BEGIN BEGIN
INSERT INTO mountain_peak_point.osm_ids VALUES (NEW.osm_id) ON CONFLICT (osm_id) DO NOTHING; IF (tg_op = 'DELETE') THEN
INSERT INTO mountain_peak_point.osm_ids VALUES (OLD.osm_id);
ELSE
INSERT INTO mountain_peak_point.osm_ids VALUES (NEW.osm_id);
END IF;
RETURN NULL; RETURN NULL;
END; END;
$$ LANGUAGE plpgsql; $$ LANGUAGE plpgsql;
@ -51,11 +55,6 @@ DECLARE
t TIMESTAMP WITH TIME ZONE := clock_timestamp(); t TIMESTAMP WITH TIME ZONE := clock_timestamp();
BEGIN BEGIN
RAISE LOG 'Refresh mountain_peak_point'; RAISE LOG 'Refresh mountain_peak_point';
-- Analyze tracking and source tables before performing update
ANALYZE mountain_peak_point.osm_ids;
ANALYZE osm_peak_point;
PERFORM update_osm_peak_point(false); PERFORM update_osm_peak_point(false);
-- noinspection SqlWithoutWhere -- noinspection SqlWithoutWhere
DELETE FROM mountain_peak_point.osm_ids; DELETE FROM mountain_peak_point.osm_ids;
@ -68,17 +67,15 @@ END;
$$ LANGUAGE plpgsql; $$ LANGUAGE plpgsql;
CREATE TRIGGER trigger_store CREATE TRIGGER trigger_store
AFTER INSERT OR UPDATE AFTER INSERT OR UPDATE OR DELETE
ON osm_peak_point ON osm_peak_point
FOR EACH ROW FOR EACH ROW
WHEN (pg_trigger_depth() < 1)
EXECUTE PROCEDURE mountain_peak_point.store(); EXECUTE PROCEDURE mountain_peak_point.store();
CREATE TRIGGER trigger_flag CREATE TRIGGER trigger_flag
AFTER INSERT OR UPDATE AFTER INSERT OR UPDATE OR DELETE
ON osm_peak_point ON osm_peak_point
FOR EACH STATEMENT FOR EACH STATEMENT
WHEN (pg_trigger_depth() < 1)
EXECUTE PROCEDURE mountain_peak_point.flag(); EXECUTE PROCEDURE mountain_peak_point.flag();
CREATE CONSTRAINT TRIGGER trigger_refresh CREATE CONSTRAINT TRIGGER trigger_refresh

Binary file not shown (before: 194 KiB, after: 144 KiB).

View File

@ -1,45 +1,33 @@
generalized_tables: generalized_tables:
# etldoc: osm_park_polygon_gen_z5 -> osm_park_polygon_gen_z4
park_polygon_gen_z4:
source: park_polygon_gen_z5
sql_filter: area>power(ZRES3,2)
tolerance: ZRES4
# etldoc: osm_park_polygon_gen_z6 -> osm_park_polygon_gen_z5
park_polygon_gen_z5:
source: park_polygon_gen_z6
sql_filter: area>power(ZRES4,2)
tolerance: ZRES5
# etldoc: osm_park_polygon_gen_z7 -> osm_park_polygon_gen_z6 # etldoc: osm_park_polygon_gen_z7 -> osm_park_polygon_gen_z6
park_polygon_gen_z6: park_polygon_gen_z6:
source: park_polygon_gen_z7 source: park_polygon_gen_z7
sql_filter: area>power(ZRES5,2) sql_filter: area>power(ZRES5,2)
tolerance: ZRES6 tolerance: ZRES8
# etldoc: osm_park_polygon_gen_z8 -> osm_park_polygon_gen_z7 # etldoc: osm_park_polygon_gen_z8 -> osm_park_polygon_gen_z7
park_polygon_gen_z7: park_polygon_gen_z7:
source: park_polygon_gen_z8 source: park_polygon_gen_z8
sql_filter: area>power(ZRES6,2) sql_filter: area>power(ZRES6,2)
tolerance: ZRES7 tolerance: ZRES8
# etldoc: osm_park_polygon_gen_z9 -> osm_park_polygon_gen_z8 # etldoc: osm_park_polygon_gen_z9 -> osm_park_polygon_gen_z8
park_polygon_gen_z8: park_polygon_gen_z8:
source: park_polygon_gen_z9 source: park_polygon_gen_z9
sql_filter: area>power(ZRES7,2) sql_filter: area>power(ZRES7,2)
tolerance: ZRES8 tolerance: ZRES9
# etldoc: osm_park_polygon_gen_z10 -> osm_park_polygon_gen_z9 # etldoc: osm_park_polygon_gen_z10 -> osm_park_polygon_gen_z9
park_polygon_gen_z9: park_polygon_gen_z9:
source: park_polygon_gen_z10 source: park_polygon_gen_z10
sql_filter: area>power(ZRES8,2) sql_filter: area>power(ZRES8,2)
tolerance: ZRES9 tolerance: ZRES10
# etldoc: osm_park_polygon_gen_z11 -> osm_park_polygon_gen_z10 # etldoc: osm_park_polygon_gen_z11 -> osm_park_polygon_gen_z10
park_polygon_gen_z10: park_polygon_gen_z10:
source: park_polygon_gen_z11 source: park_polygon_gen_z11
sql_filter: area>power(ZRES9,2) sql_filter: area>power(ZRES9,2)
tolerance: ZRES10 tolerance: ZRES11
# etldoc: osm_park_polygon_gen_z12 -> osm_park_polygon_gen_z11 # etldoc: osm_park_polygon_gen_z12 -> osm_park_polygon_gen_z11
park_polygon_gen_z11: park_polygon_gen_z11:

Binary file not shown (before: 13 KiB, after: 13 KiB).

View File

@ -1,5 +1,5 @@
-- etldoc: layer_park[shape=record fillcolor=lightpink, style="rounded,filled", -- etldoc: layer_park[shape=record fillcolor=lightpink, style="rounded,filled",
-- etldoc: label="layer_park |<z4> z4 |<z5> z5 |<z6> z6 |<z7> z7 |<z8> z8 |<z9> z9 |<z10> z10 |<z11> z11 |<z12> z12|<z13> z13|<z14> z14+" ] ; -- etldoc: label="layer_park |<z6> z6 |<z7> z7 |<z8> z8 |<z9> z9 |<z10> z10 |<z11> z11 |<z12> z12|<z13> z13|<z14> z14+" ] ;
CREATE OR REPLACE FUNCTION layer_park(bbox geometry, zoom_level int, pixel_width numeric) CREATE OR REPLACE FUNCTION layer_park(bbox geometry, zoom_level int, pixel_width numeric)
RETURNS TABLE RETURNS TABLE
@ -18,9 +18,9 @@ $$
SELECT osm_id, SELECT osm_id,
geometry, geometry,
class, class,
NULLIF(name, '') AS name, name,
NULLIF(name_en, '') AS name_en, name_en,
NULLIF(name_de, '') AS name_de, name_de,
tags, tags,
rank rank
FROM ( FROM (
@ -29,42 +29,14 @@ FROM (
COALESCE( COALESCE(
LOWER(REPLACE(NULLIF(protection_title, ''), ' ', '_')), LOWER(REPLACE(NULLIF(protection_title, ''), ' ', '_')),
NULLIF(boundary, ''), NULLIF(boundary, ''),
NULLIF(leisure, '')) NULLIF(leisure, '')
AS class, ) AS class,
name, name,
name_en, name_en,
name_de, name_de,
tags, tags,
NULL::int AS rank NULL::int AS rank
FROM ( FROM (
-- etldoc: osm_park_polygon_dissolve_z4 -> layer_park:z4
SELECT NULL::int AS osm_id,
geometry,
NULL AS name,
NULL AS name_en,
NULL AS name_de,
NULL AS tags,
NULL AS leisure,
NULL AS boundary,
NULL AS protection_title
FROM osm_park_polygon_dissolve_z4
WHERE zoom_level = 4
AND geometry && bbox
UNION ALL
-- etldoc: osm_park_polygon_gen_z5 -> layer_park:z5
SELECT osm_id,
geometry,
name,
name_en,
name_de,
tags,
leisure,
boundary,
protection_title
FROM osm_park_polygon_gen_z5
WHERE zoom_level = 5
AND geometry && bbox
UNION ALL
-- etldoc: osm_park_polygon_gen_z6 -> layer_park:z6 -- etldoc: osm_park_polygon_gen_z6 -> layer_park:z6
SELECT osm_id, SELECT osm_id,
geometry, geometry,
@ -212,23 +184,6 @@ FROM (
area DESC area DESC
)::int AS "rank" )::int AS "rank"
FROM ( FROM (
-- etldoc: osm_park_polygon_gen_z5 -> layer_park:z5
SELECT osm_id,
geometry_point,
name,
name_en,
name_de,
tags,
leisure,
boundary,
protection_title,
area
FROM osm_park_polygon_gen_z5
WHERE zoom_level = 5
AND geometry_point && bbox
AND area > 70000*2^(20-zoom_level)
UNION ALL
-- etldoc: osm_park_polygon_gen_z6 -> layer_park:z6 -- etldoc: osm_park_polygon_gen_z6 -> layer_park:z6
SELECT osm_id, SELECT osm_id,
geometry_point, geometry_point,

View File

@ -1,15 +1,15 @@
layer: layer:
id: "park" id: "park"
description: | description: |
The park layer in OpenMapTiles contains natural and protected areas from OpenStreetMap, The park layer contains parks from OpenStreetMap tagged with
such as parks tagged with [`boundary=national_park`](https://wiki.openstreetmap.org/wiki/Tag:boundary%3Dnational_park), [`boundary=national_park`](http://wiki.openstreetmap.org/wiki/Tag:boundary%3Dnational_park),
[`boundary=protected_area`](https://wiki.openstreetmap.org/wiki/Tag:boundary%3Dprotected_area), [`boundary=protected_area`](http://wiki.openstreetmap.org/wiki/Tag:boundary%3Dprotected_area),
or [`leisure=nature_reserve`](https://wiki.openstreetmap.org/wiki/Tag:leisure%3Dnature_reserve). or [`leisure=nature_reserve`](http://wiki.openstreetmap.org/wiki/Tag:leisure%3Dnature_reserve).
buffer_size: 4 buffer_size: 4
fields: fields:
class: class:
description: | description: |
Use the **class** to differentiate between different kinds of features in the `parks` layer. Use the **class** to differentiate between different parks.
The class for `boundary=protected_area` parks is the lower-case of the The class for `boundary=protected_area` parks is the lower-case of the
[`protection_title`](http://wiki.openstreetmap.org/wiki/key:protection_title) [`protection_title`](http://wiki.openstreetmap.org/wiki/key:protection_title)
value with blanks replaced by `_`. value with blanks replaced by `_`.
@ -17,9 +17,9 @@ layer:
`nature_reserve` is the class of `protection_title=Nature Reserve` and `leisure=nature_reserve`. `nature_reserve` is the class of `protection_title=Nature Reserve` and `leisure=nature_reserve`.
The class for other [`protection_title`](http://wiki.openstreetmap.org/wiki/key:protection_title) The class for other [`protection_title`](http://wiki.openstreetmap.org/wiki/key:protection_title)
values is similarly assigned. values is similarly assigned.
name: The OSM [`name`](http://wiki.openstreetmap.org/wiki/Key:name) value of the park (point features only). Language-specific values are in `name:xx`. name: The OSM [`name`](http://wiki.openstreetmap.org/wiki/Key:name) value of the park (point features only).
name_en: English name `name:en` if available, otherwise `name` (point features only). This is deprecated and will be removed in a future release in favor of `name:en`. name_en: English name `name:en` if available, otherwise `name` (point features only).
name_de: German name `name:de` if available, otherwise `name` or `name:en` (point features only). This is deprecated and will be removed in a future release in favor of `name:de`. name_de: German name `name:de` if available, otherwise `name` or `name:en` (point features only).
rank: Rank of the park within one tile, starting at 1 that is the most important park (point features only). rank: Rank of the park within one tile, starting at 1 that is the most important park (point features only).
datasource: datasource:
geometry_field: geometry geometry_field: geometry

View File

@ -1,111 +0,0 @@
{
"layers": [
{
"id": "national_parks",
"type": "line",
"source": "openmaptiles",
"source-layer": "park",
"minzoom": 8,
"layout": {
"visibility": "visible"
},
"paint": {
"line-color": "rgba(154, 199, 136, 1)",
"line-width": {
"base": 1,
"stops": [
[
8,
1.2
],
[
9,
1.5
],
[
10,
3.6
],
[
24,
3.6
]
]
},
"line-offset": 1,
"line-opacity": 0.8
},
"order": 20
},
{
"id": "national_parks_thin",
"type": "line",
"source": "openmaptiles",
"source-layer": "park",
"minzoom": 10,
"layout": {
"visibility": "none"
},
"paint": {
"line-color": "rgba(93, 156, 76, 1)",
"line-width": 1.5
},
"order": 21
},
{
"id": "park-national",
"type": "symbol",
"source": "openmaptiles",
"source-layer": "park",
"minzoom": 7,
"maxzoom": 12,
"layout": {
"text-font": [
"Noto Sans Italic"
],
"text-size": 12,
"text-field": "{name:latin}{name:nonlatin}",
"visibility": "visible",
"symbol-spacing": 150,
"text-allow-overlap": false
},
"paint": {
"text-color": {
"stops": [
[
7,
"rgba(70, 164, 70, 1)"
],
[
10,
"#008000"
]
]
},
"text-halo-blur": 0.1,
"text-halo-color": {
"stops": [
[
7,
"rgba(241, 255, 234, 1)"
],
[
10,
"rgba(208, 250, 200, 1)"
]
]
},
"text-halo-width": 0.3
},
"filter": [
"all",
[
"<=",
"rank",
2
]
],
"order": 190
}
]
}

View File

@ -16,26 +16,6 @@ ALTER TABLE osm_park_polygon_gen_z7
ADD COLUMN IF NOT EXISTS geometry_point geometry; ADD COLUMN IF NOT EXISTS geometry_point geometry;
ALTER TABLE osm_park_polygon_gen_z6 ALTER TABLE osm_park_polygon_gen_z6
ADD COLUMN IF NOT EXISTS geometry_point geometry; ADD COLUMN IF NOT EXISTS geometry_point geometry;
ALTER TABLE osm_park_polygon_gen_z5
ADD COLUMN IF NOT EXISTS geometry_point geometry;
-- etldoc: osm_park_polygon_gen_z4 -> osm_park_polygon_dissolve_z4
DROP MATERIALIZED VIEW IF EXISTS osm_park_polygon_dissolve_z4 CASCADE;
CREATE MATERIALIZED VIEW osm_park_polygon_dissolve_z4 AS
(
SELECT min(osm_id) AS osm_id,
ST_Union(geometry) AS geometry,
boundary
FROM (
SELECT ST_ClusterDBSCAN(geometry, 0, 1) OVER() AS cluster,
osm_id,
geometry,
boundary
FROM osm_park_polygon_gen_z4
) park_cluster
GROUP BY boundary, cluster
);
CREATE UNIQUE INDEX IF NOT EXISTS osm_park_polygon_dissolve_idx ON osm_park_polygon_dissolve_z4 (osm_id);
DROP TRIGGER IF EXISTS update_row ON osm_park_polygon; DROP TRIGGER IF EXISTS update_row ON osm_park_polygon;
DROP TRIGGER IF EXISTS update_row ON osm_park_polygon_gen_z13; DROP TRIGGER IF EXISTS update_row ON osm_park_polygon_gen_z13;
@ -46,10 +26,6 @@ DROP TRIGGER IF EXISTS update_row ON osm_park_polygon_gen_z9;
DROP TRIGGER IF EXISTS update_row ON osm_park_polygon_gen_z8; DROP TRIGGER IF EXISTS update_row ON osm_park_polygon_gen_z8;
DROP TRIGGER IF EXISTS update_row ON osm_park_polygon_gen_z7; DROP TRIGGER IF EXISTS update_row ON osm_park_polygon_gen_z7;
DROP TRIGGER IF EXISTS update_row ON osm_park_polygon_gen_z6; DROP TRIGGER IF EXISTS update_row ON osm_park_polygon_gen_z6;
DROP TRIGGER IF EXISTS update_row ON osm_park_polygon_gen_z5;
DROP TRIGGER IF EXISTS update_row ON osm_park_polygon_gen_z4;
DROP TRIGGER IF EXISTS trigger_flag ON osm_park_polygon;
DROP TRIGGER IF EXISTS trigger_refresh ON park_polygon.updates;
-- etldoc: osm_park_polygon -> osm_park_polygon -- etldoc: osm_park_polygon -> osm_park_polygon
-- etldoc: osm_park_polygon_gen_z13 -> osm_park_polygon_gen_z13 -- etldoc: osm_park_polygon_gen_z13 -> osm_park_polygon_gen_z13
@ -60,52 +36,45 @@ DROP TRIGGER IF EXISTS trigger_refresh ON park_polygon.updates;
-- etldoc: osm_park_polygon_gen_z8 -> osm_park_polygon_gen_z8 -- etldoc: osm_park_polygon_gen_z8 -> osm_park_polygon_gen_z8
-- etldoc: osm_park_polygon_gen_z7 -> osm_park_polygon_gen_z7 -- etldoc: osm_park_polygon_gen_z7 -> osm_park_polygon_gen_z7
-- etldoc: osm_park_polygon_gen_z6 -> osm_park_polygon_gen_z6 -- etldoc: osm_park_polygon_gen_z6 -> osm_park_polygon_gen_z6
-- etldoc: osm_park_polygon_gen_z5 -> osm_park_polygon_gen_z5
-- etldoc: osm_park_polygon_gen_z4 -> osm_park_polygon_gen_z4
CREATE OR REPLACE FUNCTION update_osm_park_polygon() RETURNS void AS CREATE OR REPLACE FUNCTION update_osm_park_polygon() RETURNS void AS
$$ $$
BEGIN BEGIN
UPDATE osm_park_polygon UPDATE osm_park_polygon
SET tags = update_tags(tags, geometry), SET tags = update_tags(tags, geometry),
geometry_point = ST_PointOnSurface(geometry); geometry_point = st_centroid(geometry);
UPDATE osm_park_polygon_gen_z13 UPDATE osm_park_polygon_gen_z13
SET tags = update_tags(tags, geometry), SET tags = update_tags(tags, geometry),
geometry_point = ST_PointOnSurface(geometry); geometry_point = st_centroid(geometry);
UPDATE osm_park_polygon_gen_z12 UPDATE osm_park_polygon_gen_z12
SET tags = update_tags(tags, geometry), SET tags = update_tags(tags, geometry),
geometry_point = ST_PointOnSurface(geometry); geometry_point = st_centroid(geometry);
UPDATE osm_park_polygon_gen_z11 UPDATE osm_park_polygon_gen_z11
SET tags = update_tags(tags, geometry), SET tags = update_tags(tags, geometry),
geometry_point = ST_PointOnSurface(geometry); geometry_point = st_centroid(geometry);
UPDATE osm_park_polygon_gen_z10 UPDATE osm_park_polygon_gen_z10
SET tags = update_tags(tags, geometry), SET tags = update_tags(tags, geometry),
geometry_point = ST_PointOnSurface(geometry); geometry_point = st_centroid(geometry);
UPDATE osm_park_polygon_gen_z9 UPDATE osm_park_polygon_gen_z9
SET tags = update_tags(tags, geometry), SET tags = update_tags(tags, geometry),
geometry_point = ST_PointOnSurface(geometry); geometry_point = st_centroid(geometry);
UPDATE osm_park_polygon_gen_z8 UPDATE osm_park_polygon_gen_z8
SET tags = update_tags(tags, geometry), SET tags = update_tags(tags, geometry),
geometry_point = ST_PointOnSurface(geometry); geometry_point = st_centroid(geometry);
UPDATE osm_park_polygon_gen_z7 UPDATE osm_park_polygon_gen_z7
SET tags = update_tags(tags, geometry), SET tags = update_tags(tags, geometry),
geometry_point = ST_PointOnSurface(geometry); geometry_point = st_centroid(geometry);
UPDATE osm_park_polygon_gen_z6 UPDATE osm_park_polygon_gen_z6
SET tags = update_tags(tags, geometry), SET tags = update_tags(tags, geometry),
geometry_point = ST_PointOnSurface(geometry); geometry_point = st_centroid(geometry);
UPDATE osm_park_polygon_gen_z5
SET tags = update_tags(tags, geometry),
geometry_point = ST_PointOnSurface(geometry);
REFRESH MATERIALIZED VIEW CONCURRENTLY osm_park_polygon_dissolve_z4;
END; END;
$$ LANGUAGE plpgsql; $$ LANGUAGE plpgsql;
@ -119,45 +88,7 @@ CREATE INDEX IF NOT EXISTS osm_park_polygon_gen_z9_point_geom_idx ON osm_park_po
CREATE INDEX IF NOT EXISTS osm_park_polygon_gen_z8_point_geom_idx ON osm_park_polygon_gen_z8 USING gist (geometry_point); CREATE INDEX IF NOT EXISTS osm_park_polygon_gen_z8_point_geom_idx ON osm_park_polygon_gen_z8 USING gist (geometry_point);
CREATE INDEX IF NOT EXISTS osm_park_polygon_gen_z7_point_geom_idx ON osm_park_polygon_gen_z7 USING gist (geometry_point); CREATE INDEX IF NOT EXISTS osm_park_polygon_gen_z7_point_geom_idx ON osm_park_polygon_gen_z7 USING gist (geometry_point);
CREATE INDEX IF NOT EXISTS osm_park_polygon_gen_z6_point_geom_idx ON osm_park_polygon_gen_z6 USING gist (geometry_point); CREATE INDEX IF NOT EXISTS osm_park_polygon_gen_z6_point_geom_idx ON osm_park_polygon_gen_z6 USING gist (geometry_point);
CREATE INDEX IF NOT EXISTS osm_park_polygon_gen_z5_point_geom_idx ON osm_park_polygon_gen_z5 USING gist (geometry_point);
CREATE INDEX IF NOT EXISTS osm_park_polygon_gen_z4_polygon_geom_idx ON osm_park_polygon_gen_z4 USING gist (geometry);
CREATE INDEX IF NOT EXISTS osm_park_polygon_dissolve_z4_polygon_geom_idx ON osm_park_polygon_dissolve_z4 USING gist (geometry);
CREATE SCHEMA IF NOT EXISTS park_polygon;
CREATE TABLE IF NOT EXISTS park_polygon.updates
(
id serial PRIMARY KEY,
t text,
UNIQUE (t)
);
CREATE OR REPLACE FUNCTION park_polygon.flag() RETURNS trigger AS
$$
BEGIN
INSERT INTO park_polygon.updates(t) VALUES ('y') ON CONFLICT(t) DO NOTHING;
RETURN NULL;
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION park_polygon.refresh() RETURNS trigger AS
$$
DECLARE
t TIMESTAMP WITH TIME ZONE := clock_timestamp();
BEGIN
RAISE LOG 'Refresh park_polygon';
-- Analyze tracking and source tables before performing update
ANALYZE osm_park_polygon_gen_z4;
REFRESH MATERIALIZED VIEW CONCURRENTLY osm_park_polygon_dissolve_z4;
-- noinspection SqlWithoutWhere
DELETE FROM park_polygon.updates;
RAISE LOG 'Refresh park_polygon done in %', age(clock_timestamp(), t);
RETURN NULL;
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION update_osm_park_polygon_row() CREATE OR REPLACE FUNCTION update_osm_park_polygon_row()
RETURNS trigger RETURNS trigger
@ -165,17 +96,7 @@ AS
$$ $$
BEGIN BEGIN
NEW.tags = update_tags(NEW.tags, NEW.geometry); NEW.tags = update_tags(NEW.tags, NEW.geometry);
NEW.geometry_point = ST_PointOnSurface(NEW.geometry); NEW.geometry_point = st_centroid(NEW.geometry);
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION update_osm_park_dissolved_polygon_row()
RETURNS trigger
AS
$$
BEGIN
NEW.tags = update_tags(NEW.tags, NEW.geometry);
RETURN NEW; RETURN NEW;
END; END;
$$ LANGUAGE plpgsql; $$ LANGUAGE plpgsql;
@ -233,28 +154,3 @@ CREATE TRIGGER update_row
ON osm_park_polygon_gen_z6 ON osm_park_polygon_gen_z6
FOR EACH ROW FOR EACH ROW
EXECUTE PROCEDURE update_osm_park_polygon_row(); EXECUTE PROCEDURE update_osm_park_polygon_row();
CREATE TRIGGER update_row
BEFORE INSERT OR UPDATE
ON osm_park_polygon_gen_z5
FOR EACH ROW
EXECUTE PROCEDURE update_osm_park_polygon_row();
CREATE TRIGGER update_row
BEFORE INSERT OR UPDATE
ON osm_park_polygon_gen_z4
FOR EACH ROW
EXECUTE PROCEDURE update_osm_park_dissolved_polygon_row();
CREATE TRIGGER trigger_flag
AFTER INSERT OR UPDATE OR DELETE
ON osm_park_polygon
FOR EACH STATEMENT
EXECUTE PROCEDURE park_polygon.flag();
CREATE CONSTRAINT TRIGGER trigger_refresh
AFTER INSERT
ON park_polygon.updates
INITIALLY DEFERRED
FOR EACH ROW
EXECUTE PROCEDURE park_polygon.refresh();
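One behavioural difference in this diff is the label point: one side uses ST_PointOnSurface, the other st_centroid. For concave polygons the centroid can fall outside the geometry, while ST_PointOnSurface is guaranteed to lie inside it. A self-contained illustration (the L-shaped polygon is made up):
SELECT ST_AsText(ST_Centroid(g))       AS centroid,          -- falls outside the L-shape
       ST_AsText(ST_PointOnSurface(g)) AS point_on_surface   -- always inside the polygon
FROM (SELECT 'POLYGON((0 0,10 0,10 10,8 10,8 2,0 2,0 0))'::geometry AS g) AS t;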

View File

@ -1,14 +0,0 @@
CREATE OR REPLACE FUNCTION area_rank(area real) RETURNS int AS
$$
SELECT CASE
WHEN area > 640000000 THEN 1
WHEN area > 160000000 THEN 2
WHEN area > 40000000 THEN 3
WHEN area > 15000000 THEN 4
WHEN area > 10000000 THEN 5
WHEN area > 0 THEN 6
ELSE 7
END;
$$ LANGUAGE SQL IMMUTABLE
STRICT
PARALLEL SAFE;
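The thresholds above are areas in squared Web Mercator meters, evaluated top-down, so the first matching branch wins. A few illustrative calls (sample values are made up):
SELECT area_rank(700000000::real);  -- 1  (> 640 000 000)
SELECT area_rank(50000000::real);   -- 3  (> 40 000 000)
SELECT area_rank(12000000::real);   -- 5  (> 10 000 000)
SELECT area_rank(500000::real);     -- 6  (> 0)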

View File

@ -2,8 +2,8 @@ CREATE OR REPLACE FUNCTION normalize_capital_level(capital text)
RETURNS int AS RETURNS int AS
$$ $$
SELECT CASE SELECT CASE
WHEN capital = 'yes' THEN 2 WHEN capital IN ('yes', '2') THEN 2
WHEN capital IN ('2', '3', '4', '5', '6') THEN capital::int WHEN capital = '4' THEN 4
END; END;
$$ LANGUAGE SQL IMMUTABLE $$ LANGUAGE SQL IMMUTABLE
STRICT STRICT
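The two variants shown differ in which capital=* values they accept: one maps 'yes' to 2 and any of '2' through '6' to its integer value, the other recognizes only 'yes' and '2' (as 2) plus '4'. Since neither CASE has an ELSE branch, everything else yields NULL. For example (a sketch, not part of the diff):
SELECT normalize_capital_level('yes');  -- 2 in both variants
SELECT normalize_capital_level('4');    -- 4 in both variants
SELECT normalize_capital_level('6');    -- 6 where '2'-'6' are mapped, NULL in the other variant
SELECT normalize_capital_level('8');    -- NULL in both variants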

Binary file not shown (before: 152 KiB, after: 137 KiB).

View File

@ -0,0 +1,12 @@
CREATE OR REPLACE FUNCTION island_rank(area real) RETURNS int AS
$$
SELECT CASE
WHEN area < 10000000 THEN 6
WHEN area BETWEEN 1000000 AND 15000000 THEN 5
WHEN area BETWEEN 15000000 AND 40000000 THEN 4
WHEN area > 40000000 THEN 3
ELSE 7
END;
$$ LANGUAGE SQL IMMUTABLE
STRICT
PARALLEL SAFE;
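Note that the BETWEEN bounds above overlap (1 000 000 to 15 000 000 also covers part of the first branch's range); because a CASE is evaluated top-down, the first matching branch wins. A few illustrative calls (sample areas are made up):
SELECT island_rank(5000000::real);    -- 6  (caught by area < 10 000 000 first)
SELECT island_rank(12000000::real);   -- 5
SELECT island_rank(20000000::real);   -- 4
SELECT island_rank(50000000::real);   -- 3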

View File

@ -124,9 +124,6 @@ tables:
- *name_de - *name_de
- name: tags - name: tags
type: hstore_tags type: hstore_tags
- name: place
key: place
type: string
- name: is_in_country - name: is_in_country
key: is_in:country key: is_in:country
type: string type: string
@ -143,7 +140,6 @@ tables:
mapping: mapping:
place: place:
- state - state
- province
# etldoc: imposm3 -> osm_city_point # etldoc: imposm3 -> osm_city_point
city_point: city_point:
@ -177,7 +173,6 @@ tables:
- town - town
- village - village
- hamlet - hamlet
- borough
- suburb - suburb
- quarter - quarter
- neighbourhood - neighbourhood

Binary file not shown (before: 55 KiB, after: 51 KiB).

View File

@ -70,7 +70,7 @@ FROM (
COALESCE(NULLIF(name_en, ''), name) AS name_en, COALESCE(NULLIF(name_en, ''), name) AS name_en,
COALESCE(NULLIF(name_de, ''), name, name_en) AS name_de, COALESCE(NULLIF(name_de, ''), name, name_en) AS name_de,
tags, tags,
place::text AS class, 'state' AS class,
"rank", "rank",
NULL::int AS capital, NULL::int AS capital,
NULL::text AS iso_a2 NULL::text AS iso_a2
@ -109,39 +109,17 @@ FROM (
COALESCE(NULLIF(name_de, ''), name, name_en) AS name_de, COALESCE(NULLIF(name_de, ''), name, name_en) AS name_de,
tags, tags,
'island' AS class, 'island' AS class,
area_rank(area) AS "rank", island_rank(area) AS "rank",
NULL::int AS capital, NULL::int AS capital,
NULL::text AS iso_a2 NULL::text AS iso_a2
FROM osm_island_polygon FROM osm_island_polygon
WHERE geometry && bbox WHERE geometry && bbox
AND ((zoom_level = 8 AND area_rank(area) <= 3) AND ((zoom_level = 8 AND island_rank(area) <= 3)
OR (zoom_level = 9 AND area_rank(area) <= 4) OR (zoom_level = 9 AND island_rank(area) <= 4)
OR (zoom_level >= 10)) OR (zoom_level >= 10))
UNION ALL UNION ALL
SELECT
-- etldoc: osm_boundary_polygon -> layer_place:z6_11
-- etldoc: osm_boundary_polygon -> layer_place:z12_14
osm_id * 10 AS osm_id,
geometry_point,
name,
NULL::text AS name_en, -- deprecated
NULL::text AS name_de, -- deprecated
tags,
'aboriginal_lands' AS class,
area_rank(area) AS "rank",
NULL::int AS capital,
NULL::text AS iso_a2
FROM osm_boundary_polygon
WHERE geometry_point && bbox
AND ((zoom_level = 6 AND area_rank(area) <= 1)
OR (zoom_level = 7 AND area_rank(area) <= 2)
OR (zoom_level = 8 AND area_rank(area) <= 3)
OR (zoom_level = 9 AND area_rank(area) <= 4)
OR (zoom_level >= 10))
UNION ALL
SELECT SELECT
-- etldoc: layer_city -> layer_place:z0_3 -- etldoc: layer_city -> layer_place:z0_3
-- etldoc: layer_city -> layer_place:z4_7 -- etldoc: layer_city -> layer_place:z4_7

View File

@ -1,54 +1,40 @@
layer: layer:
id: "place" id: "place"
requires:
tables:
- ne_10m_admin_1_states_provinces
- ne_10m_admin_0_countries
- ne_10m_populated_places
layers:
- boundary
description: | description: |
The place layer consists of [countries](http://wiki.openstreetmap.org/wiki/Tag:place%3Dcountry), The place layer consists of [countries](http://wiki.openstreetmap.org/wiki/Tag:place%3Dcountry),
[states](http://wiki.openstreetmap.org/wiki/Tag:place%3Dstate), [cities](http://wiki.openstreetmap.org/wiki/Key:place) [states](http://wiki.openstreetmap.org/wiki/Tag:place%3Dstate) and [cities](http://wiki.openstreetmap.org/wiki/Key:place).
and [islands](https://wiki.openstreetmap.org/wiki/Tag:place%3Disland).
Apart from the roads this is also one of the more important layers to create a beautiful map. Apart from the roads this is also one of the more important layers to create a beautiful map.
We suggest you use different font styles and sizes to create a text hierarchy. We suggest you use different font styles and sizes to create a text hierarchy.
fields: fields:
name: The OSM [`name`](http://wiki.openstreetmap.org/wiki/Key:name) value of the place. Language-specific values are in `name:xx`. name: The OSM [`name`](http://wiki.openstreetmap.org/wiki/Key:name) value of the POI.
name_en: English name `name:en` if available, otherwise `name`. This is deprecated and will be removed in a future release in favor of `name:en`. name_en: English name `name:en` if available, otherwise `name`.
name_de: German name `name:de` if available, otherwise `name` or `name:en`. This is deprecated and will be removed in a future release in favor of `name:de`. name_de: German name `name:de` if available, otherwise `name` or `name:en`.
capital: capital:
description: | description: |
The **capital** field marks the The **capital** field marks the
[`admin_level`](http://wiki.openstreetmap.org/wiki/Tag:boundary%3Dadministrative#admin_level) [`admin_level`](http://wiki.openstreetmap.org/wiki/Tag:boundary%3Dadministrative#admin_level)
of the boundary the place is a capital of. of the boundary the place is a capital of.
values: [2, 3, 4, 5, 6] values: [2, 4]
class: class:
description: | description: |
Original value of the Original value of the
[`place`](http://wiki.openstreetmap.org/wiki/Key:place) tag. [`place`](http://wiki.openstreetmap.org/wiki/Key:place) tag.
Distinguish between continents, countries, states, islands and Distinguish between continents, countries, states and
places like settlements or smaller entities. places like settlements or smaller entities.
Use **class** to separately style the different places and build Use **class** to separately style the different places and build
a text hierarchy according to their importance. For places derived a text hierarchy according to their importance.
from boundaries, the original value of the
[`boundary`](http://wiki.openstreetmap.org/wiki/Key:boundary) tag.
values: values:
- continent - continent
- country - country
- state - state
- province
- city - city
- town - town
- village - village
- hamlet - hamlet
- borough
- suburb - suburb
- quarter - quarter
- neighbourhood - neighbourhood
- isolated_dwelling - isolated_dwelling
- island
- aboriginal_lands
iso_a2: iso_a2:
description: | description: |
Two-letter country code [ISO 3166-1 alpha-2](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2). Available only for `class=country`. Two-letter country code [ISO 3166-1 alpha-2](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2). Available only for `class=country`.
@ -58,7 +44,7 @@ layer:
description: | description: |
Countries, states and the most important cities all have a Countries, states and the most important cities all have a
**rank** to boost their importance on the map. **rank** to boost their importance on the map.
The **rank** field for countries and states ranges from The **rank** field for countries and states ranges from
`1` to `6` while the **rank** field for cities ranges from `1` to `6` while the **rank** field for cities ranges from
`1` to `10` for the most important cities `1` to `10` for the most important cities
and continues from `10` serially based on the and continues from `10` serially based on the
@ -80,7 +66,7 @@ schema:
- ./types.sql - ./types.sql
- ./capital.sql - ./capital.sql
- ./city.sql - ./city.sql
- ./area_rank.sql - ./island_rank.sql
- ./update_continent_point.sql - ./update_continent_point.sql
- ./update_country_point.sql - ./update_country_point.sql
- ./update_island_polygon.sql - ./update_island_polygon.sql

View File

@ -1,662 +0,0 @@
{
"layers": [
{
"id": "place_other",
"type": "symbol",
"source": "openmaptiles",
"source-layer": "place",
"minzoom": 8,
"layout": {
"text-font": [
"Noto Sans Regular"
],
"text-size": {
"base": 1.2,
"stops": [
[
11,
10
],
[
14,
14
],
[
18,
16
]
]
},
"text-field": "{name:latin}\n{name:nonlatin}",
"visibility": "visible",
"symbol-spacing": 150,
"text-max-width": 10,
"text-transform": "none"
},
"paint": {
"text-color": {
"stops": [
[
12.5,
"#222222"
],
[
12.6,
"#777777"
]
]
},
"text-halo-blur": 0,
"text-halo-color": {
"stops": [
[
11,
"rgba(255,255,255,0.6)"
],
[
13,
"#ffffff"
]
]
},
"text-halo-width": {
"stops": [
[
8,
0.8
],
[
13,
1.5
]
]
}
},
"metadata": {},
"filter": [
"all",
[
"in",
"class",
"hamlet",
"island",
"islet",
"neighbourhood",
"suburb",
"borough"
]
],
"order": 188
},
{
"id": "place_village",
"type": "symbol",
"source": "openmaptiles",
"source-layer": "place",
"minzoom": 8,
"layout": {
"text-font": [
"Noto Sans Regular"
],
"text-size": {
"base": 1.2,
"stops": [
[
10,
10
],
[
15,
16
]
]
},
"text-field": "{name:latin}\n{name:nonlatin}",
"visibility": "visible",
"text-max-width": 8
},
"paint": {
"text-color": "#333",
"text-halo-color": "rgba(255,255,255,0.8)",
"text-halo-width": 1.2
},
"metadata": {},
"filter": [
"all",
[
"==",
"class",
"village"
]
],
"order": 194
},
{
"id": "place_town",
"type": "symbol",
"source": "openmaptiles",
"source-layer": "place",
"minzoom": 6,
"layout": {
"text-font": [
"Noto Sans Regular"
],
"text-size": {
"base": 1.2,
"stops": [
[
7,
10
],
[
11,
13
]
]
},
"text-field": "{name:latin}\n{name:nonlatin}",
"visibility": "visible",
"text-anchor": "bottom",
"text-offset": [
0,
0
],
"text-max-width": 8
},
"paint": {
"text-color": "#333",
"text-halo-color": "rgba(255,255,255,0.8)",
"text-halo-width": 1.2
},
"metadata": {},
"filter": [
"all",
[
"==",
"class",
"town"
]
],
"order": 195
},
{
"id": "place_state",
"type": "symbol",
"source": "openmaptiles",
"source-layer": "place",
"minzoom": 4,
"maxzoom": 12,
"layout": {
"text-font": [
"Noto Sans Regular",
"Noto Sans Bold"
],
"text-size": {
"stops": [
[
3,
10
],
[
6,
14
]
]
},
"text-field": "{name:latin}",
"visibility": "visible",
"text-padding": 2,
"text-transform": "none",
"text-letter-spacing": 0
},
"paint": {
"text-color": "#7e587d",
"text-halo-color": "rgba(255,255,255,0.7)",
"text-halo-width": 0.8
},
"metadata": {},
"filter": [
"all",
[
"==",
"class",
"state"
],
[
"<",
"rank",
3
]
],
"order": 196
},
{
"id": "place_city",
"type": "symbol",
"source": "openmaptiles",
"source-layer": "place",
"minzoom": 4,
"maxzoom": 14,
"layout": {
"text-font": [
"Noto Sans Regular"
],
"text-size": {
"base": 1.2,
"stops": [
[
4,
12
],
[
15,
18
]
]
},
"icon-image": {
"stops": [
[
4,
"place-6"
],
[
7,
" "
]
]
},
"text-field": "{name:latin}\n{name:nonlatin}",
"visibility": "visible",
"icon-offset": [
0,
3
],
"text-anchor": "bottom",
"text-offset": [
0,
0
],
"icon-optional": false,
"text-max-width": 8,
"icon-allow-overlap": true
},
"paint": {
"text-color": {
"stops": [
[
6,
"rgba(88, 88, 88, 1)"
],
[
14,
"rgba(32, 32, 32, 1)"
]
]
},
"text-halo-color": "rgba(255,255,255,0.8)",
"text-halo-width": 1
},
"metadata": {},
"filter": [
"all",
[
"==",
"class",
"city"
],
[
"!=",
"rank",
1
]
],
"order": 197
},
{
"id": "place_capital",
"type": "symbol",
"source": "openmaptiles",
"source-layer": "place",
"minzoom": 3,
"maxzoom": 15,
"layout": {
"icon-size": 1,
"text-font": [
"Noto Sans Regular"
],
"text-size": {
"base": 1.2,
"stops": [
[
4,
11
],
[
12,
16
]
]
},
"icon-image": {
"stops": [
[
6,
"place-capital-8"
],
[
8,
""
]
]
},
"text-field": "{name:latin}\n{name:nonlatin}",
"visibility": "visible",
"icon-offset": [
0,
3
],
"text-anchor": "bottom",
"text-offset": [
0,
0
],
"icon-optional": false,
"text-max-width": 8,
"icon-allow-overlap": true
},
"paint": {
"text-color": {
"stops": [
[
6,
"rgba(73, 73, 73, 1)"
],
[
14,
"rgba(32, 32, 32, 1)"
]
]
},
"text-halo-color": "rgba(255,255,255,0.8)",
"text-halo-width": 1.2
},
"metadata": {},
"filter": [
"all",
[
"==",
"class",
"city"
],
[
"in",
"capital",
1,
2
]
],
"order": 198
},
{
"id": "country_other",
"type": "symbol",
"source": "openmaptiles",
"source-layer": "place",
"minzoom": 4,
"maxzoom": 15,
"layout": {
"text-font": [
"Noto Sans Regular"
],
"text-size": {
"stops": [
[
3,
11
],
[
5,
13
],
[
7,
20
]
]
},
"text-field": "{name:latin}",
"visibility": "visible",
"text-max-width": 6.25,
"text-transform": "none"
},
"paint": {
"text-color": "rgba(131, 81, 130, 1)",
"text-halo-blur": 1,
"text-halo-color": "rgba(255,255,255,0.8)",
"text-halo-width": 0.8
},
"metadata": {},
"filter": [
"all",
[
"==",
"class",
"country"
],
[
"!has",
"iso_a2"
]
],
"order": 199
},
{
"id": "country_3",
"type": "symbol",
"source": "openmaptiles",
"source-layer": "place",
"minzoom": 5,
"maxzoom": 12,
"layout": {
"text-font": [
"Noto Sans Bold"
],
"text-size": {
"stops": [
[
3,
11
],
[
5,
13
],
[
7,
17
]
]
},
"text-field": "{name:latin}",
"visibility": "visible",
"text-max-width": 6.25,
"text-transform": "none"
},
"paint": {
"text-color": {
"stops": [
[
3,
"rgba(108, 78, 107, 1)"
],
[
10,
"rgba(57, 37, 73, 1)"
]
]
},
"text-halo-blur": 1,
"text-halo-color": "rgba(255,255,255,0.8)",
"text-halo-width": 0.8
},
"metadata": {},
"filter": [
"all",
[
">=",
"rank",
3
],
[
"==",
"class",
"country"
],
[
"has",
"iso_a2"
]
],
"order": 200
},
{
"id": "country_2",
"type": "symbol",
"source": "openmaptiles",
"source-layer": "place",
"minzoom": 2,
"maxzoom": 12,
"layout": {
"text-font": [
"Noto Sans Bold"
],
"text-size": {
"stops": [
[
3,
11
],
[
5,
14
],
[
7,
19
]
]
},
"text-field": "{name:latin}",
"visibility": "visible",
"text-max-width": 6.25,
"text-transform": "none"
},
"paint": {
"text-color": {
"stops": [
[
3,
"rgba(108, 78, 107, 1)"
],
[
10,
"rgba(57, 37, 73, 1)"
]
]
},
"text-halo-blur": 1,
"text-halo-color": "rgba(255,255,255,0.8)",
"text-halo-width": 0.8
},
"metadata": {},
"filter": [
"all",
[
"==",
"rank",
2
],
[
"==",
"class",
"country"
],
[
"has",
"iso_a2"
]
],
"order": 201
},
{
"id": "country_1",
"type": "symbol",
"source": "openmaptiles",
"source-layer": "place",
"minzoom": 2,
"maxzoom": 12,
"layout": {
"text-font": [
"Noto Sans Bold"
],
"text-size": {
"stops": [
[
3,
11
],
[
5,
14
],
[
7,
19
]
]
},
"text-field": "{name:latin}",
"visibility": "visible",
"text-max-width": 6.25,
"text-transform": "none"
},
"paint": {
"text-color": {
"stops": [
[
2,
"rgba(108, 78, 107, 1)"
],
[
10,
"rgba(57, 37, 73, 1)"
]
]
},
"text-halo-blur": 1,
"text-halo-color": "rgba(255,255,255,0.8)",
"text-halo-width": 0.8
},
"metadata": {},
"filter": [
"all",
[
"==",
"rank",
1
],
[
"==",
"class",
"country"
],
[
"has",
"iso_a2"
]
],
"order": 202
}
]
}

View File

@ -1,10 +1,9 @@
DO DO
$$ $$
BEGIN BEGIN
PERFORM 'city_place'::regtype; IF NOT EXISTS(SELECT 1 FROM pg_type WHERE typname = 'city_place') THEN
EXCEPTION CREATE TYPE city_place AS enum ('city', 'town', 'village', 'hamlet', 'suburb', 'quarter', 'neighbourhood', 'isolated_dwelling');
WHEN undefined_object THEN END IF;
CREATE TYPE city_place AS enum ('city', 'town', 'village', 'hamlet', 'borough', 'suburb', 'quarter', 'neighbourhood', 'isolated_dwelling');
END END
$$; $$;

View File

@ -8,7 +8,7 @@ CREATE SCHEMA IF NOT EXISTS place_city;
CREATE TABLE IF NOT EXISTS place_city.osm_ids CREATE TABLE IF NOT EXISTS place_city.osm_ids
( (
osm_id bigint PRIMARY KEY osm_id bigint
); );
CREATE OR REPLACE FUNCTION update_osm_city_point(full_update boolean) RETURNS void AS CREATE OR REPLACE FUNCTION update_osm_city_point(full_update boolean) RETURNS void AS
@ -23,8 +23,8 @@ $$
LEFT JOIN ne_10m_populated_places AS ne ON LEFT JOIN ne_10m_populated_places AS ne ON
( (
(osm.tags ? 'wikidata' AND osm.tags->'wikidata' = ne.wikidataid) OR (osm.tags ? 'wikidata' AND osm.tags->'wikidata' = ne.wikidataid) OR
lower(osm.name) IN (lower(ne.name), lower(ne.namealt), lower(ne.meganame), lower(ne.name_en), lower(ne.nameascii)) OR lower(osm.name) IN (lower(ne.name), lower(ne.namealt), lower(ne.meganame), lower(ne.gn_ascii), lower(ne.nameascii)) OR
lower(osm.name_en) IN (lower(ne.name), lower(ne.namealt), lower(ne.meganame), lower(ne.name_en), lower(ne.nameascii)) OR lower(osm.name_en) IN (lower(ne.name), lower(ne.namealt), lower(ne.meganame), lower(ne.gn_ascii), lower(ne.nameascii)) OR
ne.name = unaccent(osm.name) ne.name = unaccent(osm.name)
) )
AND osm.place IN ('city', 'town', 'village') AND osm.place IN ('city', 'town', 'village')
@ -49,12 +49,18 @@ $$ LANGUAGE SQL;
SELECT update_osm_city_point(true); SELECT update_osm_city_point(true);
CREATE INDEX IF NOT EXISTS osm_city_point_rank_idx ON osm_city_point ("rank");
-- Handle updates -- Handle updates
CREATE OR REPLACE FUNCTION place_city.store() RETURNS trigger AS CREATE OR REPLACE FUNCTION place_city.store() RETURNS trigger AS
$$ $$
BEGIN BEGIN
INSERT INTO place_city.osm_ids VALUES (NEW.osm_id) ON CONFLICT (osm_id) DO NOTHING; IF (tg_op = 'DELETE') THEN
INSERT INTO place_city.osm_ids VALUES (OLD.osm_id);
ELSE
INSERT INTO place_city.osm_ids VALUES (NEW.osm_id);
END IF;
RETURN NULL; RETURN NULL;
END; END;
$$ LANGUAGE plpgsql; $$ LANGUAGE plpgsql;
@ -79,11 +85,6 @@ DECLARE
t TIMESTAMP WITH TIME ZONE := clock_timestamp(); t TIMESTAMP WITH TIME ZONE := clock_timestamp();
BEGIN BEGIN
RAISE LOG 'Refresh place_city rank'; RAISE LOG 'Refresh place_city rank';
-- Analyze tracking and source tables before performing update
ANALYZE place_city.osm_ids;
ANALYZE osm_city_point;
PERFORM update_osm_city_point(false); PERFORM update_osm_city_point(false);
-- noinspection SqlWithoutWhere -- noinspection SqlWithoutWhere
DELETE FROM place_city.osm_ids; DELETE FROM place_city.osm_ids;
@ -96,17 +97,15 @@ END;
$$ LANGUAGE plpgsql; $$ LANGUAGE plpgsql;
CREATE TRIGGER trigger_store CREATE TRIGGER trigger_store
AFTER INSERT OR UPDATE AFTER INSERT OR UPDATE OR DELETE
ON osm_city_point ON osm_city_point
FOR EACH ROW FOR EACH ROW
WHEN (pg_trigger_depth() < 1)
EXECUTE PROCEDURE place_city.store(); EXECUTE PROCEDURE place_city.store();
CREATE TRIGGER trigger_flag CREATE TRIGGER trigger_flag
AFTER INSERT OR UPDATE AFTER INSERT OR UPDATE OR DELETE
ON osm_city_point ON osm_city_point
FOR EACH STATEMENT FOR EACH STATEMENT
WHEN (pg_trigger_depth() < 1)
EXECUTE PROCEDURE place_city.flag(); EXECUTE PROCEDURE place_city.flag();
CREATE CONSTRAINT TRIGGER trigger_refresh CREATE CONSTRAINT TRIGGER trigger_refresh

View File

@ -6,7 +6,7 @@ CREATE SCHEMA IF NOT EXISTS place_continent_point;
CREATE TABLE IF NOT EXISTS place_continent_point.osm_ids CREATE TABLE IF NOT EXISTS place_continent_point.osm_ids
( (
osm_id bigint PRIMARY KEY osm_id bigint
); );
-- etldoc: osm_continent_point -> osm_continent_point -- etldoc: osm_continent_point -> osm_continent_point
@ -26,7 +26,11 @@ SELECT update_osm_continent_point(true);
CREATE OR REPLACE FUNCTION place_continent_point.store() RETURNS trigger AS CREATE OR REPLACE FUNCTION place_continent_point.store() RETURNS trigger AS
$$ $$
BEGIN BEGIN
INSERT INTO place_continent_point.osm_ids VALUES (NEW.osm_id) ON CONFLICT (osm_id) DO NOTHING; IF (tg_op = 'DELETE') THEN
INSERT INTO place_continent_point.osm_ids VALUES (OLD.osm_id);
ELSE
INSERT INTO place_continent_point.osm_ids VALUES (NEW.osm_id);
END IF;
RETURN NULL; RETURN NULL;
END; END;
$$ LANGUAGE plpgsql; $$ LANGUAGE plpgsql;
@ -51,11 +55,6 @@ DECLARE
t TIMESTAMP WITH TIME ZONE := clock_timestamp(); t TIMESTAMP WITH TIME ZONE := clock_timestamp();
BEGIN BEGIN
RAISE LOG 'Refresh place_continent_point'; RAISE LOG 'Refresh place_continent_point';
-- Analyze tracking and source tables before performing update
ANALYZE place_continent_point.osm_ids;
ANALYZE osm_continent_point;
PERFORM update_osm_continent_point(false); PERFORM update_osm_continent_point(false);
-- noinspection SqlWithoutWhere -- noinspection SqlWithoutWhere
DELETE FROM place_continent_point.osm_ids; DELETE FROM place_continent_point.osm_ids;
@ -68,17 +67,15 @@ END;
$$ LANGUAGE plpgsql; $$ LANGUAGE plpgsql;
CREATE TRIGGER trigger_store CREATE TRIGGER trigger_store
AFTER INSERT OR UPDATE AFTER INSERT OR UPDATE OR DELETE
ON osm_continent_point ON osm_continent_point
FOR EACH ROW FOR EACH ROW
WHEN (pg_trigger_depth() < 1)
EXECUTE PROCEDURE place_continent_point.store(); EXECUTE PROCEDURE place_continent_point.store();
CREATE TRIGGER trigger_flag CREATE TRIGGER trigger_flag
AFTER INSERT OR UPDATE AFTER INSERT OR UPDATE OR DELETE
ON osm_continent_point ON osm_continent_point
FOR EACH STATEMENT FOR EACH STATEMENT
WHEN (pg_trigger_depth() < 1)
EXECUTE PROCEDURE place_continent_point.flag(); EXECUTE PROCEDURE place_continent_point.flag();
CREATE CONSTRAINT TRIGGER trigger_refresh CREATE CONSTRAINT TRIGGER trigger_refresh

View File

@ -6,7 +6,7 @@ CREATE SCHEMA IF NOT EXISTS place_country;
CREATE TABLE IF NOT EXISTS place_country.osm_ids CREATE TABLE IF NOT EXISTS place_country.osm_ids
( (
osm_id bigint PRIMARY KEY osm_id bigint
); );
-- etldoc: ne_10m_admin_0_countries -> osm_country_point -- etldoc: ne_10m_admin_0_countries -> osm_country_point
@ -98,12 +98,18 @@ $$ LANGUAGE SQL;
SELECT update_osm_country_point(true); SELECT update_osm_country_point(true);
CREATE INDEX IF NOT EXISTS osm_country_point_rank_idx ON osm_country_point ("rank");
-- Handle updates -- Handle updates
CREATE OR REPLACE FUNCTION place_country.store() RETURNS trigger AS CREATE OR REPLACE FUNCTION place_country.store() RETURNS trigger AS
$$ $$
BEGIN BEGIN
INSERT INTO place_country.osm_ids VALUES (NEW.osm_id) ON CONFLICT (osm_id) DO NOTHING; IF (tg_op = 'DELETE') THEN
INSERT INTO place_country.osm_ids VALUES (OLD.osm_id);
ELSE
INSERT INTO place_country.osm_ids VALUES (NEW.osm_id);
END IF;
RETURN NULL; RETURN NULL;
END; END;
$$ LANGUAGE plpgsql; $$ LANGUAGE plpgsql;
@ -128,11 +134,6 @@ DECLARE
t TIMESTAMP WITH TIME ZONE := clock_timestamp(); t TIMESTAMP WITH TIME ZONE := clock_timestamp();
BEGIN BEGIN
RAISE LOG 'Refresh place_country rank'; RAISE LOG 'Refresh place_country rank';
-- Analyze tracking and source tables before performing update
ANALYZE place_country.osm_ids;
ANALYZE osm_country_point;
PERFORM update_osm_country_point(false); PERFORM update_osm_country_point(false);
-- noinspection SqlWithoutWhere -- noinspection SqlWithoutWhere
DELETE FROM place_country.osm_ids; DELETE FROM place_country.osm_ids;
@ -145,17 +146,15 @@ END;
$$ LANGUAGE plpgsql; $$ LANGUAGE plpgsql;
CREATE TRIGGER trigger_store CREATE TRIGGER trigger_store
AFTER INSERT OR UPDATE AFTER INSERT OR UPDATE OR DELETE
ON osm_country_point ON osm_country_point
FOR EACH ROW FOR EACH ROW
WHEN (pg_trigger_depth() < 1)
EXECUTE PROCEDURE place_country.store(); EXECUTE PROCEDURE place_country.store();
CREATE TRIGGER trigger_flag CREATE TRIGGER trigger_flag
AFTER INSERT OR UPDATE AFTER INSERT OR UPDATE OR DELETE
ON osm_country_point ON osm_country_point
FOR EACH STATEMENT FOR EACH STATEMENT
WHEN (pg_trigger_depth() < 1)
EXECUTE PROCEDURE place_country.flag(); EXECUTE PROCEDURE place_country.flag();
CREATE CONSTRAINT TRIGGER trigger_refresh CREATE CONSTRAINT TRIGGER trigger_refresh

View File

@@ -6,7 +6,7 @@ CREATE SCHEMA IF NOT EXISTS place_island_point;

 CREATE TABLE IF NOT EXISTS place_island_point.osm_ids
 (
-    osm_id bigint PRIMARY KEY
+    osm_id bigint
 );

 -- etldoc: osm_island_point -> osm_island_point
@@ -26,7 +26,11 @@ SELECT update_osm_island_point(true);
 CREATE OR REPLACE FUNCTION place_island_point.store() RETURNS trigger AS
 $$
 BEGIN
-    INSERT INTO place_island_point.osm_ids VALUES (NEW.osm_id) ON CONFLICT (osm_id) DO NOTHING;
+    IF (tg_op = 'DELETE') THEN
+        INSERT INTO place_island_point.osm_ids VALUES (OLD.osm_id);
+    ELSE
+        INSERT INTO place_island_point.osm_ids VALUES (NEW.osm_id);
+    END IF;
     RETURN NULL;
 END;
 $$ LANGUAGE plpgsql;
@@ -51,11 +55,6 @@ DECLARE
     t TIMESTAMP WITH TIME ZONE := clock_timestamp();
 BEGIN
     RAISE LOG 'Refresh place_island_point';
-
-    -- Analyze tracking and source tables before performing update
-    ANALYZE place_island_point.osm_ids;
-    ANALYZE osm_island_point;
-
     PERFORM update_osm_island_point(false);
     -- noinspection SqlWithoutWhere
     DELETE FROM place_island_point.osm_ids;
@@ -68,17 +67,15 @@ END;
 $$ LANGUAGE plpgsql;

 CREATE TRIGGER trigger_store
-    AFTER INSERT OR UPDATE
+    AFTER INSERT OR UPDATE OR DELETE
     ON osm_island_point
     FOR EACH ROW
-    WHEN (pg_trigger_depth() < 1)
     EXECUTE PROCEDURE place_island_point.store();

 CREATE TRIGGER trigger_flag
-    AFTER INSERT OR UPDATE
+    AFTER INSERT OR UPDATE OR DELETE
     ON osm_island_point
     FOR EACH STATEMENT
-    WHEN (pg_trigger_depth() < 1)
     EXECUTE PROCEDURE place_island_point.flag();

 CREATE CONSTRAINT TRIGGER trigger_refresh
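The refresh hunks show the other half of the disagreement: the 2025-refresh side ANALYZEs the freshly filled tracking table and the source table so that update_osm_island_point(false) is planned against current statistics, while the master side calls it directly. Putting the removed and context lines back together gives roughly this shape for the 2025-refresh refresh function; its signature and the closing log line are not visible in the hunks and are assumptions:

    -- Sketch reassembled from the hunks above; parts outside the hunks are assumed.
    CREATE OR REPLACE FUNCTION place_island_point.refresh() RETURNS trigger AS
    $$
    DECLARE
        t TIMESTAMP WITH TIME ZONE := clock_timestamp();
    BEGIN
        RAISE LOG 'Refresh place_island_point';

        -- Analyze tracking and source tables before performing update,
        -- so the planner sees how many ids actually need re-processing.
        ANALYZE place_island_point.osm_ids;
        ANALYZE osm_island_point;

        PERFORM update_osm_island_point(false);
        -- noinspection SqlWithoutWhere
        DELETE FROM place_island_point.osm_ids;

        RAISE LOG 'Refresh place_island_point done in %', age(clock_timestamp(), t);
        RETURN NULL;
    END;
    $$ LANGUAGE plpgsql;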


@@ -6,7 +6,7 @@ CREATE SCHEMA IF NOT EXISTS place_island_polygon;

 CREATE TABLE IF NOT EXISTS place_island_polygon.osm_ids
 (
-    osm_id bigint PRIMARY KEY
+    osm_id bigint
 );

 -- etldoc: osm_island_polygon -> osm_island_polygon
@@ -33,7 +33,11 @@ SELECT update_osm_island_polygon(true);
 CREATE OR REPLACE FUNCTION place_island_polygon.store() RETURNS trigger AS
 $$
 BEGIN
-    INSERT INTO place_island_polygon.osm_ids VALUES (NEW.osm_id) ON CONFLICT (osm_id) DO NOTHING;
+    IF (tg_op = 'DELETE') THEN
+        INSERT INTO place_island_polygon.osm_ids VALUES (OLD.osm_id);
+    ELSE
+        INSERT INTO place_island_polygon.osm_ids VALUES (NEW.osm_id);
+    END IF;
     RETURN NULL;
 END;
 $$ LANGUAGE plpgsql;
@@ -58,11 +62,6 @@ DECLARE
     t TIMESTAMP WITH TIME ZONE := clock_timestamp();
 BEGIN
     RAISE LOG 'Refresh place_island_polygon';
-
-    -- Analyze tracking and source tables before performing update
-    ANALYZE place_island_polygon.osm_ids;
-    ANALYZE osm_island_polygon;
-
     PERFORM update_osm_island_polygon(false);
     -- noinspection SqlWithoutWhere
     DELETE FROM place_island_polygon.osm_ids;
@@ -75,17 +74,15 @@ END;
 $$ LANGUAGE plpgsql;

 CREATE TRIGGER trigger_store
-    AFTER INSERT OR UPDATE
+    AFTER INSERT OR UPDATE OR DELETE
     ON osm_island_polygon
     FOR EACH ROW
-    WHEN (pg_trigger_depth() < 1)
     EXECUTE PROCEDURE place_island_polygon.store();

 CREATE TRIGGER trigger_flag
-    AFTER INSERT OR UPDATE
+    AFTER INSERT OR UPDATE OR DELETE
     ON osm_island_polygon
     FOR EACH STATEMENT
-    WHEN (pg_trigger_depth() < 1)
     EXECUTE PROCEDURE place_island_polygon.flag();

 CREATE CONSTRAINT TRIGGER trigger_refresh
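Either variant can be smoke-tested the same way, since both record changed ids row by row and defer the actual re-processing. Assuming trigger_refresh (truncated above) is a deferred constraint trigger that ends up running refresh() at commit, a hypothetical check looks like this:

    -- Hypothetical smoke test; the osm_id value is made up.
    BEGIN;

    UPDATE osm_island_polygon
    SET name = name              -- no-op write, but enough to fire the AFTER UPDATE triggers
    WHERE osm_id = 123456;

    -- Inside the transaction the tracking table now holds the touched id.
    SELECT * FROM place_island_polygon.osm_ids;

    COMMIT;                      -- the deferred trigger_refresh runs refresh() here

    -- After commit, refresh() has re-run update_osm_island_polygon(false)
    -- and emptied the tracking table again.
    SELECT count(*) FROM place_island_polygon.osm_ids;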


@@ -6,7 +6,7 @@ CREATE SCHEMA IF NOT EXISTS place_state;

 CREATE TABLE IF NOT EXISTS place_state.osm_ids
 (
-    osm_id bigint PRIMARY KEY
+    osm_id bigint
 );

 -- etldoc: ne_10m_admin_1_states_provinces -> osm_state_point
@@ -29,8 +29,8 @@
       -- because name matching is difficult
         ST_Within(osm.geometry, ne.geometry)
       -- We leave out leess important states
-      AND ne.scalerank <= 6
-      AND ne.labelrank <= 7
+      AND ne.scalerank <= 3
+      AND ne.labelrank <= 2
     )
 UPDATE osm_state_point AS osm
 -- Normalize both scalerank and labelrank into a ranking system from 1 to 6.
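The threshold change above is the visible part of a larger disagreement about state labels: the master side only ranks states with scalerank <= 3 and labelrank <= 2, while the 2025-refresh side (<= 6 / <= 7) ranks far more of them. The normalization expression referenced by the trailing comment lies outside this hunk; purely as an illustration of collapsing Natural Earth ranks into a capped 1-6 rank, an expression of this shape would do it:

    -- Illustration only; the real expression is not part of this diff.
    -- A state with scalerank 3 and labelrank 2 would come out as rank 3:
    SELECT LEAST(6, GREATEST(1, CEILING((3 + 2) / 2.0))) AS "rank";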
@@ -60,12 +60,18 @@ $$ LANGUAGE SQL;
 SELECT update_osm_state_point(true);

-CREATE INDEX IF NOT EXISTS osm_state_point_rank_idx ON osm_state_point ("rank");

 -- Handle updates
 CREATE OR REPLACE FUNCTION place_state.store() RETURNS trigger AS
 $$
 BEGIN
-    INSERT INTO place_state.osm_ids VALUES (NEW.osm_id) ON CONFLICT (osm_id) DO NOTHING;
+    IF (tg_op = 'DELETE') THEN
+        INSERT INTO place_state.osm_ids VALUES (OLD.osm_id);
+    ELSE
+        INSERT INTO place_state.osm_ids VALUES (NEW.osm_id);
+    END IF;
     RETURN NULL;
 END;
 $$ LANGUAGE plpgsql;
@@ -90,11 +96,6 @@ DECLARE
     t TIMESTAMP WITH TIME ZONE := clock_timestamp();
 BEGIN
     RAISE LOG 'Refresh place_state rank';
-
-    -- Analyze tracking and source tables before performing update
-    ANALYZE place_state.osm_ids;
-    ANALYZE osm_state_point;
-
     PERFORM update_osm_state_point(false);
     -- noinspection SqlWithoutWhere
     DELETE FROM place_state.osm_ids;
@@ -107,17 +108,15 @@ END;
 $$ LANGUAGE plpgsql;

 CREATE TRIGGER trigger_store
-    AFTER INSERT OR UPDATE
+    AFTER INSERT OR UPDATE OR DELETE
     ON osm_state_point
     FOR EACH ROW
-    WHEN (pg_trigger_depth() < 1)
     EXECUTE PROCEDURE place_state.store();

 CREATE TRIGGER trigger_flag
-    AFTER INSERT OR UPDATE
+    AFTER INSERT OR UPDATE OR DELETE
     ON osm_state_point
     FOR EACH STATEMENT
-    WHEN (pg_trigger_depth() < 1)
     EXECUTE PROCEDURE place_state.flag();

 CREATE CONSTRAINT TRIGGER trigger_refresh
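Finally, the two CREATE INDEX ... ("rank") statements removed in this diff (on osm_country_point and osm_state_point) only matter on the read side: the place layer filters and orders label points by rank per zoom level, and an index on "rank" lets those filters skip most of the table. A hypothetical example of the kind of query that benefits; the actual layer SQL is not part of this diff:

    -- Hypothetical read pattern; the real layer query lives elsewhere.
    SELECT osm_id, name, "rank"
    FROM osm_state_point
    WHERE "rank" <= 3        -- low-zoom tiles only want the most important states
    ORDER BY "rank";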

Some files were not shown because too many files have changed in this diff.