mirror of
https://github.com/mapbox/tippecanoe.git
synced 2025-06-25 01:59:11 +00:00
Compare commits
151 Commits
Author | SHA1 | Date | |
---|---|---|---|
94b09821e1 | |||
1b3c4b7420 | |||
b4339b2f75 | |||
a51ddbe180 | |||
10e35c4300 | |||
39cd5e210e | |||
70d11cc335 | |||
b115a07005 | |||
26beada6bd | |||
32179b7ad6 | |||
cc05f46fb7 | |||
41faf3a5c2 | |||
e01ea076ed | |||
506c801c65 | |||
9d25afa41f | |||
9abf09eb7e | |||
138699d243 | |||
3bc5a07e5b | |||
e7b4443838 | |||
0ff6819efb | |||
6fd72d4518 | |||
c19c913bf9 | |||
65f3737325 | |||
1b72804358 | |||
18cdcb0732 | |||
af13a95dc1 | |||
b2fdcba6b0 | |||
767a581874 | |||
0d3192b863 | |||
530852ae00 | |||
4bb88e228a | |||
7724e2c329 | |||
efe3c62bb1 | |||
6951c1b72d | |||
59faead7fa | |||
a42dbd7968 | |||
62052cafab | |||
265b6866db | |||
e6c5aa9bfe | |||
e6997b00ff | |||
498e723563 | |||
dc3021656e | |||
d96dee8dad | |||
a185073f0a | |||
86a341c344 | |||
a1d3ecf9bb | |||
1a44538bdf | |||
46626e4f08 | |||
725ea71e57 | |||
cde1e60603 | |||
55e93a5d37 | |||
fbe4416fba | |||
9b34f7e6e3 | |||
d5d322f36a | |||
448617e0a7 | |||
263ae94e75 | |||
1a95504390 | |||
b70d19288e | |||
fd60cc6600 | |||
8a1f0d83e1 | |||
d9ff3f78fc | |||
34b00eca73 | |||
70291f0415 | |||
bc2f243f0b | |||
6341419229 | |||
c048311124 | |||
25072133fb | |||
142ea37e17 | |||
4001df81cc | |||
22471ab5be | |||
79a08edcf6 | |||
4eaa740f55 | |||
2a6af266b7 | |||
5ba8f2f866 | |||
95997b50c4 | |||
67fe27f70a | |||
a4c79e1ec2 | |||
ce6a1aac88 | |||
a0693446d5 | |||
38dc80ec68 | |||
901f6a76b6 | |||
555ababd2e | |||
9e162e6f8f | |||
1381f0f276 | |||
12fb2c969c | |||
167ec690a0 | |||
4f9edf7f29 | |||
fa9474ff74 | |||
d64328ac35 | |||
ed2f968b4e | |||
4041811372 | |||
85919de490 | |||
e66d976d55 | |||
1f8581c76c | |||
0cd733eb77 | |||
fd8de691eb | |||
e95cc82678 | |||
a076c5619d | |||
86925eea4c | |||
73b63133e2 | |||
2198bcc2a6 | |||
f5135ebc63 | |||
eb24c6e21e | |||
3d074653b5 | |||
a880f44a91 | |||
8002609f0c | |||
d370b07231 | |||
26bcdef06b | |||
4549d1e4f4 | |||
f3e051a610 | |||
b59a251924 | |||
f0a8e5b192 | |||
9343c5fcc1 | |||
816ef2eca8 | |||
41b28b2a1b | |||
558a7a412c | |||
572df8ad39 | |||
a8b2db8d5a | |||
97d65e6b7d | |||
b2eff13667 | |||
290e39f80c | |||
0b84f13159 | |||
5a2003cb2c | |||
32010fc893 | |||
48b5db6ae5 | |||
7f3551070e | |||
92bbf27f72 | |||
cba1b8ae7f | |||
0d0a546b1e | |||
ad17f1f282 | |||
3b9f4691c1 | |||
a40192bcde | |||
c90ba8511f | |||
34a6422c42 | |||
3f2818a814 | |||
c177b8bed2 | |||
d69431e16b | |||
105dfa73d7 | |||
a867646dfd | |||
b068635acf | |||
40ecfc0668 | |||
38a41f4df8 | |||
380550ce85 | |||
028fef470e | |||
b7b476b36c | |||
08ff40e42f | |||
eaeb55bf71 | |||
a1e7426956 | |||
77c4ce4171 | |||
cc58588724 | |||
2bd0cb9f43 |
10
CHANGELOG.md
Normal file
10
CHANGELOG.md
Normal file
@ -0,0 +1,10 @@
|
||||
## 1.2.0
|
||||
|
||||
* Switched to top-down rendering, yielding performance improvements
|
||||
* Add a dot-density gamma feature to thin out especially dense clusters
|
||||
* Add support for multiple layers, making it possible to include more
|
||||
than one GeoJSON featurecollection in a map. [#29](https://github.com/mapbox/tippecanoe/pull/29)
|
||||
* Added flags that let you optionally avoid simplifying lines, restricting
|
||||
maximum tile sizes, and coalescing features [#30](https://github.com/mapbox/tippecanoe/pull/30)
|
||||
* Added check that minimum zoom level is less than maximum zoom level
|
||||
* Added `-v` flag to check tippecanoe's version
|
19
MADE_WITH.md
Normal file
19
MADE_WITH.md
Normal file
@ -0,0 +1,19 @@
|
||||
## [Visualizing a Month of Lightning](http://rousseau.io/2015/03/23/visualizing-a-month-of-lightning/) by Jordan Rousseau
|
||||
|
||||

|
||||
|
||||
## [Making the most detailed tweet map ever](https://www.mapbox.com/blog/twitter-map-every-tweet/) by Eric Fischer
|
||||
|
||||

|
||||
|
||||
## [Superpowering Runkeeper's 1.5 million walks, runs, and bike rides](https://www.mapbox.com/blog/runkeeper-million-routes/)
|
||||
|
||||

|
||||
|
||||
## [The Geotaggers' World Atlas](https://www.mapbox.com/blog/geotaggers-world-atlas/) by Eric Fischer
|
||||
|
||||

|
||||
|
||||
## [Atmospheric River](https://www.mapbox.com/blog/atmospheric-river/)
|
||||
|
||||

|
16
Makefile
16
Makefile
@ -1,9 +1,17 @@
|
||||
PREFIX=/usr/local
|
||||
PREFIX ?= /usr/local
|
||||
MANDIR ?= /usr/share/man/man1/
|
||||
|
||||
all: tippecanoe enumerate decode
|
||||
|
||||
docs: man/tippecanoe.1
|
||||
|
||||
install: tippecanoe
|
||||
mkdir -p $(PREFIX)/bin
|
||||
cp tippecanoe $(PREFIX)/bin/tippecanoe
|
||||
cp man/tippecanoe.1 $(MANDIR)
|
||||
|
||||
man/tippecanoe.1: README.md
|
||||
md2man-roff README.md > man/tippecanoe.1
|
||||
|
||||
vector_tile.pb.cc vector_tile.pb.h: vector_tile.proto
|
||||
protoc --cpp_out=. vector_tile.proto
|
||||
@ -11,11 +19,12 @@ vector_tile.pb.cc vector_tile.pb.h: vector_tile.proto
|
||||
PG=
|
||||
|
||||
H = $(shell find . '(' -name '*.h' -o -name '*.hh' ')')
|
||||
C = $(shell find . '(' -name '*.c' -o -name '*.cc' ')')
|
||||
|
||||
INCLUDES = -I/usr/local/include
|
||||
LIBS = -L/usr/local/lib
|
||||
|
||||
tippecanoe: geojson.o jsonpull.o vector_tile.pb.o tile.o clip.o pool.o mbtiles.o geometry.o projection.o
|
||||
tippecanoe: geojson.o jsonpull.o vector_tile.pb.o tile.o clip.o pool.o mbtiles.o geometry.o projection.o memfile.o
|
||||
g++ $(PG) $(LIBS) -O3 -g -Wall -o $@ $^ -lm -lz -lprotobuf-lite -lsqlite3
|
||||
|
||||
enumerate: enumerate.o
|
||||
@ -36,3 +45,6 @@ libjsonpull.a: jsonpull.o
|
||||
|
||||
clean:
|
||||
rm tippecanoe *.o
|
||||
|
||||
indent:
|
||||
clang-format -i -style="{BasedOnStyle: Google, IndentWidth: 8, UseTab: Always, AllowShortIfStatementsOnASingleLine: false, ColumnLimit: 0, ContinuationIndentWidth: 8, SpaceAfterCStyleCast: true, IndentCaseLabels: false, AllowShortBlocksOnASingleLine: false, AllowShortFunctionsOnASingleLine: false}" $(C) $(H)
|
||||
|
156
README.md
156
README.md
@ -1,14 +1,48 @@
|
||||
tippecanoe
|
||||
==========
|
||||
|
||||
Build vector tilesets from large collections of GeoJSON features.
|
||||
Builds [vector tilesets](https://www.mapbox.com/developers/vector-tiles/) from large collections of [GeoJSON](http://geojson.org/)
|
||||
features. This is a tool for [making maps from huge datasets](MADE_WITH.md).
|
||||
|
||||
Intent
|
||||
------
|
||||
|
||||
The goal of Tippecanoe is to enable making a scale-independent view of your data,
|
||||
so that at any level from the entire world to a single building, you can see
|
||||
the density and texture of the data rather than a simplification from dropping
|
||||
supposedly unimportant features or clustering or aggregating them.
|
||||
|
||||
If you give it all of OpenStreetMap and zoom out, it should give you back
|
||||
something that looks like "[All Streets](http://benfry.com/allstreets/map5.html)"
|
||||
rather than something that looks like an Interstate road atlas.
|
||||
|
||||
If you give it all the building footprints in Los Angeles and zoom out
|
||||
far enough that most individual buildings are no longer discernable, you
|
||||
should still be able to see the extent and variety of development in every neighborhood,
|
||||
not just the largest downtown buildings.
|
||||
|
||||
If you give it a collection of years of tweet locations, you should be able to
|
||||
see the shape and relative popularity of every point of interest and every
|
||||
significant travel corridor.
|
||||
|
||||
Installation
|
||||
------------
|
||||
|
||||
The easiest way to install tippecanoe on OSX is with [Homebrew](http://brew.sh/):
|
||||
|
||||
```js
|
||||
$ brew install tippecanoe
|
||||
```
|
||||
|
||||
Usage
|
||||
-----
|
||||
|
||||
tippecanoe -o file.mbtiles [file.json]
|
||||
```sh
|
||||
$ tippecanoe -o file.mbtiles [file.json ...]
|
||||
```
|
||||
|
||||
If the file is not specified, it reads GeoJSON from the standard input.
|
||||
If no files are specified, it reads GeoJSON from the standard input.
|
||||
If multiple files are specified, each is placed in its own layer.
|
||||
|
||||
The GeoJSON features need not be wrapped in a FeatureCollection.
|
||||
You can concatenate multiple GeoJSON features or files together,
|
||||
@ -18,25 +52,85 @@ it encounters.
|
||||
Options
|
||||
-------
|
||||
|
||||
* -l <i>name</i>: Layer name (default "file" if source is file.json)
|
||||
* -n <i>name</i>: Human-readable name (default file.json)
|
||||
* -z <i>zoom</i>: Base zoom level (default 14)
|
||||
* -Z <i>zoom</i>: Lowest zoom level (default 0)
|
||||
* -d <i>detail</i>: Detail at base zoom level (default 26-basezoom, ~0.5m, for tile resolution of 4096 if -z14)
|
||||
* -D <i>detail</i>: Detail at lower zoom levels (default 10, for tile resolution of 1024)
|
||||
* -x <i>name</i>: Exclude the named properties from all features
|
||||
* -y <i>name</i>: Include the named properties in all features, excluding all those not explicitly named
|
||||
* -X: Exclude all properties and encode only geometries
|
||||
### Naming
|
||||
|
||||
* -l _name_: Layer name (default "file" if source is file.json or output is file.mbtiles). Only works if there is only one layer.
|
||||
* -n _name_: Human-readable name (default file.json)
|
||||
|
||||
### File control
|
||||
|
||||
* -o _file_.mbtiles: Name the output file.
|
||||
* -f: Delete the mbtiles file if it already exists instead of giving an error
|
||||
* -r <i>rate</i>: Rate at which dots are dropped at lower zoom levels (default 2.5)
|
||||
* -b <i>pixels</i>: Buffer size where features are duplicated from adjacent tiles (default 5)
|
||||
|
||||
### Zoom levels and resolution
|
||||
|
||||
* -z _zoom_: Base (maxzoom) zoom level (default 14)
|
||||
* -Z _zoom_: Lowest (minzoom) zoom level (default 0)
|
||||
* -d _detail_: Detail at base zoom level (default 26-basezoom, ~0.5m, for tile resolution of 4096 if -z14)
|
||||
* -D _detail_: Detail at lower zoom levels (default 10, for tile resolution of 1024)
|
||||
* -m _detail_: Minimum detail that it will try if tiles are too big at regular detail (default 7)
|
||||
* -b _pixels_: Buffer size where features are duplicated from adjacent tiles. Units are "screen pixels"--1/256th of the tile width or height. (default 5)
|
||||
|
||||
### Properties
|
||||
|
||||
* -x _name_: Exclude the named properties from all features
|
||||
* -y _name_: Include the named properties in all features, excluding all those not explicitly named
|
||||
* -X: Exclude all properties and encode only geometries
|
||||
|
||||
### Point simplification
|
||||
|
||||
* -r _rate_: Rate at which dots are dropped at lower zoom levels (default 2.5)
|
||||
* -g _gamma_: Rate at which especially dense dots are dropped (default 0, for no effect). A gamma of 2 reduces the number of dots less than a pixel apart to the square root of their original number.
|
||||
|
||||
### Doing less
|
||||
|
||||
* -ps: Don't simplify lines
|
||||
* -pr: Don't reverse the direction of lines to make them coalesce better
|
||||
* -pc: Don't coalesce features with the same properties
|
||||
* -pf: Don't limit tiles to 200,000 features
|
||||
* -pk: Don't limit tiles to 500K bytes
|
||||
* -po: Don't reorder features to put the same properties in sequence
|
||||
* -pl: Let "dot" simplification apply to lines too
|
||||
* -pd: Dynamically drop some fraction of features from large tiles to keep them under the 500K size limit. It will probably look ugly at the tile boundaries.
|
||||
* -q: Work quietly instead of reporting progress
|
||||
|
||||
Example
|
||||
-------
|
||||
|
||||
tippecanoe -o alameda.mbtiles -l alameda -n "Alameda County from TIGER" -z13 tl_2014_06001_roads.json
|
||||
```sh
|
||||
$ tippecanoe -o alameda.mbtiles -l alameda -n "Alameda County from TIGER" -z13 tl_2014_06001_roads.json
|
||||
```
|
||||
|
||||
cat tiger/tl_2014_*_roads.json | tippecanoe -o tiger.mbtiles -l roads -n "All TIGER roads, one zoom" -z12 -Z12 -d14 -x LINEARID -x RTTYP
|
||||
```
|
||||
$ cat tiger/tl_2014_*_roads.json | tippecanoe -o tiger.mbtiles -l roads -n "All TIGER roads, one zoom" -z12 -Z12 -d14 -x LINEARID -x RTTYP
|
||||
```
|
||||
|
||||
Point styling
|
||||
-------------
|
||||
|
||||
To provide a consistent density gradient as you zoom, the Mapbox Studio style needs to be
|
||||
coordinated with the base zoom level and dot-dropping rate. You can use this shell script to
|
||||
calculate the appropriate marker-width at high zoom levels to match the fraction of dots
|
||||
that were dropped at low zoom levels.
|
||||
|
||||
If you used `-z` to change the base zoom level or `-r` to change the
|
||||
dot-dropping rate, replace them in the `basezoom` and `rate` below.
|
||||
|
||||
awk 'BEGIN {
|
||||
dotsize = 2; # up to you to decide
|
||||
basezoom = 14; # tippecanoe -z 14
|
||||
rate = 2.5; # tippecanoe -r 2.5
|
||||
|
||||
print " marker-line-width: 0;";
|
||||
print " marker-ignore-placement: true;";
|
||||
print " marker-allow-overlap: true;";
|
||||
print " marker-width: " dotsize ";";
|
||||
for (i = basezoom + 1; i <= 22; i++) {
|
||||
print " [zoom >= " i "] { marker-width: " (dotsize * exp(log(sqrt(rate)) * (i - basezoom))) "; }";
|
||||
}
|
||||
|
||||
exit(0);
|
||||
}'
|
||||
|
||||
Geometric simplifications
|
||||
-------------------------
|
||||
@ -48,6 +142,10 @@ For point features, it drops 1/2.5 of the dots for each zoom level above the bas
|
||||
I don't know why 2.5 is the appropriate number, but the densities of many different
|
||||
data sets fall off at about this same rate. You can use -r to specify a different rate.
|
||||
|
||||
You can use the gamma option to thin out especially dense clusters of points.
|
||||
For any area that where dots are closer than one pixel together (at whatever zoom level),
|
||||
a gamma of 3, for example, will reduce these clusters to the cube root of their original density.
|
||||
|
||||
For line features, it drops any features that are too small to draw at all.
|
||||
This still leaves the lower zooms too dark (and too dense for the 500K tile limit,
|
||||
in some places), so I need to figure out an equitable way to throw features away.
|
||||
@ -67,11 +165,33 @@ lower resolutions before failing if it still doesn't fit.
|
||||
Development
|
||||
-----------
|
||||
|
||||
Requires protoc (brew install protobuf or apt-get install libprotobuf-dev),
|
||||
and sqlite3 (apt-get install libsqlite3-dev). To build:
|
||||
Requires protoc and sqlite3. Rebuilding the manpage
|
||||
uses md2man (`gem install md2man`).
|
||||
|
||||
MacOS:
|
||||
|
||||
brew install protobuf
|
||||
|
||||
Linux:
|
||||
|
||||
sudo apt-get install libprotobuf-dev
|
||||
sudo apt-get install protobuf-compiler
|
||||
sudo apt-get install libsqlite3-dev
|
||||
|
||||
Then build:
|
||||
|
||||
make
|
||||
|
||||
and perhaps
|
||||
|
||||
make install
|
||||
|
||||
Examples
|
||||
------
|
||||
|
||||
Check out [some examples of maps made with tippecanoe](MADE_WITH.md)
|
||||
|
||||
Name
|
||||
----
|
||||
|
||||
The name is [a joking reference](http://en.wikipedia.org/wiki/Tippecanoe_and_Tyler_Too) to a "tiler" for making map tiles.
|
||||
|
10
clip.c
10
clip.c
@ -31,10 +31,10 @@ int clip(double *x0, double *y0, double *x1, double *y1, double xmin, double ymi
|
||||
int changed = 0;
|
||||
|
||||
while (1) {
|
||||
if (!(outcode0 | outcode1)) { // Bitwise OR is 0. Trivially accept and get out of loop
|
||||
if (!(outcode0 | outcode1)) { // Bitwise OR is 0. Trivially accept and get out of loop
|
||||
accept = 1;
|
||||
break;
|
||||
} else if (outcode0 & outcode1) { // Bitwise AND is not 0. Trivially reject and get out of loop
|
||||
} else if (outcode0 & outcode1) { // Bitwise AND is not 0. Trivially reject and get out of loop
|
||||
break;
|
||||
} else {
|
||||
// failed both tests, so calculate the line segment to clip
|
||||
@ -46,16 +46,16 @@ int clip(double *x0, double *y0, double *x1, double *y1, double xmin, double ymi
|
||||
|
||||
// Now find the intersection point;
|
||||
// use formulas y = y0 + slope * (x - x0), x = x0 + (1 / slope) * (y - y0)
|
||||
if (outcodeOut & TOP) { // point is above the clip rectangle
|
||||
if (outcodeOut & TOP) { // point is above the clip rectangle
|
||||
x = *x0 + (*x1 - *x0) * (ymax - *y0) / (*y1 - *y0);
|
||||
y = ymax;
|
||||
} else if (outcodeOut & BOTTOM) { // point is below the clip rectangle
|
||||
} else if (outcodeOut & BOTTOM) { // point is below the clip rectangle
|
||||
x = *x0 + (*x1 - *x0) * (ymin - *y0) / (*y1 - *y0);
|
||||
y = ymin;
|
||||
} else if (outcodeOut & RIGHT) { // point is to the right of clip rectangle
|
||||
y = *y0 + (*y1 - *y0) * (xmax - *x0) / (*x1 - *x0);
|
||||
x = xmax;
|
||||
} else if (outcodeOut & LEFT) { // point is to the left of clip rectangle
|
||||
} else if (outcodeOut & LEFT) { // point is to the left of clip rectangle
|
||||
y = *y0 + (*y1 - *y0) * (xmin - *x0) / (*x1 - *x0);
|
||||
x = xmin;
|
||||
}
|
||||
|
84
decode.cc
84
decode.cc
@ -8,18 +8,16 @@
|
||||
#include "vector_tile.pb.h"
|
||||
|
||||
extern "C" {
|
||||
#include "projection.h"
|
||||
#include "projection.h"
|
||||
}
|
||||
|
||||
// https://github.com/mapbox/mapnik-vector-tile/blob/master/src/vector_tile_compression.hpp
|
||||
inline bool is_compressed(std::string const& data) {
|
||||
return data.size() > 2 &&
|
||||
(((uint8_t)data[0] == 0x78 && (uint8_t)data[1] == 0x9C) ||
|
||||
((uint8_t)data[0] == 0x1F && (uint8_t)data[1] == 0x8B));
|
||||
inline bool is_compressed(std::string const &data) {
|
||||
return data.size() > 2 && (((uint8_t) data[0] == 0x78 && (uint8_t) data[1] == 0x9C) || ((uint8_t) data[0] == 0x1F && (uint8_t) data[1] == 0x8B));
|
||||
}
|
||||
|
||||
// https://github.com/mapbox/mapnik-vector-tile/blob/master/src/vector_tile_compression.hpp
|
||||
inline int decompress(std::string const& input, std::string & output) {
|
||||
inline int decompress(std::string const &input, std::string &output) {
|
||||
z_stream inflate_s;
|
||||
inflate_s.zalloc = Z_NULL;
|
||||
inflate_s.zfree = Z_NULL;
|
||||
@ -29,13 +27,13 @@ inline int decompress(std::string const& input, std::string & output) {
|
||||
if (inflateInit2(&inflate_s, 32 + 15) != Z_OK) {
|
||||
fprintf(stderr, "error: %s\n", inflate_s.msg);
|
||||
}
|
||||
inflate_s.next_in = (Bytef *)input.data();
|
||||
inflate_s.next_in = (Bytef *) input.data();
|
||||
inflate_s.avail_in = input.size();
|
||||
size_t length = 0;
|
||||
do {
|
||||
output.resize(length + 2 * input.size());
|
||||
inflate_s.avail_out = 2 * input.size();
|
||||
inflate_s.next_out = (Bytef *)(output.data() + length);
|
||||
inflate_s.next_out = (Bytef *) (output.data() + length);
|
||||
int ret = inflate(&inflate_s, Z_FINISH);
|
||||
if (ret != Z_STREAM_END && ret != Z_OK && ret != Z_BUF_ERROR) {
|
||||
fprintf(stderr, "error: %s\n", inflate_s.msg);
|
||||
@ -53,6 +51,16 @@ int dezig(unsigned n) {
|
||||
return (n >> 1) ^ (-(n & 1));
|
||||
}
|
||||
|
||||
void out(const char *s) {
|
||||
for (; *s; s++) {
|
||||
if (*s == ':' || *s == '=') {
|
||||
putchar('_');
|
||||
} else {
|
||||
putchar(*s);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void handle(std::string message, int z, unsigned x, unsigned y) {
|
||||
GOOGLE_PROTOBUF_VERIFY_VERSION;
|
||||
|
||||
@ -61,7 +69,7 @@ void handle(std::string message, int z, unsigned x, unsigned y) {
|
||||
|
||||
if (is_compressed(message)) {
|
||||
std::string uncompressed;
|
||||
decompress(message,uncompressed);
|
||||
decompress(message, uncompressed);
|
||||
if (!tile.ParseFromString(uncompressed)) {
|
||||
fprintf(stderr, "Couldn't decompress tile %d/%u/%u\n", z, x, y);
|
||||
exit(EXIT_FAILURE);
|
||||
@ -78,6 +86,8 @@ void handle(std::string message, int z, unsigned x, unsigned y) {
|
||||
for (int f = 0; f < layer.features_size(); f++) {
|
||||
mapnik::vector::tile_feature feat = layer.features(f);
|
||||
int px = 0, py = 0;
|
||||
int within = 0;
|
||||
double startlat = 0, startlon = 0;
|
||||
|
||||
for (int g = 0; g < feat.geometry_size(); g++) {
|
||||
uint32_t geom = feat.geometry(g);
|
||||
@ -86,9 +96,34 @@ void handle(std::string message, int z, unsigned x, unsigned y) {
|
||||
|
||||
if (op == 1 || op == 2) {
|
||||
if (op == 1) {
|
||||
printf("\n");
|
||||
if (feat.type() == 3) {
|
||||
if (g == 0) {
|
||||
printf("outer ");
|
||||
} else {
|
||||
printf("\ninner ");
|
||||
}
|
||||
|
||||
for (int m = 0; m + 1 < feat.tags_size(); m += 2) {
|
||||
int k = feat.tags(m);
|
||||
int v = feat.tags(m + 1);
|
||||
|
||||
out(layer.keys(k).c_str());
|
||||
printf("=");
|
||||
mapnik::vector::tile_value const &value = layer.values(v);
|
||||
if (value.has_string_value()) {
|
||||
out(value.string_value().c_str());
|
||||
}
|
||||
|
||||
printf(" ");
|
||||
}
|
||||
|
||||
printf(": ");
|
||||
} else {
|
||||
printf("\n");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
for (unsigned k = 0; k < count; k++) {
|
||||
px += dezig(feat.geometry(g + 1));
|
||||
py += dezig(feat.geometry(g + 2));
|
||||
@ -101,9 +136,28 @@ void handle(std::string message, int z, unsigned x, unsigned y) {
|
||||
double lat, lon;
|
||||
tile2latlon(wx, wy, 32, &lat, &lon);
|
||||
printf("%f,%f ", lat, lon);
|
||||
|
||||
if (op == 1) {
|
||||
startlat = lat;
|
||||
startlon = lon;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#if 0
|
||||
printf(": ");
|
||||
|
||||
for (int m = 0; m + 1 < feat.tags_size(); m += 2) {
|
||||
int k = feat.tags(m);
|
||||
int v = feat.tags(m + 1);
|
||||
|
||||
printf("%s ", layer.keys(k).c_str());
|
||||
}
|
||||
#endif
|
||||
|
||||
printf("\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -114,7 +168,7 @@ void decode(char *fname, int z, unsigned x, unsigned y) {
|
||||
unsigned ox = x, oy = y;
|
||||
|
||||
if (sqlite3_open(fname, &db) != SQLITE_OK) {
|
||||
fprintf(stderr, "%s: %s\n", fname, sqlite3_errmsg(db));
|
||||
fprintf(stderr, "%s: %s\n", fname, sqlite3_errmsg(db));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
@ -150,10 +204,10 @@ void decode(char *fname, int z, unsigned x, unsigned y) {
|
||||
y /= 2;
|
||||
}
|
||||
|
||||
if (sqlite3_close(db) != SQLITE_OK) {
|
||||
fprintf(stderr, "%s: could not close database: %s\n", fname, sqlite3_errmsg(db));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (sqlite3_close(db) != SQLITE_OK) {
|
||||
fprintf(stderr, "%s: could not close database: %s\n", fname, sqlite3_errmsg(db));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
|
||||
void usage(char **argv) {
|
||||
|
14
enumerate.c
14
enumerate.c
@ -7,7 +7,7 @@ void enumerate(char *fname) {
|
||||
sqlite3 *db;
|
||||
|
||||
if (sqlite3_open(fname, &db) != SQLITE_OK) {
|
||||
fprintf(stderr, "%s: %s\n", fname, sqlite3_errmsg(db));
|
||||
fprintf(stderr, "%s: %s\n", fname, sqlite3_errmsg(db));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
@ -24,16 +24,16 @@ void enumerate(char *fname) {
|
||||
long long x = sqlite3_column_int(stmt, 1);
|
||||
long long y = sqlite3_column_int(stmt, 2);
|
||||
|
||||
y = (1LL << zoom) - y;
|
||||
y = (1LL << zoom) - 1 - y;
|
||||
printf("%s %lld %lld %lld\n", fname, zoom, x, y);
|
||||
}
|
||||
|
||||
sqlite3_finalize(stmt);
|
||||
|
||||
if (sqlite3_close(db) != SQLITE_OK) {
|
||||
fprintf(stderr, "%s: could not close database: %s\n", fname, sqlite3_errmsg(db));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (sqlite3_close(db) != SQLITE_OK) {
|
||||
fprintf(stderr, "%s: could not close database: %s\n", fname, sqlite3_errmsg(db));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
|
||||
void usage(char **argv) {
|
||||
@ -43,7 +43,7 @@ void usage(char **argv) {
|
||||
|
||||
int main(int argc, char **argv) {
|
||||
extern int optind;
|
||||
//extern char *optarg;
|
||||
// extern char *optarg;
|
||||
int i;
|
||||
|
||||
while ((i = getopt(argc, argv, "")) != -1) {
|
||||
|
221
geometry.cc
221
geometry.cc
@ -8,17 +8,25 @@
|
||||
#include <unistd.h>
|
||||
#include <math.h>
|
||||
#include <sqlite3.h>
|
||||
#include <limits.h>
|
||||
#include "geometry.hh"
|
||||
|
||||
extern "C" {
|
||||
#include "tile.h"
|
||||
#include "clip.h"
|
||||
#include "projection.h"
|
||||
#include "tile.h"
|
||||
#include "clip.h"
|
||||
#include "projection.h"
|
||||
}
|
||||
|
||||
drawvec decode_geometry(char **meta, int z, unsigned tx, unsigned ty, int detail) {
|
||||
drawvec decode_geometry(char **meta, int z, unsigned tx, unsigned ty, int detail, long long *bbox) {
|
||||
drawvec out;
|
||||
|
||||
bbox[0] = LONG_LONG_MAX;
|
||||
bbox[1] = LONG_LONG_MAX;
|
||||
bbox[2] = LONG_LONG_MIN;
|
||||
bbox[3] = LONG_LONG_MIN;
|
||||
|
||||
long long wx = initial_x, wy = initial_y;
|
||||
|
||||
while (1) {
|
||||
draw d;
|
||||
|
||||
@ -28,18 +36,35 @@ drawvec decode_geometry(char **meta, int z, unsigned tx, unsigned ty, int detail
|
||||
}
|
||||
|
||||
if (d.op == VT_MOVETO || d.op == VT_LINETO) {
|
||||
int wx, wy;
|
||||
deserialize_int(meta, &wx);
|
||||
deserialize_int(meta, &wy);
|
||||
long long dx, dy;
|
||||
|
||||
long long wwx = (unsigned) wx;
|
||||
long long wwy = (unsigned) wy;
|
||||
deserialize_long_long(meta, &dx);
|
||||
deserialize_long_long(meta, &dy);
|
||||
|
||||
wx += dx << geometry_scale;
|
||||
wy += dy << geometry_scale;
|
||||
|
||||
long long wwx = wx;
|
||||
long long wwy = wy;
|
||||
|
||||
if (z != 0) {
|
||||
wwx -= tx << (32 - z);
|
||||
wwy -= ty << (32 - z);
|
||||
}
|
||||
|
||||
if (wwx < bbox[0]) {
|
||||
bbox[0] = wwx;
|
||||
}
|
||||
if (wwy < bbox[1]) {
|
||||
bbox[1] = wwy;
|
||||
}
|
||||
if (wwx > bbox[2]) {
|
||||
bbox[2] = wwx;
|
||||
}
|
||||
if (wwy > bbox[3]) {
|
||||
bbox[3] = wwy;
|
||||
}
|
||||
|
||||
d.x = wwx;
|
||||
d.y = wwy;
|
||||
}
|
||||
@ -59,7 +84,7 @@ void to_tile_scale(drawvec &geom, int z, int detail) {
|
||||
}
|
||||
}
|
||||
|
||||
drawvec remove_noop(drawvec geom, int type) {
|
||||
drawvec remove_noop(drawvec geom, int type, int shift) {
|
||||
// first pass: remove empty linetos
|
||||
|
||||
long long x = 0, y = 0;
|
||||
@ -67,7 +92,7 @@ drawvec remove_noop(drawvec geom, int type) {
|
||||
unsigned i;
|
||||
|
||||
for (i = 0; i < geom.size(); i++) {
|
||||
if (geom[i].op == VT_LINETO && geom[i].x == x && geom[i].y == y) {
|
||||
if (geom[i].op == VT_LINETO && (geom[i].x >> shift) == x && (geom[i].y >> shift) == y) {
|
||||
continue;
|
||||
}
|
||||
|
||||
@ -75,8 +100,8 @@ drawvec remove_noop(drawvec geom, int type) {
|
||||
out.push_back(geom[i]);
|
||||
} else { /* moveto or lineto */
|
||||
out.push_back(geom[i]);
|
||||
x = geom[i].x;
|
||||
y = geom[i].y;
|
||||
x = geom[i].x >> shift;
|
||||
y = geom[i].y >> shift;
|
||||
}
|
||||
}
|
||||
|
||||
@ -96,7 +121,7 @@ drawvec remove_noop(drawvec geom, int type) {
|
||||
}
|
||||
|
||||
if (geom[i + 1].op == VT_CLOSEPATH) {
|
||||
i++; // also remove unused closepath
|
||||
i++; // also remove unused closepath
|
||||
continue;
|
||||
}
|
||||
}
|
||||
@ -112,7 +137,7 @@ drawvec remove_noop(drawvec geom, int type) {
|
||||
|
||||
for (i = 0; i < geom.size(); i++) {
|
||||
if (geom[i].op == VT_MOVETO) {
|
||||
if (i > 0 && geom[i - 1].op == VT_LINETO && geom[i - 1].x == geom[i].x && geom[i - 1].y == geom[i].y) {
|
||||
if (i > 0 && geom[i - 1].op == VT_LINETO && (geom[i - 1].x >> shift) == (geom[i].x >> shift) && (geom[i - 1].y >> shift) == (geom[i].y >> shift)) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
@ -121,6 +146,32 @@ drawvec remove_noop(drawvec geom, int type) {
|
||||
}
|
||||
}
|
||||
|
||||
// remove degenerate polygons
|
||||
|
||||
if (type == VT_POLYGON) {
|
||||
geom = out;
|
||||
out.resize(0);
|
||||
|
||||
for (i = 0; i < geom.size(); i++) {
|
||||
if (geom[i].op == VT_MOVETO) {
|
||||
if (i + 1 < geom.size() && (geom[i + 1].op == VT_MOVETO || geom[i + 1].op == VT_CLOSEPATH)) {
|
||||
i += 1;
|
||||
continue; // no lineto
|
||||
}
|
||||
if (i + 2 < geom.size() && (geom[i + 2].op == VT_MOVETO || geom[i + 2].op == VT_CLOSEPATH)) {
|
||||
i += 2;
|
||||
continue; // just one lineto
|
||||
}
|
||||
if (i + 3 < geom.size() && (geom[i + 3].op == VT_MOVETO)) {
|
||||
i += 3;
|
||||
continue; // just two linetos. two linetos and a closepath is ok
|
||||
}
|
||||
}
|
||||
|
||||
out.push_back(geom[i]);
|
||||
}
|
||||
}
|
||||
|
||||
return out;
|
||||
}
|
||||
|
||||
@ -183,17 +234,17 @@ static bool inside(draw d, int edge, long long area, long long buffer) {
|
||||
long long clip_buffer = buffer * area / 256;
|
||||
|
||||
switch (edge) {
|
||||
case 0: // top
|
||||
return d.y > -clip_buffer;
|
||||
case 0: // top
|
||||
return d.y > -clip_buffer;
|
||||
|
||||
case 1: // right
|
||||
return d.x < area + clip_buffer;
|
||||
case 1: // right
|
||||
return d.x < area + clip_buffer;
|
||||
|
||||
case 2: // bottom
|
||||
return d.y < area + clip_buffer;
|
||||
case 2: // bottom
|
||||
return d.y < area + clip_buffer;
|
||||
|
||||
case 3: // left
|
||||
return d.x > -clip_buffer;
|
||||
case 3: // left
|
||||
return d.x > -clip_buffer;
|
||||
}
|
||||
|
||||
fprintf(stderr, "internal error inside\n");
|
||||
@ -208,8 +259,8 @@ static draw get_line_intersection(draw p0, draw p1, draw p2, draw p3) {
|
||||
double s2_y = p3.y - p2.y;
|
||||
|
||||
double t;
|
||||
//s = (-s1_y * (p0.x - p2.x) + s1_x * (p0.y - p2.y)) / (-s2_x * s1_y + s1_x * s2_y);
|
||||
t = ( s2_x * (p0.y - p2.y) - s2_y * (p0.x - p2.x)) / (-s2_x * s1_y + s1_x * s2_y);
|
||||
// s = (-s1_y * (p0.x - p2.x) + s1_x * (p0.y - p2.y)) / (-s2_x * s1_y + s1_x * s2_y);
|
||||
t = (s2_x * (p0.y - p2.y) - s2_y * (p0.x - p2.x)) / (-s2_x * s1_y + s1_x * s2_y);
|
||||
|
||||
return draw(VT_LINETO, p0.x + (t * s1_x), p0.y + (t * s1_y));
|
||||
}
|
||||
@ -218,21 +269,21 @@ static draw intersect(draw a, draw b, int edge, long long area, long long buffer
|
||||
long long clip_buffer = buffer * area / 256;
|
||||
|
||||
switch (edge) {
|
||||
case 0: // top
|
||||
return get_line_intersection(a, b, draw(VT_MOVETO, -clip_buffer, -clip_buffer), draw(VT_MOVETO, area + clip_buffer, -clip_buffer));
|
||||
break;
|
||||
case 0: // top
|
||||
return get_line_intersection(a, b, draw(VT_MOVETO, -clip_buffer, -clip_buffer), draw(VT_MOVETO, area + clip_buffer, -clip_buffer));
|
||||
break;
|
||||
|
||||
case 1: // right
|
||||
return get_line_intersection(a, b, draw(VT_MOVETO, area + clip_buffer, -clip_buffer), draw(VT_MOVETO, area + clip_buffer, area + clip_buffer));
|
||||
break;
|
||||
case 1: // right
|
||||
return get_line_intersection(a, b, draw(VT_MOVETO, area + clip_buffer, -clip_buffer), draw(VT_MOVETO, area + clip_buffer, area + clip_buffer));
|
||||
break;
|
||||
|
||||
case 2: // bottom
|
||||
return get_line_intersection(a, b, draw(VT_MOVETO, area + clip_buffer, area + clip_buffer), draw(VT_MOVETO, -clip_buffer, area + clip_buffer));
|
||||
break;
|
||||
case 2: // bottom
|
||||
return get_line_intersection(a, b, draw(VT_MOVETO, area + clip_buffer, area + clip_buffer), draw(VT_MOVETO, -clip_buffer, area + clip_buffer));
|
||||
break;
|
||||
|
||||
case 3: // left
|
||||
return get_line_intersection(a, b, draw(VT_MOVETO, -clip_buffer, area + clip_buffer), draw(VT_MOVETO, -clip_buffer, -clip_buffer));
|
||||
break;
|
||||
case 3: // left
|
||||
return get_line_intersection(a, b, draw(VT_MOVETO, -clip_buffer, area + clip_buffer), draw(VT_MOVETO, -clip_buffer, -clip_buffer));
|
||||
break;
|
||||
}
|
||||
|
||||
fprintf(stderr, "internal error intersecting\n");
|
||||
@ -308,7 +359,9 @@ drawvec clip_poly(drawvec &geom, int z, int detail, int buffer) {
|
||||
}
|
||||
|
||||
if (j >= geom.size() || geom[j].op == VT_CLOSEPATH) {
|
||||
out.push_back(draw(VT_CLOSEPATH, 0, 0));
|
||||
if (out.size() > 0 && out[out.size() - 1].op != VT_CLOSEPATH) {
|
||||
out.push_back(draw(VT_CLOSEPATH, 0, 0));
|
||||
}
|
||||
i = j;
|
||||
} else {
|
||||
i = j - 1;
|
||||
@ -348,7 +401,7 @@ drawvec reduce_tiny_poly(drawvec &geom, int z, int detail, bool *reduced, double
|
||||
area = fabs(area / 2);
|
||||
|
||||
if (area <= pixel * pixel) {
|
||||
//printf("area is only %f vs %lld so using square\n", area, pixel * pixel);
|
||||
// printf("area is only %f vs %lld so using square\n", area, pixel * pixel);
|
||||
|
||||
*accum_area += area;
|
||||
if (*accum_area > pixel * pixel) {
|
||||
@ -363,7 +416,7 @@ drawvec reduce_tiny_poly(drawvec &geom, int z, int detail, bool *reduced, double
|
||||
*accum_area -= pixel * pixel;
|
||||
}
|
||||
} else {
|
||||
//printf("area is %f so keeping instead of %lld\n", area, pixel * pixel);
|
||||
// printf("area is %f so keeping instead of %lld\n", area, pixel * pixel);
|
||||
|
||||
for (unsigned k = i; k <= j && k < geom.size(); k++) {
|
||||
out.push_back(geom[k]);
|
||||
@ -374,7 +427,13 @@ drawvec reduce_tiny_poly(drawvec &geom, int z, int detail, bool *reduced, double
|
||||
|
||||
i = j;
|
||||
} else {
|
||||
fprintf(stderr, "how did we get here with %d?\n", geom[i].op);
|
||||
fprintf(stderr, "how did we get here with %d in %d?\n", geom[i].op, (int) geom.size());
|
||||
|
||||
for (unsigned n = 0; n < geom.size(); n++) {
|
||||
fprintf(stderr, "%d/%lld/%lld ", geom[n].op, geom[n].x, geom[n].y);
|
||||
}
|
||||
fprintf(stderr, "\n");
|
||||
|
||||
out.push_back(geom[i]);
|
||||
}
|
||||
}
|
||||
@ -382,11 +441,68 @@ drawvec reduce_tiny_poly(drawvec &geom, int z, int detail, bool *reduced, double
|
||||
return out;
|
||||
}
|
||||
|
||||
drawvec clip_point(drawvec &geom, int z, int detail, long long buffer) {
|
||||
drawvec out;
|
||||
unsigned i;
|
||||
|
||||
long long min = 0;
|
||||
long long area = 0xFFFFFFFF;
|
||||
if (z != 0) {
|
||||
area = 1LL << (32 - z);
|
||||
|
||||
min -= buffer * area / 256;
|
||||
area += buffer * area / 256;
|
||||
}
|
||||
|
||||
for (i = 0; i < geom.size(); i++) {
|
||||
if (geom[i].x >= min && geom[i].y >= min && geom[i].x <= area && geom[i].y <= area) {
|
||||
out.push_back(geom[i]);
|
||||
}
|
||||
}
|
||||
|
||||
return out;
|
||||
}
|
||||
|
||||
int quick_check(long long *bbox, int z, int detail, long long buffer) {
|
||||
long long min = 0;
|
||||
long long area = 0xFFFFFFFF;
|
||||
if (z != 0) {
|
||||
area = 1LL << (32 - z);
|
||||
|
||||
min -= buffer * area / 256;
|
||||
area += buffer * area / 256;
|
||||
}
|
||||
|
||||
// bbox entirely outside the tile
|
||||
if (bbox[0] > area || bbox[1] > area) {
|
||||
return 0;
|
||||
}
|
||||
if (bbox[2] < min || bbox[3] < min) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// bbox entirely within the tile
|
||||
if (bbox[0] > min && bbox[1] > min && bbox[2] < area && bbox[3] < area) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
// some overlap of edge
|
||||
return 2;
|
||||
}
|
||||
|
||||
drawvec clip_lines(drawvec &geom, int z, int detail, long long buffer) {
|
||||
drawvec out;
|
||||
unsigned i;
|
||||
|
||||
long long min = 0;
|
||||
long long area = 0xFFFFFFFF;
|
||||
if (z != 0) {
|
||||
area = 1LL << (32 - z);
|
||||
|
||||
min -= buffer * area / 256;
|
||||
area += buffer * area / 256;
|
||||
}
|
||||
|
||||
for (i = 0; i < geom.size(); i++) {
|
||||
if (i > 0 && (geom[i - 1].op == VT_MOVETO || geom[i - 1].op == VT_LINETO) && geom[i].op == VT_LINETO) {
|
||||
double x1 = geom[i - 1].x;
|
||||
@ -395,24 +511,15 @@ drawvec clip_lines(drawvec &geom, int z, int detail, long long buffer) {
|
||||
double x2 = geom[i - 0].x;
|
||||
double y2 = geom[i - 0].y;
|
||||
|
||||
long long min = 0;
|
||||
long long area = 0xFFFFFFFF;
|
||||
if (z != 0) {
|
||||
area = 1LL << (32 - z);
|
||||
|
||||
min -= buffer * area / 256;
|
||||
area += buffer * area / 256;
|
||||
}
|
||||
|
||||
int c = clip(&x1, &y1, &x2, &y2, min, min, area, area);
|
||||
|
||||
if (c > 1) { // clipped
|
||||
if (c > 1) { // clipped
|
||||
out.push_back(draw(VT_MOVETO, x1, y1));
|
||||
out.push_back(draw(VT_LINETO, x2, y2));
|
||||
out.push_back(draw(VT_MOVETO, geom[i].x, geom[i].y));
|
||||
} else if (c == 1) { // unchanged
|
||||
} else if (c == 1) { // unchanged
|
||||
out.push_back(geom[i]);
|
||||
} else { // clipped away entirely
|
||||
} else { // clipped away entirely
|
||||
out.push_back(draw(VT_MOVETO, geom[i].x, geom[i].y));
|
||||
}
|
||||
} else {
|
||||
@ -476,9 +583,7 @@ static void douglas_peucker(drawvec &geom, int start, int n, double e) {
|
||||
// find index idx of element with max_distance
|
||||
int i;
|
||||
for (i = first + 1; i < second; i++) {
|
||||
double temp_dist = square_distance_from_line(geom[start + i].x, geom[start + i].y,
|
||||
geom[start + first].x, geom[start + first].y,
|
||||
geom[start + second].x, geom[start + second].y);
|
||||
double temp_dist = square_distance_from_line(geom[start + i].x, geom[start + i].y, geom[start + first].x, geom[start + first].y, geom[start + second].x, geom[start + second].y);
|
||||
|
||||
double distance = fabs(temp_dist);
|
||||
|
||||
@ -548,6 +653,10 @@ drawvec simplify_lines(drawvec &geom, int z, int detail) {
|
||||
drawvec reorder_lines(drawvec &geom) {
|
||||
// Only reorder simple linestrings with a single moveto
|
||||
|
||||
if (geom.size() == 0) {
|
||||
return geom;
|
||||
}
|
||||
|
||||
unsigned i;
|
||||
for (i = 0; i < geom.size(); i++) {
|
||||
if (geom[i].op == VT_MOVETO) {
|
||||
|
@ -10,16 +10,19 @@ struct draw {
|
||||
this->y = y;
|
||||
}
|
||||
|
||||
draw() { }
|
||||
draw() {
|
||||
}
|
||||
};
|
||||
|
||||
typedef std::vector<draw> drawvec;
|
||||
|
||||
drawvec decode_geometry(char **meta, int z, unsigned tx, unsigned ty, int detail);
|
||||
drawvec decode_geometry(char **meta, int z, unsigned tx, unsigned ty, int detail, long long *bbox);
|
||||
void to_tile_scale(drawvec &geom, int z, int detail);
|
||||
drawvec remove_noop(drawvec geom, int type);
|
||||
drawvec remove_noop(drawvec geom, int type, int shift);
|
||||
drawvec clip_point(drawvec &geom, int z, int detail, long long buffer);
|
||||
drawvec clip_poly(drawvec &geom, int z, int detail, int buffer);
|
||||
drawvec reduce_tiny_poly(drawvec &geom, int z, int detail, bool *reduced, double *accum_area);
|
||||
drawvec clip_lines(drawvec &geom, int z, int detail, long long buffer);
|
||||
int quick_check(long long *bbox, int z, int detail, long long buffer);
|
||||
drawvec simplify_lines(drawvec &geom, int z, int detail);
|
||||
drawvec reorder_lines(drawvec &geom);
|
||||
|
32
jsonpull.c
32
jsonpull.c
@ -58,29 +58,22 @@ json_pull *json_begin_file(FILE *f) {
|
||||
return json_begin(read_file, f);
|
||||
}
|
||||
|
||||
#if 0
|
||||
static int read_string(json_pull *j) {
|
||||
static int read_string(json_pull *j, char *buffer, int n) {
|
||||
char *cp = j->source;
|
||||
if (*cp == '\0') {
|
||||
return EOF;
|
||||
}
|
||||
int c = (unsigned char) *cp;
|
||||
j->source = cp + 1;
|
||||
return c;
|
||||
}
|
||||
int out = 0;
|
||||
|
||||
static int peek_string(json_pull *p) {
|
||||
char *cp = p->source;
|
||||
if (*cp == '\0') {
|
||||
return EOF;
|
||||
while (out < n && cp[out] != '\0') {
|
||||
buffer[out] = cp[out];
|
||||
out++;
|
||||
}
|
||||
return (unsigned char) *cp;
|
||||
|
||||
j->source = cp + out;
|
||||
return out;
|
||||
}
|
||||
|
||||
json_pull *json_begin_string(char *s) {
|
||||
return json_begin(read_string, peek_string, s);
|
||||
return json_begin(read_string, s);
|
||||
}
|
||||
#endif
|
||||
|
||||
void json_end(json_pull *p) {
|
||||
free(p->buffer);
|
||||
@ -161,7 +154,7 @@ static json_object *add_object(json_pull *j, json_type type) {
|
||||
return o;
|
||||
}
|
||||
|
||||
json_object *json_hash_get(json_object *o, char *s) {
|
||||
json_object *json_hash_get(json_object *o, const char *s) {
|
||||
if (o == NULL || o->type != JSON_HASH) {
|
||||
return NULL;
|
||||
}
|
||||
@ -254,7 +247,7 @@ again:
|
||||
}
|
||||
|
||||
if (j->container->expect != JSON_COMMA) {
|
||||
if (! (j->container->expect == JSON_ITEM && j->container->length == 0)) {
|
||||
if (!(j->container->expect == JSON_ITEM && j->container->length == 0)) {
|
||||
j->error = "Found ] without final element";
|
||||
return NULL;
|
||||
}
|
||||
@ -292,7 +285,7 @@ again:
|
||||
}
|
||||
|
||||
if (j->container->expect != JSON_COMMA) {
|
||||
if (! (j->container->expect == JSON_KEY && j->container->length == 0)) {
|
||||
if (!(j->container->expect == JSON_KEY && j->container->length == 0)) {
|
||||
j->error = "Found } without final element";
|
||||
return NULL;
|
||||
}
|
||||
@ -506,7 +499,6 @@ again:
|
||||
|
||||
json_object *s = add_object(j, JSON_STRING);
|
||||
if (s != NULL) {
|
||||
val.buf = realloc(val.buf, val.n + 1);
|
||||
s->string = val.buf;
|
||||
s->length = val.n;
|
||||
} else {
|
||||
|
20
jsonpull.h
20
jsonpull.h
@ -1,12 +1,21 @@
|
||||
typedef enum json_type {
|
||||
// These types can be returned by json_read()
|
||||
JSON_HASH, JSON_ARRAY, JSON_NUMBER, JSON_STRING, JSON_TRUE, JSON_FALSE, JSON_NULL,
|
||||
JSON_HASH,
|
||||
JSON_ARRAY,
|
||||
JSON_NUMBER,
|
||||
JSON_STRING,
|
||||
JSON_TRUE,
|
||||
JSON_FALSE,
|
||||
JSON_NULL,
|
||||
|
||||
// These and JSON_HASH and JSON_ARRAY can be called back by json_read_with_separators()
|
||||
JSON_COMMA, JSON_COLON,
|
||||
JSON_COMMA,
|
||||
JSON_COLON,
|
||||
|
||||
// These are only used internally as expectations of what comes next
|
||||
JSON_ITEM, JSON_KEY, JSON_VALUE,
|
||||
JSON_ITEM,
|
||||
JSON_KEY,
|
||||
JSON_VALUE,
|
||||
} json_type;
|
||||
|
||||
typedef struct json_object {
|
||||
@ -39,10 +48,7 @@ typedef struct json_pull {
|
||||
} json_pull;
|
||||
|
||||
json_pull *json_begin_file(FILE *f);
|
||||
|
||||
#if 0
|
||||
json_pull *json_begin_string(char *s);
|
||||
#endif
|
||||
|
||||
json_pull *json_begin(int (*read)(struct json_pull *, char *buffer, int n), void *source);
|
||||
void json_end(json_pull *p);
|
||||
@ -54,4 +60,4 @@ json_object *json_read(json_pull *j);
|
||||
json_object *json_read_separators(json_pull *j, json_separator_callback cb, void *state);
|
||||
void json_free(json_object *j);
|
||||
|
||||
json_object *json_hash_get(json_object *o, char *s);
|
||||
json_object *json_hash_get(json_object *o, const char *s);
|
||||
|
233
man/tippecanoe.1
Normal file
233
man/tippecanoe.1
Normal file
@ -0,0 +1,233 @@
|
||||
.TH tippecanoe
|
||||
.PP
|
||||
Builds vector tilesets
|
||||
\[la]https://www.mapbox.com/developers/vector-tiles/\[ra] from large collections of GeoJSON
|
||||
\[la]http://geojson.org/\[ra]
|
||||
features. This is a tool for making maps from huge datasets
|
||||
\[la]MADE_WITH.md\[ra]\&.
|
||||
.SH Intent
|
||||
.PP
|
||||
The goal of Tippecanoe is to enable making a scale\-independent view of your data,
|
||||
so that at any level from the entire world to a single building, you can see
|
||||
the density and texture of the data rather than a simplification from dropping
|
||||
supposedly unimportant features or clustering or aggregating them.
|
||||
.PP
|
||||
If you give it all of OpenStreetMap and zoom out, it should give you back
|
||||
something that looks like "All Streets
|
||||
\[la]http://benfry.com/allstreets/map5.html\[ra]"
|
||||
rather than something that looks like an Interstate road atlas.
|
||||
.PP
|
||||
If you give it all the building footprints in Los Angeles and zoom out
|
||||
far enough that most individual buildings are no longer discernable, you
|
||||
should still be able to see the extent and variety of development in every neighborhood,
|
||||
not just the largest downtown buildings.
|
||||
.PP
|
||||
If you give it a collection of years of tweet locations, you should be able to
|
||||
see the shape and relative popularity of every point of interest and every
|
||||
significant travel corridor.
|
||||
.SH Installation
|
||||
.PP
|
||||
The easiest way to install tippecanoe on OSX is with Homebrew
|
||||
\[la]http://brew.sh/\[ra]:
|
||||
.PP
|
||||
.RS
|
||||
.nf
|
||||
$ brew install tippecanoe
|
||||
.fi
|
||||
.RE
|
||||
.SH Usage
|
||||
.PP
|
||||
.RS
|
||||
.nf
|
||||
$ tippecanoe \-o file.mbtiles [file.json ...]
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
If no files are specified, it reads GeoJSON from the standard input.
|
||||
If multiple files are specified, each is placed in its own layer.
|
||||
.PP
|
||||
The GeoJSON features need not be wrapped in a FeatureCollection.
|
||||
You can concatenate multiple GeoJSON features or files together,
|
||||
and it will parse out the features and ignore whatever other objects
|
||||
it encounters.
|
||||
.SH Options
|
||||
.SS Naming
|
||||
.RS
|
||||
.IP \(bu 2
|
||||
\-l \fIname\fP: Layer name (default "file" if source is file.json or output is file.mbtiles). Only works if there is only one layer.
|
||||
.IP \(bu 2
|
||||
\-n \fIname\fP: Human\-readable name (default file.json)
|
||||
.RE
|
||||
.SS File control
|
||||
.RS
|
||||
.IP \(bu 2
|
||||
\-o \fIfile\fP\&.mbtiles: Name the output file.
|
||||
.IP \(bu 2
|
||||
\-f: Delete the mbtiles file if it already exists instead of giving an error
|
||||
.RE
|
||||
.SS Zoom levels and resolution
|
||||
.RS
|
||||
.IP \(bu 2
|
||||
\-z \fIzoom\fP: Base (maxzoom) zoom level (default 14)
|
||||
.IP \(bu 2
|
||||
\-Z \fIzoom\fP: Lowest (minzoom) zoom level (default 0)
|
||||
.IP \(bu 2
|
||||
\-d \fIdetail\fP: Detail at base zoom level (default 26\-basezoom, ~0.5m, for tile resolution of 4096 if \-z14)
|
||||
.IP \(bu 2
|
||||
\-D \fIdetail\fP: Detail at lower zoom levels (default 10, for tile resolution of 1024)
|
||||
.IP \(bu 2
|
||||
\-m \fIdetail\fP: Minimum detail that it will try if tiles are too big at regular detail (default 7)
|
||||
.IP \(bu 2
|
||||
\-b \fIpixels\fP: Buffer size where features are duplicated from adjacent tiles. Units are "screen pixels"\-\-1/256th of the tile width or height. (default 5)
|
||||
.RE
|
||||
.SS Properties
|
||||
.RS
|
||||
.IP \(bu 2
|
||||
\-x \fIname\fP: Exclude the named properties from all features
|
||||
.IP \(bu 2
|
||||
\-y \fIname\fP: Include the named properties in all features, excluding all those not explicitly named
|
||||
.IP \(bu 2
|
||||
\-X: Exclude all properties and encode only geometries
|
||||
.RE
|
||||
.SS Point simplification
|
||||
.RS
|
||||
.IP \(bu 2
|
||||
\-r \fIrate\fP: Rate at which dots are dropped at lower zoom levels (default 2.5)
|
||||
.IP \(bu 2
|
||||
\-g \fIgamma\fP: Rate at which especially dense dots are dropped (default 0, for no effect). A gamma of 2 reduces the number of dots less than a pixel apart to the square root of their original number.
|
||||
.RE
|
||||
.SS Doing less
|
||||
.RS
|
||||
.IP \(bu 2
|
||||
\-ps: Don't simplify lines
|
||||
.IP \(bu 2
|
||||
\-pr: Don't reverse the direction of lines to make them coalesce better
|
||||
.IP \(bu 2
|
||||
\-pc: Don't coalesce features with the same properties
|
||||
.IP \(bu 2
|
||||
\-pf: Don't limit tiles to 200,000 features
|
||||
.IP \(bu 2
|
||||
\-pk: Don't limit tiles to 500K bytes
|
||||
.IP \(bu 2
|
||||
\-po: Don't reorder features to put the same properties in sequence
|
||||
.IP \(bu 2
|
||||
\-pl: Let "dot" simplification apply to lines too
|
||||
.IP \(bu 2
|
||||
\-pd: Dynamically drop some fraction of features from large tiles to keep them under the 500K size limit. It will probably look ugly at the tile boundaries.
|
||||
.IP \(bu 2
|
||||
\-q: Work quietly instead of reporting progress
|
||||
.RE
|
||||
.SH Example
|
||||
.PP
|
||||
.RS
|
||||
.nf
|
||||
$ tippecanoe \-o alameda.mbtiles \-l alameda \-n "Alameda County from TIGER" \-z13 tl_2014_06001_roads.json
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
.RS
|
||||
.nf
|
||||
$ cat tiger/tl_2014_*_roads.json | tippecanoe \-o tiger.mbtiles \-l roads \-n "All TIGER roads, one zoom" \-z12 \-Z12 \-d14 \-x LINEARID \-x RTTYP
|
||||
.fi
|
||||
.RE
|
||||
.SH Point styling
|
||||
.PP
|
||||
To provide a consistent density gradient as you zoom, the Mapbox Studio style needs to be
|
||||
coordinated with the base zoom level and dot\-dropping rate. You can use this shell script to
|
||||
calculate the appropriate marker\-width at high zoom levels to match the fraction of dots
|
||||
that were dropped at low zoom levels.
|
||||
.PP
|
||||
If you used \fB\fC\-z\fR to change the base zoom level or \fB\fC\-r\fR to change the
|
||||
dot\-dropping rate, replace them in the \fB\fCbasezoom\fR and \fB\fCrate\fR below.
|
||||
.PP
|
||||
.RS
|
||||
.nf
|
||||
awk 'BEGIN {
|
||||
dotsize = 2; # up to you to decide
|
||||
basezoom = 14; # tippecanoe \-z 14
|
||||
rate = 2.5; # tippecanoe \-r 2.5
|
||||
print " marker\-line\-width: 0;";
|
||||
print " marker\-ignore\-placement: true;";
|
||||
print " marker\-allow\-overlap: true;";
|
||||
print " marker\-width: " dotsize ";";
|
||||
for (i = basezoom + 1; i <= 22; i++) {
|
||||
print " [zoom >= " i "] { marker\-width: " (dotsize * exp(log(sqrt(rate)) * (i \- basezoom))) "; }";
|
||||
}
|
||||
exit(0);
|
||||
}'
|
||||
.fi
|
||||
.RE
|
||||
.SH Geometric simplifications
|
||||
.PP
|
||||
At every zoom level, line and polygon features are subjected to Douglas\-Peucker
|
||||
simplification to the resolution of the tile.
|
||||
.PP
|
||||
For point features, it drops 1/2.5 of the dots for each zoom level above the base.
|
||||
I don't know why 2.5 is the appropriate number, but the densities of many different
|
||||
data sets fall off at about this same rate. You can use \-r to specify a different rate.
|
||||
.PP
|
||||
You can use the gamma option to thin out especially dense clusters of points.
|
||||
For any area that where dots are closer than one pixel together (at whatever zoom level),
|
||||
a gamma of 3, for example, will reduce these clusters to the cube root of their original density.
|
||||
.PP
|
||||
For line features, it drops any features that are too small to draw at all.
|
||||
This still leaves the lower zooms too dark (and too dense for the 500K tile limit,
|
||||
in some places), so I need to figure out an equitable way to throw features away.
|
||||
.PP
|
||||
Any polygons that are smaller than a minimum area (currently 9 square subpixels) will
|
||||
have their probability diffused, so that some of them will be drawn as a square of
|
||||
this minimum size and others will not be drawn at all, preserving the total area that
|
||||
all of them should have had together.
|
||||
.PP
|
||||
Features in the same tile that share the same type and attributes are coalesced
|
||||
together into a single geometry. You are strongly encouraged to use \-x to exclude
|
||||
any unnecessary properties to reduce wasted file size.
|
||||
.PP
|
||||
If a tile is larger than 500K, it will try encoding that tile at progressively
|
||||
lower resolutions before failing if it still doesn't fit.
|
||||
.SH Development
|
||||
.PP
|
||||
Requires protoc and sqlite3. Rebuilding the manpage
|
||||
uses md2man (\fB\fCgem install md2man\fR).
|
||||
.PP
|
||||
MacOS:
|
||||
.PP
|
||||
.RS
|
||||
.nf
|
||||
brew install protobuf
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
Linux:
|
||||
.PP
|
||||
.RS
|
||||
.nf
|
||||
sudo apt\-get install libprotobuf\-dev
|
||||
sudo apt\-get install protobuf\-compiler
|
||||
sudo apt\-get install libsqlite3\-dev
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
Then build:
|
||||
.PP
|
||||
.RS
|
||||
.nf
|
||||
make
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
and perhaps
|
||||
.PP
|
||||
.RS
|
||||
.nf
|
||||
make install
|
||||
.fi
|
||||
.RE
|
||||
.SH Examples
|
||||
.PP
|
||||
Check out some examples of maps made with tippecanoe
|
||||
\[la]MADE_WITH.md\[ra]
|
||||
.SH Name
|
||||
.PP
|
||||
The name is a joking reference
|
||||
\[la]http://en.wikipedia.org/wiki/Tippecanoe_and_Tyler_Too\[ra] to a "tiler" for making map tiles.
|
73
mbtiles.c
73
mbtiles.c
@ -12,7 +12,7 @@ sqlite3 *mbtiles_open(char *dbname, char **argv) {
|
||||
sqlite3 *outdb;
|
||||
|
||||
if (sqlite3_open(dbname, &outdb) != SQLITE_OK) {
|
||||
fprintf(stderr, "%s: %s: %s\n", argv[0], dbname, sqlite3_errmsg(outdb));
|
||||
fprintf(stderr, "%s: %s: %s\n", argv[0], dbname, sqlite3_errmsg(outdb));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
@ -70,24 +70,30 @@ void mbtiles_write_tile(sqlite3 *outdb, int z, int tx, int ty, const char *data,
|
||||
}
|
||||
}
|
||||
|
||||
static void quote(char **buf, char *s) {
|
||||
static void quote(char **buf, const char *s) {
|
||||
char tmp[strlen(s) * 8 + 1];
|
||||
char *out = tmp;
|
||||
|
||||
for (; *s != '\0'; s++) {
|
||||
if (*s == '\\' || *s == '\"') {
|
||||
unsigned char ch = (unsigned char) *s;
|
||||
|
||||
if (ch == '\\' || ch == '\"') {
|
||||
*out++ = '\\';
|
||||
*out++ = *s;
|
||||
} else if (*s < ' ') {
|
||||
sprintf(out, "\\u%04x", *s);
|
||||
*out++ = ch;
|
||||
} else if (ch < ' ') {
|
||||
sprintf(out, "\\u%04x", ch);
|
||||
out = out + strlen(out);
|
||||
} else {
|
||||
*out++ = *s;
|
||||
*out++ = ch;
|
||||
}
|
||||
}
|
||||
|
||||
*out = '\0';
|
||||
*buf = realloc(*buf, strlen(*buf) + strlen(tmp) + 1);
|
||||
if (*buf == NULL) {
|
||||
perror("realloc");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
strcat(*buf, tmp);
|
||||
}
|
||||
|
||||
@ -107,7 +113,7 @@ static void aprintf(char **buf, const char *format, ...) {
|
||||
free(tmp);
|
||||
}
|
||||
|
||||
void mbtiles_write_metadata(sqlite3 *outdb, char *fname, char *layername, int minzoom, int maxzoom, double minlat, double minlon, double maxlat, double maxlon, double midlat, double midlon, struct pool *fields) {
|
||||
void mbtiles_write_metadata(sqlite3 *outdb, const char *fname, char **layername, int minzoom, int maxzoom, double minlat, double minlon, double maxlat, double maxlon, double midlat, double midlon, struct pool **file_keys, int nlayers) {
|
||||
char *sql, *err;
|
||||
|
||||
sql = sqlite3_mprintf("INSERT INTO metadata (name, value) VALUES ('name', %Q);", fname);
|
||||
@ -174,27 +180,40 @@ void mbtiles_write_metadata(sqlite3 *outdb, char *fname, char *layername, int mi
|
||||
sqlite3_free(sql);
|
||||
|
||||
char *buf = strdup("{");
|
||||
aprintf(&buf, "\"vector_layers\": [ { \"id\": \"");
|
||||
quote(&buf, layername);
|
||||
aprintf(&buf, "\", \"description\": \"\", \"minzoom\": %d, \"maxzoom\": %d, \"fields\": {", minzoom, maxzoom);
|
||||
aprintf(&buf, "\"vector_layers\": [ ");
|
||||
|
||||
struct pool_val *pv;
|
||||
for (pv = fields->head; pv != NULL; pv = pv->next) {
|
||||
aprintf(&buf, "\"");
|
||||
quote(&buf, pv->s);
|
||||
|
||||
if (pv->type == VT_NUMBER) {
|
||||
aprintf(&buf, "\": \"Number\"");
|
||||
} else {
|
||||
aprintf(&buf, "\": \"String\"");
|
||||
}
|
||||
|
||||
if (pv->next != NULL) {
|
||||
int i;
|
||||
for (i = 0; i < nlayers; i++) {
|
||||
if (i != 0) {
|
||||
aprintf(&buf, ", ");
|
||||
}
|
||||
|
||||
aprintf(&buf, "{ \"id\": \"");
|
||||
quote(&buf, layername[i]);
|
||||
aprintf(&buf, "\", \"description\": \"\", \"minzoom\": %d, \"maxzoom\": %d, \"fields\": {", minzoom, maxzoom);
|
||||
|
||||
struct pool_val *pv;
|
||||
for (pv = file_keys[i]->head; pv != NULL; pv = pv->next) {
|
||||
aprintf(&buf, "\"");
|
||||
quote(&buf, pv->s);
|
||||
|
||||
if (pv->type == VT_NUMBER) {
|
||||
aprintf(&buf, "\": \"Number\"");
|
||||
} else if (pv->type == VT_BOOLEAN) {
|
||||
aprintf(&buf, "\": \"Boolean\"");
|
||||
} else {
|
||||
aprintf(&buf, "\": \"String\"");
|
||||
}
|
||||
|
||||
if (pv->next != NULL) {
|
||||
aprintf(&buf, ", ");
|
||||
}
|
||||
}
|
||||
|
||||
aprintf(&buf, "} }");
|
||||
}
|
||||
|
||||
aprintf(&buf, "} } ] }");
|
||||
aprintf(&buf, " ] }");
|
||||
|
||||
sql = sqlite3_mprintf("INSERT INTO metadata (name, value) VALUES ('json', %Q);", buf);
|
||||
if (sqlite3_exec(outdb, sql, NULL, NULL, &err) != SQLITE_OK) {
|
||||
@ -209,11 +228,7 @@ void mbtiles_close(sqlite3 *outdb, char **argv) {
|
||||
char *err;
|
||||
|
||||
if (sqlite3_exec(outdb, "ANALYZE;", NULL, NULL, &err) != SQLITE_OK) {
|
||||
fprintf(stderr, "%s: index metadata: %s\n", argv[0], err);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (sqlite3_exec(outdb, "VACUUM;", NULL, NULL, &err) != SQLITE_OK) {
|
||||
fprintf(stderr, "%s: index tiles: %s\n", argv[0], err);
|
||||
fprintf(stderr, "%s: ANALYZE failed: %s\n", argv[0], err);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (sqlite3_close(outdb) != SQLITE_OK) {
|
||||
|
@ -2,6 +2,6 @@ sqlite3 *mbtiles_open(char *dbname, char **argv);
|
||||
|
||||
void mbtiles_write_tile(sqlite3 *outdb, int z, int tx, int ty, const char *data, int size);
|
||||
|
||||
void mbtiles_write_metadata(sqlite3 *outdb, char *fname, char *layername, int minzoom, int maxzoom, double minlat, double minlon, double maxlat, double maxlon, double midlat, double midlon, struct pool *fields);
|
||||
void mbtiles_write_metadata(sqlite3 *outdb, const char *fname, char **layername, int minzoom, int maxzoom, double minlat, double minlon, double maxlat, double maxlon, double midlat, double midlon, struct pool **file_keys, int nlayers);
|
||||
|
||||
void mbtiles_close(sqlite3 *outdb, char **argv);
|
||||
|

memfile.c (new file, 69 lines)
@@ -0,0 +1,69 @@
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <sys/mman.h>
#include "memfile.h"

#define INCREMENT 131072

struct memfile *memfile_open(int fd) {
if (ftruncate(fd, INCREMENT) != 0) {
return NULL;
}

char *map = mmap(NULL, INCREMENT, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
if (map == MAP_FAILED) {
return NULL;
}

struct memfile *mf = malloc(sizeof(struct memfile));
if (mf == NULL) {
munmap(map, INCREMENT);
return NULL;
}

mf->fd = fd;
mf->map = map;
mf->len = INCREMENT;
mf->off = 0;

return mf;
}

int memfile_close(struct memfile *file) {
if (munmap(file->map, file->len) != 0) {
return -1;
}

if (file->fd >= 0) {
if (close(file->fd) != 0) {
return -1;
}
}

free(file);
return 0;
}

int memfile_write(struct memfile *file, void *s, long long len) {
if (file->off + len > file->len) {
if (munmap(file->map, file->len) != 0) {
return -1;
}

file->len += INCREMENT;

if (ftruncate(file->fd, file->len) != 0) {
return -1;
}

file->map = mmap(NULL, file->len, PROT_READ | PROT_WRITE, MAP_SHARED, file->fd, 0);
if (file->map == MAP_FAILED) {
return -1;
}
}

memcpy(file->map + file->off, s, len);
file->off += len;
return len;
}

memfile.h (new file, 10 lines)
@@ -0,0 +1,10 @@
struct memfile {
int fd;
char *map;
long long len;
long long off;
};

struct memfile *memfile_open(int fd);
int memfile_close(struct memfile *file);
int memfile_write(struct memfile *file, void *s, long long len);
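
(A minimal usage sketch of the memfile API above; standalone, not part of the diff, assuming a POSIX temporary file and with error handling abbreviated.)

#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include "memfile.h"

int main() {
    char tmpname[] = "/tmp/memfile.XXXXXX";
    int fd = mkstemp(tmpname);      /* backing file for the growable mapping */
    if (fd < 0) {
        perror("mkstemp");
        return EXIT_FAILURE;
    }
    unlink(tmpname);                /* keep the temporary anonymous */

    struct memfile *mf = memfile_open(fd);
    if (mf == NULL) {
        perror("memfile_open");
        return EXIT_FAILURE;
    }

    const char hello[] = "hello";
    if (memfile_write(mf, (void *) hello, sizeof(hello)) < 0) {  /* remaps in INCREMENT steps as it grows */
        perror("memfile_write");
        return EXIT_FAILURE;
    }
    printf("wrote %lld bytes\n", mf->off);

    memfile_close(mf);              /* munmaps the region and closes the fd */
    return 0;
}
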

pool.c (39 changed lines)
@@ -1,12 +1,25 @@
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "pool.h"

static struct pool_val *pool1(struct pool *p, char *s, int type, int (*compare)(const char *, const char *)) {
struct pool_val **v = &(p->vals);
#define POOL_WIDTH 256

static int hash(char *s) {
int h = 0;
for (; *s; s++) {
h = h * 37 + *s;
}
h = h & 0xFF;
return h;
}

struct pool_val *pool(struct pool *p, char *s, int type) {
int h = hash(s);
struct pool_val **v = &(p->vals[h]);

while (*v != NULL) {
int cmp = compare(s, (*v)->s);
int cmp = strcmp(s, (*v)->s);

if (cmp == 0) {
cmp = type - (*v)->type;
@@ -22,6 +35,10 @@ static struct pool_val *pool1(struct pool *p, char *s, int type, int (*compare)(
}

*v = malloc(sizeof(struct pool_val));
if (*v == NULL) {
fprintf(stderr, "out of memory making string pool\n");
exit(EXIT_FAILURE);
}
(*v)->left = NULL;
(*v)->right = NULL;
(*v)->next = NULL;
@@ -41,7 +58,8 @@ static struct pool_val *pool1(struct pool *p, char *s, int type, int (*compare)(
}

int is_pooled(struct pool *p, char *s, int type) {
struct pool_val **v = &(p->vals);
int h = hash(s);
struct pool_val **v = &(p->vals[h]);

while (*v != NULL) {
int cmp = strcmp(s, (*v)->s);
@@ -62,11 +80,6 @@ int is_pooled(struct pool *p, char *s, int type) {
return 0;
}


struct pool_val *pool(struct pool *p, char *s, int type) {
return pool1(p, s, type, strcmp);
}

void pool_free1(struct pool *p, void (*func)(void *)) {
while (p->head != NULL) {
if (func != NULL) {
@@ -80,6 +93,8 @@ void pool_free1(struct pool *p, void (*func)(void *)) {

p->head = NULL;
p->tail = NULL;

free(p->vals);
p->vals = NULL;
}

@@ -93,7 +108,11 @@ void pool_free_strings(struct pool *p) {

void pool_init(struct pool *p, int n) {
p->n = n;
p->vals = NULL;
p->vals = calloc(POOL_WIDTH, sizeof(struct pool_val *));
if (p->vals == NULL) {
fprintf(stderr, "out of memory creating string pool\n");
exit(EXIT_FAILURE);
}
p->head = NULL;
p->tail = NULL;
}
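
(A standalone sketch, not from the diff, of what the new bucketing buys: the same multiply-by-37 hash reduces each key to one of POOL_WIDTH = 256 buckets, so the strcmp-based tree search only has to look inside a single bucket. The key names below are hypothetical, and hash() is re-declared locally because the real one is static in pool.c.)

#include <stdio.h>

#define POOL_WIDTH 256

/* Same shape as the static hash() in pool.c: multiply-by-37, masked to a bucket index */
static int hash(const char *s) {
    int h = 0;
    for (; *s; s++) {
        h = h * 37 + *s;
    }
    return h & 0xFF;
}

int main() {
    const char *keys[] = {"name", "highway", "surface", "maxspeed"};
    int i;

    for (i = 0; i < 4; i++) {
        printf("%-10s -> bucket %3d of %d\n", keys[i], hash(keys[i]), POOL_WIDTH);
    }
    return 0;
}
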

pool.h (3 changed lines)
@@ -10,14 +10,13 @@ struct pool_val {
};

struct pool {
struct pool_val *vals;
struct pool_val **vals;

struct pool_val *head;
struct pool_val *tail;
int n;
};


struct pool_val *pool(struct pool *p, char *s, int type);
void pool_free(struct pool *p);
void pool_free_strings(struct pool *p);

projection.c
@@ -7,7 +7,7 @@ void latlon2tile(double lat, double lon, int zoom, unsigned int *x, unsigned int
unsigned long long n = 1LL << zoom;

long long llx = n * ((lon + 180) / 360);
long long lly = n * (1 - (log(tan(lat_rad) + 1/cos(lat_rad)) / M_PI)) / 2;
long long lly = n * (1 - (log(tan(lat_rad) + 1 / cos(lat_rad)) / M_PI)) / 2;

if (lat >= 85.0511) {
lly = 0;
@@ -53,7 +53,6 @@ unsigned long long encode(unsigned int wx, unsigned int wy) {
out |= v;
}


return out;
}
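
(As a sanity check on the Web Mercator formula in latlon2tile, a standalone sketch, not project code, computing the tile column and row for a hypothetical point at zoom 14; the polar clamping is omitted.)

#include <math.h>
#include <stdio.h>

int main() {
    double lat = 37.7749, lon = -122.4194;   /* hypothetical point (San Francisco) */
    int zoom = 14;

    double lat_rad = lat * M_PI / 180;
    unsigned long long n = 1LL << zoom;

    /* Same arithmetic as latlon2tile */
    long long x = n * ((lon + 180) / 360);
    long long y = n * (1 - (log(tan(lat_rad) + 1 / cos(lat_rad)) / M_PI)) / 2;

    printf("%d/%lld/%lld\n", zoom, x, y);    /* expect roughly 14/2620/6332 */
    return 0;
}
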

tile.cc (646 changed lines)
@@ -8,27 +8,31 @@
#include <algorithm>
#include <stdio.h>
#include <unistd.h>
#include <limits.h>
#include <zlib.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/mman.h>
#include <math.h>
#include <sqlite3.h>
#include "vector_tile.pb.h"
#include "geometry.hh"

extern "C" {
#include "tile.h"
#include "pool.h"
#include "clip.h"
#include "mbtiles.h"
#include "projection.h"
#include "tile.h"
#include "pool.h"
#include "clip.h"
#include "mbtiles.h"
#include "projection.h"
}

#define CMD_BITS 3
#define MIN_DETAIL 7

#define XSTRINGIFY(s) STRINGIFY(s)
#define STRINGIFY(s) #s

// https://github.com/mapbox/mapnik-vector-tile/blob/master/src/vector_tile_compression.hpp
static inline int compress(std::string const& input, std::string& output) {
static inline int compress(std::string const &input, std::string &output) {
z_stream deflate_s;
deflate_s.zalloc = Z_NULL;
deflate_s.zfree = Z_NULL;
@@ -36,14 +40,14 @@ static inline int compress(std::string const& input, std::string& output) {
deflate_s.avail_in = 0;
deflate_s.next_in = Z_NULL;
deflateInit2(&deflate_s, Z_BEST_COMPRESSION, Z_DEFLATED, 31, 8, Z_DEFAULT_STRATEGY);
deflate_s.next_in = (Bytef *)input.data();
deflate_s.next_in = (Bytef *) input.data();
deflate_s.avail_in = input.size();
size_t length = 0;
do {
size_t increase = input.size() / 2 + 1024;
output.resize(length + increase);
deflate_s.avail_out = increase;
deflate_s.next_out = (Bytef *)(output.data() + length);
deflate_s.next_out = (Bytef *) (output.data() + length);
int ret = deflate(&deflate_s, Z_FINISH);
if (ret != Z_STREAM_END && ret != Z_OK && ret != Z_BUF_ERROR) {
return -1;
@@ -130,7 +134,7 @@ struct coalesce {
char *metasrc;
bool coalesced;

bool operator< (const coalesce &o) const {
bool operator<(const coalesce &o) const {
int cmp = coalindexcmp(this, &o);
if (cmp < 0) {
return true;
@@ -187,79 +191,93 @@ int coalindexcmp(const struct coalesce *c1, const struct coalesce *c2) {
return cmp;
}

void decode_meta(char **meta, struct pool *keys, struct pool *values, struct pool *file_keys, std::vector<int> *intmeta, char *only) {
struct pool_val *retrieve_string(char **f, struct pool *p, char *stringpool) {
struct pool_val *ret;
long long off;

deserialize_long_long(f, &off);
ret = pool(p, stringpool + off + 1, stringpool[off]);

return ret;
}

void decode_meta(char **meta, char *stringpool, struct pool *keys, struct pool *values, struct pool *file_keys, std::vector<int> *intmeta, char *only) {
int m;
deserialize_int(meta, &m);

int i;
for (i = 0; i < m; i++) {
int t;
deserialize_int(meta, &t);
struct pool_val *key = deserialize_string(meta, keys, VT_STRING);
struct pool_val *key = retrieve_string(meta, keys, stringpool);

if (only != NULL && (strcmp(key->s, only) != 0)) {
deserialize_int(meta, &t);
*meta += t;
// XXX if evaluate ever works again, check whether this is sufficient
(void) retrieve_string(meta, values, stringpool);
} else {
struct pool_val *value = deserialize_string(meta, values, t);
struct pool_val *value = retrieve_string(meta, values, stringpool);

intmeta->push_back(key->n);
intmeta->push_back(value->n);

if (!is_pooled(file_keys, key->s, t)) {
if (!is_pooled(file_keys, key->s, value->type)) {
// Dup to retain after munmap
pool(file_keys, strdup(key->s), t);
pool(file_keys, strdup(key->s), value->type);
}
}
}
}

mapnik::vector::tile create_tile(char *layername, int line_detail, std::vector<coalesce> &features, long long *count, struct pool *keys, struct pool *values) {
mapnik::vector::tile create_tile(char **layernames, int line_detail, std::vector<std::vector<coalesce> > &features, long long *count, struct pool **keys, struct pool **values, int nlayers) {
mapnik::vector::tile tile;
mapnik::vector::tile_layer *layer = tile.add_layers();

layer->set_name(layername);
layer->set_version(1);
layer->set_extent(1 << line_detail);
int i;
for (i = 0; i < nlayers; i++) {
mapnik::vector::tile_layer *layer = tile.add_layers();

unsigned x;
for (x = 0; x < features.size(); x++) {
if (features[x].type == VT_LINE || features[x].type == VT_POLYGON) {
features[x].geom = remove_noop(features[x].geom, features[x].type);
layer->set_name(layernames[i]);
layer->set_version(1);
layer->set_extent(1 << line_detail);

unsigned x;
for (x = 0; x < features[i].size(); x++) {
if (features[i][x].type == VT_LINE || features[i][x].type == VT_POLYGON) {
features[i][x].geom = remove_noop(features[i][x].geom, features[i][x].type, 0);
}

mapnik::vector::tile_feature *feature = layer->add_features();

if (features[i][x].type == VT_POINT) {
feature->set_type(mapnik::vector::tile::Point);
} else if (features[i][x].type == VT_LINE) {
feature->set_type(mapnik::vector::tile::LineString);
} else if (features[i][x].type == VT_POLYGON) {
feature->set_type(mapnik::vector::tile::Polygon);
} else {
feature->set_type(mapnik::vector::tile::Unknown);
}

to_feature(features[i][x].geom, feature);
*count += features[i][x].geom.size();

unsigned y;
for (y = 0; y < features[i][x].meta.size(); y++) {
feature->add_tags(features[i][x].meta[y]);
}
}

mapnik::vector::tile_feature *feature = layer->add_features();

if (features[x].type == VT_POINT) {
feature->set_type(mapnik::vector::tile::Point);
} else if (features[x].type == VT_LINE) {
feature->set_type(mapnik::vector::tile::LineString);
} else if (features[x].type == VT_POLYGON) {
feature->set_type(mapnik::vector::tile::Polygon);
} else {
feature->set_type(mapnik::vector::tile::Unknown);
struct pool_val *pv;
for (pv = keys[i]->head; pv != NULL; pv = pv->next) {
layer->add_keys(pv->s, strlen(pv->s));
}
for (pv = values[i]->head; pv != NULL; pv = pv->next) {
mapnik::vector::tile_value *tv = layer->add_values();

to_feature(features[x].geom, feature);
*count += features[x].geom.size();

unsigned y;
for (y = 0; y < features[x].meta.size(); y++) {
feature->add_tags(features[x].meta[y]);
}
}

struct pool_val *pv;
for (pv = keys->head; pv != NULL; pv = pv->next) {
layer->add_keys(pv->s, strlen(pv->s));
}
for (pv = values->head; pv != NULL; pv = pv->next) {
mapnik::vector::tile_value *tv = layer->add_values();

if (pv->type == VT_NUMBER) {
tv->set_double_value(atof(pv->s));
} else {
tv->set_string_value(pv->s);
if (pv->type == VT_NUMBER) {
tv->set_double_value(atof(pv->s));
} else if (pv->type == VT_BOOLEAN) {
tv->set_bool_value(pv->s[0] == 't');
} else {
tv->set_string_value(pv->s);
}
}
}

@@ -270,7 +288,7 @@ struct sll {
char *name;
long long val;

bool operator< (const sll &o) const {
bool operator<(const sll &o) const {
if (this->val < o.val) {
return true;
} else {
@@ -284,7 +302,8 @@ struct sll {
}
};

void evaluate(std::vector<coalesce> &features, char *metabase, struct pool *file_keys, char *layername, int line_detail, long long orig) {
#if 0
void evaluate(std::vector<coalesce> &features, char *metabase, struct pool *file_keys, const char *layername, int line_detail, long long orig) {
std::vector<sll> options;

struct pool_val *pv;
@@ -302,7 +321,7 @@ void evaluate(std::vector<coalesce> &features, char *metabase, struct pool *file
}

std::vector<coalesce> empty;
mapnik::vector::tile tile = create_tile(layername, line_detail, empty, &count, &keys, &values);
mapnik::vector::tile tile = create_tile(layername, line_detail, empty, &count, &keys, &values, 1); // XXX layer

std::string s;
std::string compressed;
@@ -329,7 +348,7 @@ void evaluate(std::vector<coalesce> &features, char *metabase, struct pool *file
long long count = 0;

std::vector<coalesce> empty;
mapnik::vector::tile tile = create_tile(layername, line_detail, features, &count, &keys, &values);
mapnik::vector::tile tile = create_tile(layername, line_detail, features, &count, &keys, &values, nlayers);

std::string s;
std::string compressed;
@@ -341,62 +360,281 @@ void evaluate(std::vector<coalesce> &features, char *metabase, struct pool *file
pool_free(&values);
pool_free(&keys);
}
#endif

|
||||
void rewrite(drawvec &geom, int z, int nextzoom, int file_maxzoom, long long *bbox, unsigned tx, unsigned ty, int buffer, int line_detail, int *within, long long *geompos, FILE **geomfile, const char *fname, signed char t, int layer, long long metastart, signed char feature_minzoom) {
|
||||
if (geom.size() > 0 && nextzoom <= file_maxzoom) {
|
||||
int xo, yo;
|
||||
int span = 1 << (nextzoom - z);
|
||||
|
||||
// Get the feature bounding box in pixel (256) coordinates at the child zoom
|
||||
// in order to calculate which sub-tiles it can touch including the buffer.
|
||||
long long bbox2[4];
|
||||
int k;
|
||||
for (k = 0; k < 4; k++) {
|
||||
// Division instead of right-shift because coordinates can be negative
|
||||
bbox2[k] = bbox[k] / (1 << (32 - nextzoom - 8));
|
||||
}
|
||||
bbox2[0] -= buffer;
|
||||
bbox2[1] -= buffer;
|
||||
bbox2[2] += buffer;
|
||||
bbox2[3] += buffer;
|
||||
|
||||
for (k = 0; k < 4; k++) {
|
||||
if (bbox2[k] < 0) {
|
||||
bbox2[k] = 0;
|
||||
}
|
||||
if (bbox2[k] >= 256 * span) {
|
||||
bbox2[k] = 256 * (span - 1);
|
||||
}
|
||||
|
||||
bbox2[k] /= 256;
|
||||
}
|
||||
|
||||
for (xo = bbox2[0]; xo <= bbox2[2]; xo++) {
|
||||
for (yo = bbox2[1]; yo <= bbox2[3]; yo++) {
|
||||
unsigned jx = tx * span + xo;
|
||||
unsigned jy = ty * span + yo;
|
||||
|
||||
// j is the shard that the child tile's data is being written to.
|
||||
//
|
||||
// Be careful: We can't jump more zoom levels than MAX_ZOOM_INCREMENT
|
||||
// because that could break the constraint that each of the children
|
||||
// of the current tile must have its own shard, because the data for
|
||||
// the child tile must be contiguous within the shard.
|
||||
//
|
||||
// But it's OK to spread children across all the shards, not just
|
||||
// the four that would normally result from splitting one tile,
|
||||
// because it will go through all the shards when it does the
|
||||
// next zoom.
|
||||
|
||||
int j = ((jx & ((1 << MAX_ZOOM_INCREMENT) - 1)) << MAX_ZOOM_INCREMENT) |
|
||||
((jy & ((1 << MAX_ZOOM_INCREMENT) - 1)));
|
||||
|
||||
{
|
||||
if (!within[j]) {
|
||||
serialize_int(geomfile[j], nextzoom, &geompos[j], fname);
|
||||
serialize_uint(geomfile[j], tx * span + xo, &geompos[j], fname);
|
||||
serialize_uint(geomfile[j], ty * span + yo, &geompos[j], fname);
|
||||
within[j] = 1;
|
||||
}
|
||||
|
||||
// Offset from tile coordinates back to world coordinates
|
||||
unsigned sx = 0, sy = 0;
|
||||
if (z != 0) {
|
||||
sx = tx << (32 - z);
|
||||
sy = ty << (32 - z);
|
||||
}
|
||||
|
||||
// printf("type %d, meta %lld\n", t, metastart);
|
||||
serialize_byte(geomfile[j], t, &geompos[j], fname);
|
||||
serialize_long_long(geomfile[j], layer, &geompos[j], fname);
|
||||
serialize_long_long(geomfile[j], metastart, &geompos[j], fname);
|
||||
long long wx = initial_x, wy = initial_y;
|
||||
|
||||
for (unsigned u = 0; u < geom.size(); u++) {
|
||||
serialize_byte(geomfile[j], geom[u].op, &geompos[j], fname);
|
||||
|
||||
if (geom[u].op != VT_CLOSEPATH) {
|
||||
serialize_long_long(geomfile[j], ((geom[u].x + sx) >> geometry_scale) - (wx >> geometry_scale), &geompos[j], fname);
|
||||
serialize_long_long(geomfile[j], ((geom[u].y + sy) >> geometry_scale) - (wy >> geometry_scale), &geompos[j], fname);
|
||||
wx = geom[u].x + sx;
|
||||
wy = geom[u].y + sy;
|
||||
}
|
||||
}
|
||||
|
||||
serialize_byte(geomfile[j], VT_END, &geompos[j], fname);
|
||||
serialize_byte(geomfile[j], feature_minzoom, &geompos[j], fname);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
long long write_tile(char **geoms, char *metabase, char *stringpool, unsigned *file_bbox, int z, unsigned tx, unsigned ty, int detail, int min_detail, int basezoom, struct pool **file_keys, char **layernames, sqlite3 *outdb, double droprate, int buffer, const char *fname, FILE **geomfile, int file_minzoom, int file_maxzoom, double todo, char *geomstart, long long along, double gamma, int nlayers, char *prevent) {
|
||||
int line_detail;
|
||||
static bool evaluated = false;
|
||||
double oprogress = 0;
|
||||
double fraction = 1;
|
||||
|
||||
for (line_detail = detail; line_detail >= MIN_DETAIL || line_detail == detail; line_detail--) {
|
||||
char *og = *geoms;
|
||||
|
||||
int nextzoom = z + 1;
|
||||
if (nextzoom < file_minzoom) {
|
||||
if (z + MAX_ZOOM_INCREMENT > file_minzoom) {
|
||||
nextzoom = file_minzoom;
|
||||
} else {
|
||||
nextzoom = z + MAX_ZOOM_INCREMENT;
|
||||
}
|
||||
}
|
||||
|
||||
for (line_detail = detail; line_detail >= min_detail || line_detail == detail; line_detail--) {
|
||||
GOOGLE_PROTOBUF_VERIFY_VERSION;
|
||||
|
||||
struct pool keys, values;
|
||||
pool_init(&keys, 0);
|
||||
pool_init(&values, 0);
|
||||
std::set<long long> dup;
|
||||
struct pool keys1[nlayers], values1[nlayers];
|
||||
struct pool *keys[nlayers], *values[nlayers];
|
||||
int i;
|
||||
for (i = 0; i < nlayers; i++) {
|
||||
pool_init(&keys1[i], 0);
|
||||
pool_init(&values1[i], 0);
|
||||
|
||||
keys[i] = &keys1[i];
|
||||
values[i] = &values1[i];
|
||||
}
|
||||
|
||||
long long count = 0;
|
||||
//long long along = 0;
|
||||
// long long along = 0;
|
||||
double accum_area = 0;
|
||||
|
||||
std::vector<coalesce> features;
|
||||
double interval = 0;
|
||||
double seq = 0;
|
||||
if (z < basezoom) {
|
||||
interval = exp(log(droprate) * (basezoom - z));
|
||||
}
|
||||
|
||||
struct index *i;
|
||||
for (i = start; i < end; i++) {
|
||||
int t = i->type;
|
||||
double fraction_accum = 0;
|
||||
|
||||
if (z > i->maxzoom) {
|
||||
continue;
|
||||
unsigned long long previndex = 0;
|
||||
double scale = (double) (1LL << (64 - 2 * (z + 8)));
|
||||
double gap = 0;
|
||||
|
||||
long long original_features = 0;
|
||||
long long unclipped_features = 0;
|
||||
|
||||
std::vector<std::vector<coalesce> > features;
|
||||
for (i = 0; i < nlayers; i++) {
|
||||
features.push_back(std::vector<coalesce>());
|
||||
}
|
||||
|
||||
int within[(1 << MAX_ZOOM_INCREMENT) * (1 << MAX_ZOOM_INCREMENT)] = {0};
|
||||
long long geompos[(1 << MAX_ZOOM_INCREMENT) * (1 << MAX_ZOOM_INCREMENT)] = {0};
|
||||
|
||||
*geoms = og;
|
||||
|
||||
while (1) {
|
||||
signed char t;
|
||||
deserialize_byte(geoms, &t);
|
||||
if (t < 0) {
|
||||
break;
|
||||
}
|
||||
if ((t == VT_LINE && z + line_detail <= i->minzoom) ||
|
||||
(t == VT_POINT && z < i->minzoom)) {
|
||||
|
||||
long long layer;
|
||||
deserialize_long_long(geoms, &layer);
|
||||
|
||||
long long metastart;
|
||||
deserialize_long_long(geoms, &metastart);
|
||||
char *meta = metabase + metastart;
|
||||
long long bbox[4];
|
||||
|
||||
drawvec geom = decode_geometry(geoms, z, tx, ty, line_detail, bbox);
|
||||
|
||||
signed char feature_minzoom;
|
||||
deserialize_byte(geoms, &feature_minzoom);
|
||||
|
||||
double progress = floor((((*geoms - geomstart + along) / (double) todo) + z) / (file_maxzoom + 1) * 1000) / 10;
|
||||
if (progress != oprogress) {
|
||||
if (!quiet) {
|
||||
fprintf(stderr, " %3.1f%% %d/%u/%u \r", progress, z, tx, ty);
|
||||
}
|
||||
oprogress = progress;
|
||||
}
|
||||
|
||||
original_features++;
|
||||
|
||||
int quick = quick_check(bbox, z, line_detail, buffer);
|
||||
if (quick == 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (i->candup) {
|
||||
if (dup.count(i->fpos) != 0) {
|
||||
if (quick != 1) {
|
||||
if (t == VT_LINE) {
|
||||
geom = clip_lines(geom, z, line_detail, buffer);
|
||||
}
|
||||
if (t == VT_POLYGON) {
|
||||
geom = clip_poly(geom, z, line_detail, buffer);
|
||||
}
|
||||
if (t == VT_POINT) {
|
||||
geom = clip_point(geom, z, line_detail, buffer);
|
||||
}
|
||||
|
||||
geom = remove_noop(geom, t, 0);
|
||||
}
|
||||
|
||||
if (geom.size() > 0) {
|
||||
unclipped_features++;
|
||||
}
|
||||
|
||||
if (line_detail == detail && fraction == 1) { /* only write out the next zoom once, even if we retry */
|
||||
rewrite(geom, z, nextzoom, file_maxzoom, bbox, tx, ty, buffer, line_detail, within, geompos, geomfile, fname, t, layer, metastart, feature_minzoom);
|
||||
}
|
||||
|
||||
if (z < file_minzoom) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (t == VT_LINE && z + line_detail <= feature_minzoom) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (t == VT_POINT && z < feature_minzoom && gamma < 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (gamma >= 0 && (t == VT_POINT || (prevent['l' & 0xFF] && t == VT_LINE))) {
|
||||
seq++;
|
||||
if (seq >= 0) {
|
||||
seq -= interval;
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
dup.insert(i->fpos);
|
||||
|
||||
if (gamma > 0) {
|
||||
unsigned long long index = encode(bbox[0] / 2 + bbox[2] / 2, bbox[1] / 2 + bbox[3] / 2);
|
||||
if (gap > 0) {
|
||||
if (index == previndex) {
|
||||
continue; // Exact duplicate: can't fulfil the gap requirement
|
||||
}
|
||||
|
||||
if (exp(log((index - previndex) / scale) * gamma) >= gap) {
|
||||
// Dot is further from the previous than the nth root of the gap,
|
||||
// so produce it, and choose a new gap at the next point.
|
||||
gap = 0;
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
} else {
|
||||
gap = (index - previndex) / scale;
|
||||
|
||||
if (gap == 0) {
|
||||
continue; // Exact duplicate: skip
|
||||
} else if (gap < 1) {
|
||||
continue; // Narrow dot spacing: need to stretch out
|
||||
} else {
|
||||
gap = 0; // Wider spacing than minimum: so pass through unchanged
|
||||
}
|
||||
}
|
||||
|
||||
previndex = index;
|
||||
}
|
||||
}
|
||||
|
||||
char *meta = metabase + i->fpos;
|
||||
drawvec geom = decode_geometry(&meta, z, tx, ty, line_detail);
|
||||
fraction_accum += fraction;
|
||||
if (fraction_accum < 1) {
|
||||
continue;
|
||||
}
|
||||
fraction_accum -= 1;
|
||||
|
||||
bool reduced = false;
|
||||
if (t == VT_POLYGON) {
|
||||
geom = reduce_tiny_poly(geom, z, line_detail, &reduced, &accum_area);
|
||||
}
|
||||
|
||||
if (t == VT_LINE) {
|
||||
geom = clip_lines(geom, z, line_detail, buffer);
|
||||
}
|
||||
|
||||
if (t == VT_POLYGON) {
|
||||
geom = clip_poly(geom, z, line_detail, buffer);
|
||||
}
|
||||
|
||||
if (t == VT_LINE || t == VT_POLYGON) {
|
||||
if ((t == VT_LINE || t == VT_POLYGON) && !prevent['s' & 0xFF]) {
|
||||
if (!reduced) {
|
||||
if (t == VT_LINE) {
|
||||
geom = remove_noop(geom, t, 32 - z - line_detail);
|
||||
}
|
||||
|
||||
geom = simplify_lines(geom, z, line_detail);
|
||||
}
|
||||
}
|
||||
@ -407,7 +645,7 @@ long long write_tile(struct index *start, struct index *end, char *metabase, uns
|
||||
}
|
||||
#endif
|
||||
|
||||
if (t == VT_LINE) {
|
||||
if (t == VT_LINE && !prevent['r' & 0xFF]) {
|
||||
geom = reorder_lines(geom);
|
||||
}
|
||||
|
||||
@ -428,53 +666,86 @@ long long write_tile(struct index *start, struct index *end, char *metabase, uns
|
||||
c.index2 = ~0LL;
|
||||
}
|
||||
} else {
|
||||
c.index = i->index;
|
||||
c.index2 = i->index;
|
||||
c.index = 0;
|
||||
c.index2 = 0;
|
||||
}
|
||||
c.geom = geom;
|
||||
c.metasrc = meta;
|
||||
c.coalesced = false;
|
||||
|
||||
decode_meta(&meta, &keys, &values, file_keys, &c.meta, NULL);
|
||||
features.push_back(c);
|
||||
decode_meta(&meta, stringpool, keys[layer], values[layer], file_keys[layer], &c.meta, NULL);
|
||||
features[layer].push_back(c);
|
||||
}
|
||||
}
|
||||
|
||||
std::sort(features.begin(), features.end());
|
||||
int j;
|
||||
for (j = 0; j < (1 << MAX_ZOOM_INCREMENT) * (1 << MAX_ZOOM_INCREMENT); j++) {
|
||||
if (within[j]) {
|
||||
serialize_byte(geomfile[j], -2, &geompos[j], fname);
|
||||
within[j] = 0;
|
||||
}
|
||||
}
|
||||
|
||||
std::vector<coalesce> out;
|
||||
unsigned x;
|
||||
for (x = 0; x < features.size(); x++) {
|
||||
unsigned y = out.size() - 1;
|
||||
|
||||
if (out.size() > 0 && coalcmp(&features[x], &out[y]) < 0) {
|
||||
fprintf(stderr, "\nfeature out of order\n");
|
||||
for (j = 0; j < nlayers; j++) {
|
||||
if (!prevent['o' & 0xFF]) {
|
||||
std::sort(features[j].begin(), features[j].end());
|
||||
}
|
||||
|
||||
if (out.size() > 0 && out[y].geom.size() + features[x].geom.size() < 20000 && coalcmp(&features[x], &out[y]) == 0 && features[x].type != VT_POINT) {
|
||||
unsigned z;
|
||||
for (z = 0; z < features[x].geom.size(); z++) {
|
||||
out[y].geom.push_back(features[x].geom[z]);
|
||||
std::vector<coalesce> out;
|
||||
unsigned x;
|
||||
for (x = 0; x < features[j].size(); x++) {
|
||||
unsigned y = out.size() - 1;
|
||||
|
||||
#if 0
|
||||
if (out.size() > 0 && coalcmp(&features[j][x], &out[y]) < 0) {
|
||||
fprintf(stderr, "\nfeature out of order\n");
|
||||
}
|
||||
out[y].coalesced = true;
|
||||
} else {
|
||||
out.push_back(features[x]);
|
||||
}
|
||||
}
|
||||
features = out;
|
||||
#endif
|
||||
|
||||
for (x = 0; x < features.size(); x++) {
|
||||
if (features[x].coalesced && features[x].type == VT_LINE) {
|
||||
features[x].geom = remove_noop(features[x].geom, features[x].type);
|
||||
features[x].geom = simplify_lines(features[x].geom, 32, 0);
|
||||
if (!prevent['c' & 0xFF] && out.size() > 0 && out[y].geom.size() + features[j][x].geom.size() < 20000 && coalcmp(&features[j][x], &out[y]) == 0 && features[j][x].type != VT_POINT) {
|
||||
unsigned z;
|
||||
for (z = 0; z < features[j][x].geom.size(); z++) {
|
||||
out[y].geom.push_back(features[j][x].geom[z]);
|
||||
}
|
||||
out[y].coalesced = true;
|
||||
} else {
|
||||
out.push_back(features[j][x]);
|
||||
}
|
||||
}
|
||||
features[j] = out;
|
||||
|
||||
for (x = 0; x < features[j].size(); x++) {
|
||||
if (features[j][x].coalesced && features[j][x].type == VT_LINE) {
|
||||
features[j][x].geom = remove_noop(features[j][x].geom, features[j][x].type, 0);
|
||||
features[j][x].geom = simplify_lines(features[j][x].geom, 32, 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (features.size() > 0) {
mapnik::vector::tile tile = create_tile(layername, line_detail, features, &count, &keys, &values);
if (z == 0 && unclipped_features < original_features / 2) {
fprintf(stderr, "\n\nMore than half the features were clipped away at zoom level 0.\n");
fprintf(stderr, "Is your data in the wrong projection? It should be in WGS84/EPSG:4326.\n");
}

pool_free(&keys);
pool_free(&values);
long long totalsize = 0;
for (j = 0; j < nlayers; j++) {
totalsize += features[j].size();
}

if (totalsize > 0) {
if (totalsize > 200000 && !prevent['f' & 0xFF]) {
fprintf(stderr, "tile %d/%u/%u has %lld features, >200000 \n", z, tx, ty, totalsize);
fprintf(stderr, "Try using -z to set a higher base zoom level.\n");
return -1;
}

mapnik::vector::tile tile = create_tile(layernames, line_detail, features, &count, keys, values, nlayers);

int i;
for (i = 0; i < nlayers; i++) {
pool_free(&keys1[i]);
pool_free(&values1[i]);
}

std::string s;
std::string compressed;
@@ -482,23 +753,144 @@ long long write_tile(struct index *start, struct index *end, char *metabase, uns
tile.SerializeToString(&s);
compress(s, compressed);

if (compressed.size() > 500000) {
fprintf(stderr, "tile %d/%u/%u size is %lld with detail %d, >500000 \n", z, tx, ty, (long long) compressed.size(), line_detail);
if (compressed.size() > 500000 && !prevent['k' & 0xFF]) {
if (!quiet) {
fprintf(stderr, "tile %d/%u/%u size is %lld with detail %d, >500000 \n", z, tx, ty, (long long) compressed.size(), line_detail);
}

if (line_detail == MIN_DETAIL || !evaluated) {
if (line_detail == min_detail || !evaluated) {
evaluated = true;
evaluate(features, metabase, file_keys, layername, line_detail, compressed.size());
#if 0
evaluate(features[0], metabase, file_keys[0], layername, line_detail, compressed.size()); // XXX layer
#endif
}

if (prevent['d' & 0xFF]) {
// The 95% is a guess to avoid too many retries
// and probably actually varies based on how much duplicated metadata there is

fraction = fraction * 500000 / compressed.size() * 0.95;
if (!quiet) {
fprintf(stderr, "Going to try keeping %0.2f%% of the features to make it fit\n", fraction * 100);
}
line_detail++; // to keep it the same when the loop decrements it
}
} else {
mbtiles_write_tile(outdb, z, tx, ty, compressed.data(), compressed.size());
return count;
}
} else {
int i;
for (i = 0; i < nlayers; i++) {
pool_free(&keys1[i]);
pool_free(&values1[i]);
}

return count;
}
}

fprintf(stderr, "could not make tile %d/%u/%u small enough\n", z, tx, ty);
exit(EXIT_FAILURE);
return -1;
}

int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpool, unsigned *file_bbox, struct pool **file_keys, unsigned *midx, unsigned *midy, char **layernames, int maxzoom, int minzoom, sqlite3 *outdb, double droprate, int buffer, const char *fname, const char *tmpdir, double gamma, int nlayers, char *prevent, int full_detail, int low_detail, int min_detail) {
int i;
for (i = 0; i <= maxzoom; i++) {
long long most = 0;

FILE *sub[(1 << MAX_ZOOM_INCREMENT) * (1 << MAX_ZOOM_INCREMENT)];
int subfd[(1 << MAX_ZOOM_INCREMENT) * (1 << MAX_ZOOM_INCREMENT)];
int j;
for (j = 0; j < (1 << MAX_ZOOM_INCREMENT) * (1 << MAX_ZOOM_INCREMENT); j++) {
char geomname[strlen(tmpdir) + strlen("/geom.XXXXXXXX" XSTRINGIFY(INT_MAX)) + 1];
sprintf(geomname, "%s/geom%d.XXXXXXXX", tmpdir, j);
subfd[j] = mkstemp(geomname);
// printf("%s\n", geomname);
if (subfd[j] < 0) {
perror(geomname);
exit(EXIT_FAILURE);
}
sub[j] = fopen(geomname, "wb");
if (sub[j] == NULL) {
perror(geomname);
exit(EXIT_FAILURE);
}
unlink(geomname);
}

long long todo = 0;
long long along = 0;
for (j = 0; j < (1 << MAX_ZOOM_INCREMENT) * (1 << MAX_ZOOM_INCREMENT); j++) {
todo += geom_size[j];
}

for (j = 0; j < (1 << MAX_ZOOM_INCREMENT) * (1 << MAX_ZOOM_INCREMENT); j++) {
if (geomfd[j] < 0) {
// only one source file for zoom level 0
continue;
}
if (geom_size[j] == 0) {
continue;
}

// printf("%lld of geom_size\n", (long long) geom_size[j]);

char *geom = (char *) mmap(NULL, geom_size[j], PROT_READ, MAP_PRIVATE, geomfd[j], 0);
if (geom == MAP_FAILED) {
perror("mmap geom");
exit(EXIT_FAILURE);
}

char *geomstart = geom;
char *end = geom + geom_size[j];

while (geom < end) {
int z;
unsigned x, y;

deserialize_int(&geom, &z);
deserialize_uint(&geom, &x);
deserialize_uint(&geom, &y);

// fprintf(stderr, "%d/%u/%u\n", z, x, y);

long long len = write_tile(&geom, metabase, stringpool, file_bbox, z, x, y, z == maxzoom ? full_detail : low_detail, min_detail, maxzoom, file_keys, layernames, outdb, droprate, buffer, fname, sub, minzoom, maxzoom, todo, geomstart, along, gamma, nlayers, prevent);

if (len < 0) {
return i - 1;
}

if (z == maxzoom && len > most) {
*midx = x;
*midy = y;
most = len;
}
}

if (munmap(geomstart, geom_size[j]) != 0) {
perror("munmap geom");
}
along += geom_size[j];
}

for (j = 0; j < (1 << MAX_ZOOM_INCREMENT) * (1 << MAX_ZOOM_INCREMENT); j++) {
close(geomfd[j]);
fclose(sub[j]);

struct stat geomst;
if (fstat(subfd[j], &geomst) != 0) {
perror("stat geom\n");
exit(EXIT_FAILURE);
}

geomfd[j] = subfd[j];
geom_size[j] = geomst.st_size;
}
}

if (!quiet) {
fprintf(stderr, "\n");
}
return maxzoom;
}

tile.h (24 changed lines)
@@ -13,18 +13,24 @@

struct pool;

void serialize_int(FILE *out, int n, long long *fpos, const char *fname);
void serialize_long_long(FILE *out, long long n, long long *fpos, const char *fname);
void serialize_byte(FILE *out, signed char n, long long *fpos, const char *fname);
void serialize_uint(FILE *out, unsigned n, long long *fpos, const char *fname);
void serialize_string(FILE *out, const char *s, long long *fpos, const char *fname);

void deserialize_int(char **f, int *n);
void deserialize_long_long(char **f, long long *n);
void deserialize_uint(char **f, unsigned *n);
void deserialize_byte(char **f, signed char *n);
struct pool_val *deserialize_string(char **f, struct pool *p, int type);

long long write_tile(char **geom, char *metabase, char *stringpool, unsigned *file_bbox, int z, unsigned x, unsigned y, int detail, int min_detail, int basezoom, struct pool **file_keys, char **layernames, sqlite3 *outdb, double droprate, int buffer, const char *fname, FILE **geomfile, int file_minzoom, int file_maxzoom, double todo, char *geomstart, long long along, double gamma, int nlayers, char *prevent);

struct index {
unsigned long long index;
long long fpos : 44;
int maxzoom : 6;
int minzoom : 6;
int type : 7;
int candup : 1;
};
int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpool, unsigned *file_bbox, struct pool **file_keys, unsigned *midx, unsigned *midy, char **layernames, int maxzoom, int minzoom, sqlite3 *outdb, double droprate, int buffer, const char *fname, const char *tmpdir, double gamma, int nlayers, char *prevent, int full_detail, int low_detail, int min_detail);

long long write_tile(struct index *start, struct index *end, char *metabase, unsigned *file_bbox, int z, unsigned x, unsigned y, int detail, int basezoom, struct pool *file_keys, char *layername, sqlite3 *outdb, double droprate, int buffer);
extern unsigned initial_x, initial_y;
extern int geometry_scale;
extern int quiet;

#define MAX_ZOOM_INCREMENT 3