mirror of
https://github.com/mapbox/tippecanoe.git
synced 2025-03-25 13:17:38 +00:00
Merge branch 'master' into geobuf
This commit is contained in:
commit
23a4ed8754
@ -1,3 +1,7 @@
|
||||
## 1.22.0
|
||||
|
||||
* Add options to filter each tile's contents through a shell pipeline
|
||||
|
||||
## 1.21.0
|
||||
|
||||
* Generate layer, feature, and attribute statistics as part of tileset metadata
|
||||
|
23
Makefile
23
Makefile
@ -46,13 +46,13 @@ C = $(wildcard *.c) $(wildcard *.cpp)
|
||||
INCLUDES = -I/usr/local/include -I.
|
||||
LIBS = -L/usr/local/lib
|
||||
|
||||
tippecanoe: geojson.o jsonpull/jsonpull.o tile.o pool.o mbtiles.o geometry.o projection.o memfile.o mvt.o serial.o main.o text.o dirtiles.o geobuf.o
|
||||
tippecanoe: geojson.o jsonpull/jsonpull.o tile.o pool.o mbtiles.o geometry.o projection.o memfile.o mvt.o serial.o main.o text.o dirtiles.o plugin.o read_json.o write_json.o geobuf.o
|
||||
$(CXX) $(PG) $(LIBS) $(FINAL_FLAGS) $(CXXFLAGS) -o $@ $^ $(LDFLAGS) -lm -lz -lsqlite3 -lpthread
|
||||
|
||||
tippecanoe-enumerate: enumerate.o
|
||||
$(CXX) $(PG) $(LIBS) $(FINAL_FLAGS) $(CFLAGS) -o $@ $^ $(LDFLAGS) -lsqlite3
|
||||
|
||||
tippecanoe-decode: decode.o projection.o mvt.o
|
||||
tippecanoe-decode: decode.o projection.o mvt.o write_json.o
|
||||
$(CXX) $(PG) $(LIBS) $(FINAL_FLAGS) $(CXXFLAGS) -o $@ $^ $(LDFLAGS) -lm -lz -lsqlite3
|
||||
|
||||
tile-join: tile-join.o projection.o pool.o mbtiles.o mvt.o memfile.o dirtiles.o jsonpull/jsonpull.o text.o
|
||||
@ -90,17 +90,18 @@ test: tippecanoe tippecanoe-decode $(addsuffix .check,$(TESTS)) raw-tiles-test p
|
||||
|
||||
parallel-test:
|
||||
mkdir -p tests/parallel
|
||||
perl -e 'for ($$i = 0; $$i < 20; $$i++) { $$lon = rand(360) - 180; $$lat = rand(180) - 90; print "{ \"type\": \"Feature\", \"properties\": { \"yes\": \"no\", \"who\": 1 }, \"geometry\": { \"type\": \"Point\", \"coordinates\": [ $$lon, $$lat ] } }\n"; }' > tests/parallel/in1.json
|
||||
perl -e 'for ($$i = 0; $$i < 20; $$i++) { $$lon = rand(360) - 180; $$lat = rand(180) - 90; $$k = rand(1); $$v = rand(1); print "{ \"type\": \"Feature\", \"properties\": { \"yes\": \"no\", \"who\": 1, \"$$k\": \"$$v\" }, \"geometry\": { \"type\": \"Point\", \"coordinates\": [ $$lon, $$lat ] } }\n"; }' > tests/parallel/in1.json
|
||||
perl -e 'for ($$i = 0; $$i < 300000; $$i++) { $$lon = rand(360) - 180; $$lat = rand(180) - 90; print "{ \"type\": \"Feature\", \"properties\": { }, \"geometry\": { \"type\": \"Point\", \"coordinates\": [ $$lon, $$lat ] } }\n"; }' > tests/parallel/in2.json
|
||||
perl -e 'for ($$i = 0; $$i < 20; $$i++) { $$lon = rand(360) - 180; $$lat = rand(180) - 90; print "{ \"type\": \"Feature\", \"properties\": { }, \"geometry\": { \"type\": \"Point\", \"coordinates\": [ $$lon, $$lat ] } }\n"; }' > tests/parallel/in3.json
|
||||
perl -e 'for ($$i = 0; $$i < 20; $$i++) { $$lon = rand(360) - 180; $$lat = rand(180) - 90; $$v = rand(1); print "{ \"type\": \"Feature\", \"properties\": { }, \"tippecanoe\": { \"layer\": \"$$v\" }, \"geometry\": { \"type\": \"Point\", \"coordinates\": [ $$lon, $$lat ] } }\n"; }' > tests/parallel/in4.json
|
||||
echo -n "" > tests/parallel/empty1.json
|
||||
echo "" > tests/parallel/empty2.json
|
||||
./tippecanoe -z5 -f -pi -l test -n test -o tests/parallel/linear-file.mbtiles tests/parallel/in[123].json tests/parallel/empty[12].json
|
||||
./tippecanoe -z5 -f -pi -l test -n test -P -o tests/parallel/parallel-file.mbtiles tests/parallel/in[123].json tests/parallel/empty[12].json
|
||||
cat tests/parallel/in[123].json | ./tippecanoe -z5 -f -pi -l test -n test -o tests/parallel/linear-pipe.mbtiles
|
||||
cat tests/parallel/in[123].json | ./tippecanoe -z5 -f -pi -l test -n test -P -o tests/parallel/parallel-pipe.mbtiles
|
||||
cat tests/parallel/in[123].json | sed 's/^/@/' | tr '@' '\036' | ./tippecanoe -z5 -f -pi -l test -n test -o tests/parallel/implicit-pipe.mbtiles
|
||||
./tippecanoe -z5 -f -pi -l test -n test -P -o tests/parallel/parallel-pipes.mbtiles <(cat tests/parallel/in1.json) <(cat tests/parallel/empty1.json) <(cat tests/parallel/empty2.json) <(cat tests/parallel/in2.json) /dev/null <(cat tests/parallel/in3.json)
|
||||
./tippecanoe -z5 -f -pi -l test -n test -o tests/parallel/linear-file.mbtiles tests/parallel/in[1234].json tests/parallel/empty[12].json
|
||||
./tippecanoe -z5 -f -pi -l test -n test -P -o tests/parallel/parallel-file.mbtiles tests/parallel/in[1234].json tests/parallel/empty[12].json
|
||||
cat tests/parallel/in[1234].json | ./tippecanoe -z5 -f -pi -l test -n test -o tests/parallel/linear-pipe.mbtiles
|
||||
cat tests/parallel/in[1234].json | ./tippecanoe -z5 -f -pi -l test -n test -P -o tests/parallel/parallel-pipe.mbtiles
|
||||
cat tests/parallel/in[1234].json | sed 's/^/@/' | tr '@' '\036' | ./tippecanoe -z5 -f -pi -l test -n test -o tests/parallel/implicit-pipe.mbtiles
|
||||
./tippecanoe -z5 -f -pi -l test -n test -P -o tests/parallel/parallel-pipes.mbtiles <(cat tests/parallel/in1.json) <(cat tests/parallel/empty1.json) <(cat tests/parallel/empty2.json) <(cat tests/parallel/in2.json) /dev/null <(cat tests/parallel/in3.json) <(cat tests/parallel/in4.json)
|
||||
./tippecanoe-decode tests/parallel/linear-file.mbtiles > tests/parallel/linear-file.json
|
||||
./tippecanoe-decode tests/parallel/parallel-file.mbtiles > tests/parallel/parallel-file.json
|
||||
./tippecanoe-decode tests/parallel/linear-pipe.mbtiles > tests/parallel/linear-pipe.json
|
||||
@ -123,8 +124,10 @@ decode-test:
|
||||
mkdir -p tests/muni/decode
|
||||
./tippecanoe -z11 -Z11 -f -o tests/muni/decode/multi.mbtiles tests/muni/*.json
|
||||
./tippecanoe-decode -l subway tests/muni/decode/multi.mbtiles > tests/muni/decode/multi.mbtiles.json.check
|
||||
./tippecanoe-decode -c tests/muni/decode/multi.mbtiles > tests/muni/decode/multi.mbtiles.pipeline.json.check
|
||||
cmp tests/muni/decode/multi.mbtiles.json.check tests/muni/decode/multi.mbtiles.json
|
||||
rm -f tests/muni/decode/multi.mbtiles.json.check tests/muni/decode/multi.mbtiles
|
||||
cmp tests/muni/decode/multi.mbtiles.pipeline.json.check tests/muni/decode/multi.mbtiles.pipeline.json
|
||||
rm -f tests/muni/decode/multi.mbtiles.json.check tests/muni/decode/multi.mbtiles tests/muni/decode/multi.mbtiles.pipeline.json.check
|
||||
|
||||
pbf-test:
|
||||
./tippecanoe-decode tests/pbf/11-328-791.vector.pbf 11 328 791 > tests/pbf/11-328-791.vector.pbf.out
|
||||
|
46
README.md
46
README.md
@ -236,6 +236,51 @@ resolution is obtained than by using a smaller _maxzoom_ or _detail_.
|
||||
* `-q` or `--quiet`: Work quietly instead of reporting progress
|
||||
* `-v` or `--version`: Report Tippecanoe's version number
|
||||
|
||||
### Filters
|
||||
|
||||
* `-C` _command_ or `--prefilter=`_command_: Specify a shell filter command to be run at the start of assembling each tile
|
||||
* `-c` _command_ or `--postfilter=`_command_: Specify a shell filter command to be run at the end of assembling each tile
|
||||
|
||||
The pre- and post-filter commands allow you to do optional filtering or transformation on the features of each tile
|
||||
as it is created. They are shell commands, run with the zoom level, X, and Y as the `$1`, `$2`, and `$3` arguments.
|
||||
Future versions of Tippecanoe may add additional arguments for more context.
|
||||
|
||||
The features are provided to the filter
|
||||
as a series of newline-delimited GeoJSON objects on the standard input, and `tippecanoe` expects to read another
|
||||
set of GeoJSON features from the filter's standard output.
|
||||
|
||||
The prefilter receives the features at the highest available resolution, before line simplification,
|
||||
polygon topology repair, gamma calculation, dynamic feature dropping, or other internal processing.
|
||||
The postfilter receives the features at tile resolution, after simplification, cleaning, and dropping.
|
||||
|
||||
The layer name is provided as part of the `tippecanoe` element of the feature and must be passed through
|
||||
to keep the feature in its correct layer. In the case of the prefilter, the `tippecanoe` element may also
|
||||
contain `index`, `sequence`, and `extent` elements, which must be passed through for internal operations like
|
||||
`--drop-densest-as-needed`, `--drop-smallest-as-needed`, and `--preserve-input-order` to work.
|
||||
|
||||
#### Examples:
|
||||
|
||||
* Make a tileset of the Natural Earth countries to zoom level 5, and also copy the GeoJSON features
|
||||
to files in a `tiles/z/x/y.geojson` directory hierarchy.
|
||||
|
||||
```
|
||||
tippecanoe -o countries.mbtiles -z5 -C 'mkdir -p tiles/$1/$2; tee tiles/$1/$2/$3.geojson' ne_10m_admin_0_countries.json
|
||||
```
|
||||
|
||||
* Make a tileset of the Natural Earth countries to zoom level 5, but including only those tiles that
|
||||
intersect the [bounding box of Germany](https://www.flickr.com/places/info/23424829).
|
||||
(The `limit-tiles-to-bbox` script is [in the Tippecanoe source directory](filters/limit-tiles-to-bbox).)
|
||||
|
||||
```
|
||||
tippecanoe -o countries.mbtiles -z5 -C './filters/limit-tiles-to-bbox 5.8662 47.2702 15.0421 55.0581 $*' ne_10m_admin_0_countries.json
|
||||
```
|
||||
|
||||
* Make a tileset of TIGER roads in Tippecanoe County, leaving out all but primary and secondary roads (as [classified by TIGER](https://www.census.gov/geo/reference/mtfcc.html)) below zoom level 11.
|
||||
|
||||
```
|
||||
tippecanoe -o roads.mbtiles -c 'if [ $1 -lt 11 ]; then grep "\"MTFCC\": \"S1[12]00\""; else cat; fi' tl_2016_18157_roads.json
|
||||
```
|
||||
|
||||
Environment
|
||||
-----------
|
||||
|
||||
@ -510,4 +555,5 @@ resolutions.
|
||||
* `-z` _maxzoom_ or `--maximum-zoom=`*maxzoom*: Specify the highest zoom level to decode from the tileset
|
||||
* `-Z` _minzoom_ or `--minimum-zoom=`*minzoom*: Specify the lowest zoom level to decode from the tileset
|
||||
* `-l` _layer_ or `--layer=`*layer*: Decode only layers with the specified names. (Multiple `-l` options can be specified.)
|
||||
* `-c` or `--tag-layer-and-zoom`: Include each feature's layer and zoom level as part of its `tippecanoe` object rather than as a FeatureCollection wrapper
|
||||
* `-f` or `--force`: Decode tiles even if polygon ring order or closure problems are detected
|
||||
|
442
decode.cpp
442
decode.cpp
@ -17,43 +17,13 @@
|
||||
#include "mvt.hpp"
|
||||
#include "projection.hpp"
|
||||
#include "geometry.hpp"
|
||||
#include "write_json.hpp"
|
||||
|
||||
int minzoom = 0;
|
||||
int maxzoom = 32;
|
||||
bool force = false;
|
||||
|
||||
void printq(const char *s) {
|
||||
putchar('"');
|
||||
for (; *s; s++) {
|
||||
if (*s == '\\' || *s == '"') {
|
||||
printf("\\%c", *s);
|
||||
} else if (*s >= 0 && *s < ' ') {
|
||||
printf("\\u%04x", *s);
|
||||
} else {
|
||||
putchar(*s);
|
||||
}
|
||||
}
|
||||
putchar('"');
|
||||
}
|
||||
|
||||
struct lonlat {
|
||||
int op;
|
||||
double lon;
|
||||
double lat;
|
||||
int x;
|
||||
int y;
|
||||
|
||||
lonlat(int nop, double nlon, double nlat, int nx, int ny) {
|
||||
this->op = nop;
|
||||
this->lon = nlon;
|
||||
this->lat = nlat;
|
||||
this->x = nx;
|
||||
this->y = ny;
|
||||
}
|
||||
};
|
||||
|
||||
void handle(std::string message, int z, unsigned x, unsigned y, int describe, std::set<std::string> const &to_decode) {
|
||||
int within = 0;
|
||||
void handle(std::string message, int z, unsigned x, unsigned y, int describe, std::set<std::string> const &to_decode, bool pipeline) {
|
||||
mvt_tile tile;
|
||||
bool was_compressed;
|
||||
|
||||
@ -67,321 +37,71 @@ void handle(std::string message, int z, unsigned x, unsigned y, int describe, st
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
printf("{ \"type\": \"FeatureCollection\"");
|
||||
if (!pipeline) {
|
||||
printf("{ \"type\": \"FeatureCollection\"");
|
||||
|
||||
if (describe) {
|
||||
printf(", \"properties\": { \"zoom\": %d, \"x\": %d, \"y\": %d", z, x, y);
|
||||
if (!was_compressed) {
|
||||
printf(", \"compressed\": false");
|
||||
if (describe) {
|
||||
printf(", \"properties\": { \"zoom\": %d, \"x\": %d, \"y\": %d", z, x, y);
|
||||
if (!was_compressed) {
|
||||
printf(", \"compressed\": false");
|
||||
}
|
||||
printf(" }");
|
||||
|
||||
if (projection != projections) {
|
||||
printf(", \"crs\": { \"type\": \"name\", \"properties\": { \"name\": ");
|
||||
fprintq(stdout, projection->alias);
|
||||
printf(" } }");
|
||||
}
|
||||
}
|
||||
|
||||
printf(" }");
|
||||
|
||||
if (projection != projections) {
|
||||
printf(", \"crs\": { \"type\": \"name\", \"properties\": { \"name\": ");
|
||||
printq(projection->alias);
|
||||
printf(" } }");
|
||||
}
|
||||
printf(", \"features\": [\n");
|
||||
}
|
||||
|
||||
printf(", \"features\": [\n");
|
||||
|
||||
bool first_layer = true;
|
||||
for (size_t l = 0; l < tile.layers.size(); l++) {
|
||||
mvt_layer &layer = tile.layers[l];
|
||||
int extent = layer.extent;
|
||||
|
||||
if (to_decode.size() != 0 && !to_decode.count(layer.name)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (describe) {
|
||||
if (!first_layer) {
|
||||
printf(",\n");
|
||||
if (!pipeline) {
|
||||
if (describe) {
|
||||
if (!first_layer) {
|
||||
printf(",\n");
|
||||
}
|
||||
|
||||
printf("{ \"type\": \"FeatureCollection\"");
|
||||
printf(", \"properties\": { \"layer\": ");
|
||||
fprintq(stdout, layer.name.c_str());
|
||||
printf(", \"version\": %d, \"extent\": %lld", layer.version, layer.extent);
|
||||
printf(" }");
|
||||
printf(", \"features\": [\n");
|
||||
|
||||
first_layer = false;
|
||||
}
|
||||
|
||||
printf("{ \"type\": \"FeatureCollection\"");
|
||||
printf(", \"properties\": { \"layer\": ");
|
||||
printq(layer.name.c_str());
|
||||
printf(", \"version\": %d, \"extent\": %d", layer.version, layer.extent);
|
||||
printf(" }");
|
||||
printf(", \"features\": [\n");
|
||||
|
||||
first_layer = false;
|
||||
within = 0;
|
||||
}
|
||||
|
||||
for (size_t f = 0; f < layer.features.size(); f++) {
|
||||
mvt_feature &feat = layer.features[f];
|
||||
layer_to_geojson(stdout, layer, z, x, y, !pipeline, pipeline, pipeline, 0, 0, 0, !force);
|
||||
|
||||
if (within) {
|
||||
printf(",\n");
|
||||
if (!pipeline) {
|
||||
if (describe) {
|
||||
printf("] }\n");
|
||||
}
|
||||
within = 1;
|
||||
|
||||
printf("{ \"type\": \"Feature\"");
|
||||
|
||||
if (feat.has_id) {
|
||||
printf(", \"id\": %llu", feat.id);
|
||||
}
|
||||
|
||||
printf(", \"properties\": { ");
|
||||
|
||||
for (size_t t = 0; t + 1 < feat.tags.size(); t += 2) {
|
||||
if (t != 0) {
|
||||
printf(", ");
|
||||
}
|
||||
|
||||
if (feat.tags[t] >= layer.keys.size()) {
|
||||
fprintf(stderr, "Error: out of bounds feature key\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (feat.tags[t + 1] >= layer.values.size()) {
|
||||
fprintf(stderr, "Error: out of bounds feature value\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
const char *key = layer.keys[feat.tags[t]].c_str();
|
||||
mvt_value const &val = layer.values[feat.tags[t + 1]];
|
||||
|
||||
if (val.type == mvt_string) {
|
||||
printq(key);
|
||||
printf(": ");
|
||||
printq(val.string_value.c_str());
|
||||
} else if (val.type == mvt_int) {
|
||||
printq(key);
|
||||
printf(": %lld", (long long) val.numeric_value.int_value);
|
||||
} else if (val.type == mvt_double) {
|
||||
printq(key);
|
||||
double v = val.numeric_value.double_value;
|
||||
if (v == (long long) v) {
|
||||
printf(": %lld", (long long) v);
|
||||
} else {
|
||||
printf(": %g", v);
|
||||
}
|
||||
} else if (val.type == mvt_float) {
|
||||
printq(key);
|
||||
double v = val.numeric_value.float_value;
|
||||
if (v == (long long) v) {
|
||||
printf(": %lld", (long long) v);
|
||||
} else {
|
||||
printf(": %g", v);
|
||||
}
|
||||
} else if (val.type == mvt_sint) {
|
||||
printq(key);
|
||||
printf(": %lld", (long long) val.numeric_value.sint_value);
|
||||
} else if (val.type == mvt_uint) {
|
||||
printq(key);
|
||||
printf(": %lld", (long long) val.numeric_value.uint_value);
|
||||
} else if (val.type == mvt_bool) {
|
||||
printq(key);
|
||||
printf(": %s", val.numeric_value.bool_value ? "true" : "false");
|
||||
}
|
||||
}
|
||||
|
||||
printf(" }, \"geometry\": { ");
|
||||
|
||||
std::vector<lonlat> ops;
|
||||
|
||||
for (size_t g = 0; g < feat.geometry.size(); g++) {
|
||||
int op = feat.geometry[g].op;
|
||||
long long px = feat.geometry[g].x;
|
||||
long long py = feat.geometry[g].y;
|
||||
|
||||
if (op == VT_MOVETO || op == VT_LINETO) {
|
||||
long long scale = 1LL << (32 - z);
|
||||
long long wx = scale * x + (scale / extent) * px;
|
||||
long long wy = scale * y + (scale / extent) * py;
|
||||
|
||||
double lat, lon;
|
||||
projection->unproject(wx, wy, 32, &lon, &lat);
|
||||
|
||||
ops.push_back(lonlat(op, lon, lat, px, py));
|
||||
} else {
|
||||
ops.push_back(lonlat(op, 0, 0, 0, 0));
|
||||
}
|
||||
}
|
||||
|
||||
if (feat.type == VT_POINT) {
|
||||
if (ops.size() == 1) {
|
||||
printf("\"type\": \"Point\", \"coordinates\": [ %f, %f ]", ops[0].lon, ops[0].lat);
|
||||
} else {
|
||||
printf("\"type\": \"MultiPoint\", \"coordinates\": [ ");
|
||||
for (size_t i = 0; i < ops.size(); i++) {
|
||||
if (i != 0) {
|
||||
printf(", ");
|
||||
}
|
||||
printf("[ %f, %f ]", ops[i].lon, ops[i].lat);
|
||||
}
|
||||
printf(" ]");
|
||||
}
|
||||
} else if (feat.type == VT_LINE) {
|
||||
int movetos = 0;
|
||||
for (size_t i = 0; i < ops.size(); i++) {
|
||||
if (ops[i].op == VT_MOVETO) {
|
||||
movetos++;
|
||||
}
|
||||
}
|
||||
|
||||
if (movetos < 2) {
|
||||
printf("\"type\": \"LineString\", \"coordinates\": [ ");
|
||||
for (size_t i = 0; i < ops.size(); i++) {
|
||||
if (i != 0) {
|
||||
printf(", ");
|
||||
}
|
||||
printf("[ %f, %f ]", ops[i].lon, ops[i].lat);
|
||||
}
|
||||
printf(" ]");
|
||||
} else {
|
||||
printf("\"type\": \"MultiLineString\", \"coordinates\": [ [ ");
|
||||
int state = 0;
|
||||
for (size_t i = 0; i < ops.size(); i++) {
|
||||
if (ops[i].op == VT_MOVETO) {
|
||||
if (state == 0) {
|
||||
printf("[ %f, %f ]", ops[i].lon, ops[i].lat);
|
||||
state = 1;
|
||||
} else {
|
||||
printf(" ], [ ");
|
||||
printf("[ %f, %f ]", ops[i].lon, ops[i].lat);
|
||||
state = 1;
|
||||
}
|
||||
} else {
|
||||
printf(", [ %f, %f ]", ops[i].lon, ops[i].lat);
|
||||
}
|
||||
}
|
||||
printf(" ] ]");
|
||||
}
|
||||
} else if (feat.type == VT_POLYGON) {
|
||||
std::vector<std::vector<lonlat> > rings;
|
||||
std::vector<double> areas;
|
||||
|
||||
for (size_t i = 0; i < ops.size(); i++) {
|
||||
if (ops[i].op == VT_MOVETO) {
|
||||
rings.push_back(std::vector<lonlat>());
|
||||
areas.push_back(0);
|
||||
}
|
||||
|
||||
int n = rings.size() - 1;
|
||||
if (n >= 0) {
|
||||
if (ops[i].op == VT_CLOSEPATH) {
|
||||
rings[n].push_back(rings[n][0]);
|
||||
} else {
|
||||
rings[n].push_back(ops[i]);
|
||||
}
|
||||
}
|
||||
|
||||
if (i + 1 >= ops.size() || ops[i + 1].op == VT_MOVETO) {
|
||||
if (ops[i].op != VT_CLOSEPATH) {
|
||||
static bool warned = false;
|
||||
|
||||
if (!warned) {
|
||||
fprintf(stderr, "Ring does not end with closepath (ends with %d)\n", ops[i].op);
|
||||
if (!force) {
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
warned = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
int outer = 0;
|
||||
|
||||
for (size_t i = 0; i < rings.size(); i++) {
|
||||
long double area = 0;
|
||||
for (size_t k = 0; k < rings[i].size(); k++) {
|
||||
if (rings[i][k].op != VT_CLOSEPATH) {
|
||||
area += rings[i][k].x * rings[i][(k + 1) % rings[i].size()].y;
|
||||
area -= rings[i][k].y * rings[i][(k + 1) % rings[i].size()].x;
|
||||
}
|
||||
}
|
||||
|
||||
areas[i] = area;
|
||||
if (areas[i] >= 0 || i == 0) {
|
||||
outer++;
|
||||
}
|
||||
|
||||
// printf("area %f\n", area / .00000274 / .00000274);
|
||||
}
|
||||
|
||||
if (outer > 1) {
|
||||
printf("\"type\": \"MultiPolygon\", \"coordinates\": [ [ [ ");
|
||||
} else {
|
||||
printf("\"type\": \"Polygon\", \"coordinates\": [ [ ");
|
||||
}
|
||||
|
||||
int state = 0;
|
||||
for (size_t i = 0; i < rings.size(); i++) {
|
||||
if (i == 0 && areas[i] < 0) {
|
||||
static bool warned = false;
|
||||
|
||||
if (!warned) {
|
||||
fprintf(stderr, "Polygon begins with an inner ring\n");
|
||||
if (!force) {
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
warned = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (areas[i] >= 0) {
|
||||
if (state != 0) {
|
||||
// new multipolygon
|
||||
printf(" ] ], [ [ ");
|
||||
}
|
||||
state = 1;
|
||||
}
|
||||
|
||||
if (state == 2) {
|
||||
// new ring in the same polygon
|
||||
printf(" ], [ ");
|
||||
}
|
||||
|
||||
for (size_t j = 0; j < rings[i].size(); j++) {
|
||||
if (rings[i][j].op != VT_CLOSEPATH) {
|
||||
if (j != 0) {
|
||||
printf(", ");
|
||||
}
|
||||
|
||||
printf("[ %f, %f ]", rings[i][j].lon, rings[i][j].lat);
|
||||
} else {
|
||||
if (j != 0) {
|
||||
printf(", ");
|
||||
}
|
||||
|
||||
printf("[ %f, %f ]", rings[i][0].lon, rings[i][0].lat);
|
||||
}
|
||||
}
|
||||
|
||||
state = 2;
|
||||
}
|
||||
|
||||
if (outer > 1) {
|
||||
printf(" ] ] ]");
|
||||
} else {
|
||||
printf(" ] ]");
|
||||
}
|
||||
}
|
||||
|
||||
printf(" } }\n");
|
||||
}
|
||||
|
||||
if (describe) {
|
||||
printf("] }\n");
|
||||
}
|
||||
}
|
||||
|
||||
printf("] }\n");
|
||||
if (!pipeline) {
|
||||
printf("] }\n");
|
||||
}
|
||||
}
|
||||
|
||||
void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> const &to_decode) {
|
||||
void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> const &to_decode, bool pipeline) {
|
||||
sqlite3 *db;
|
||||
int oz = z;
|
||||
unsigned ox = x, oy = y;
|
||||
|
||||
int fd = open(fname, O_RDONLY);
|
||||
int fd = open(fname, O_RDONLY | O_CLOEXEC);
|
||||
if (fd >= 0) {
|
||||
struct stat st;
|
||||
if (fstat(fd, &st) == 0) {
|
||||
@ -391,7 +111,7 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
|
||||
if (strcmp(map, "SQLite format 3") != 0) {
|
||||
if (z >= 0) {
|
||||
std::string s = std::string(map, st.st_size);
|
||||
handle(s, z, x, y, 1, to_decode);
|
||||
handle(s, z, x, y, 1, to_decode, pipeline);
|
||||
munmap(map, st.st_size);
|
||||
return;
|
||||
} else {
|
||||
@ -405,7 +125,10 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
|
||||
} else {
|
||||
perror("fstat");
|
||||
}
|
||||
close(fd);
|
||||
if (close(fd) != 0) {
|
||||
perror("close");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
} else {
|
||||
perror(fname);
|
||||
}
|
||||
@ -416,32 +139,35 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
|
||||
}
|
||||
|
||||
if (z < 0) {
|
||||
printf("{ \"type\": \"FeatureCollection\", \"properties\": {\n");
|
||||
|
||||
const char *sql2 = "SELECT name, value from metadata order by name;";
|
||||
sqlite3_stmt *stmt2;
|
||||
if (sqlite3_prepare_v2(db, sql2, -1, &stmt2, NULL) != SQLITE_OK) {
|
||||
fprintf(stderr, "%s: select failed: %s\n", fname, sqlite3_errmsg(db));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
int within = 0;
|
||||
while (sqlite3_step(stmt2) == SQLITE_ROW) {
|
||||
if (within) {
|
||||
printf(",\n");
|
||||
|
||||
if (!pipeline) {
|
||||
printf("{ \"type\": \"FeatureCollection\", \"properties\": {\n");
|
||||
|
||||
const char *sql2 = "SELECT name, value from metadata order by name;";
|
||||
sqlite3_stmt *stmt2;
|
||||
if (sqlite3_prepare_v2(db, sql2, -1, &stmt2, NULL) != SQLITE_OK) {
|
||||
fprintf(stderr, "%s: select failed: %s\n", fname, sqlite3_errmsg(db));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
within = 1;
|
||||
|
||||
const unsigned char *name = sqlite3_column_text(stmt2, 0);
|
||||
const unsigned char *value = sqlite3_column_text(stmt2, 1);
|
||||
while (sqlite3_step(stmt2) == SQLITE_ROW) {
|
||||
if (within) {
|
||||
printf(",\n");
|
||||
}
|
||||
within = 1;
|
||||
|
||||
printq((char *) name);
|
||||
printf(": ");
|
||||
printq((char *) value);
|
||||
const unsigned char *name = sqlite3_column_text(stmt2, 0);
|
||||
const unsigned char *value = sqlite3_column_text(stmt2, 1);
|
||||
|
||||
fprintq(stdout, (char *) name);
|
||||
printf(": ");
|
||||
fprintq(stdout, (char *) value);
|
||||
}
|
||||
|
||||
sqlite3_finalize(stmt2);
|
||||
}
|
||||
|
||||
sqlite3_finalize(stmt2);
|
||||
|
||||
const char *sql = "SELECT tile_data, zoom_level, tile_column, tile_row from tiles where zoom_level between ? and ? order by zoom_level, tile_column, tile_row;";
|
||||
sqlite3_stmt *stmt;
|
||||
if (sqlite3_prepare_v2(db, sql, -1, &stmt, NULL) != SQLITE_OK) {
|
||||
@ -452,14 +178,18 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
|
||||
sqlite3_bind_int(stmt, 1, minzoom);
|
||||
sqlite3_bind_int(stmt, 2, maxzoom);
|
||||
|
||||
printf("\n}, \"features\": [\n");
|
||||
if (!pipeline) {
|
||||
printf("\n}, \"features\": [\n");
|
||||
}
|
||||
|
||||
within = 0;
|
||||
while (sqlite3_step(stmt) == SQLITE_ROW) {
|
||||
if (within) {
|
||||
printf(",\n");
|
||||
if (!pipeline) {
|
||||
if (within) {
|
||||
printf(",\n");
|
||||
}
|
||||
within = 1;
|
||||
}
|
||||
within = 1;
|
||||
|
||||
int len = sqlite3_column_bytes(stmt, 0);
|
||||
int tz = sqlite3_column_int(stmt, 1);
|
||||
@ -468,10 +198,12 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
|
||||
ty = (1LL << tz) - 1 - ty;
|
||||
const char *s = (const char *) sqlite3_column_blob(stmt, 0);
|
||||
|
||||
handle(std::string(s, len), tz, tx, ty, 1, to_decode);
|
||||
handle(std::string(s, len), tz, tx, ty, 1, to_decode, pipeline);
|
||||
}
|
||||
|
||||
printf("] }\n");
|
||||
if (!pipeline) {
|
||||
printf("] }\n");
|
||||
}
|
||||
|
||||
sqlite3_finalize(stmt);
|
||||
} else {
|
||||
@ -496,7 +228,7 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
|
||||
fprintf(stderr, "%s: Warning: using tile %d/%u/%u instead of %d/%u/%u\n", fname, z, x, y, oz, ox, oy);
|
||||
}
|
||||
|
||||
handle(std::string(s, len), z, x, y, 0, to_decode);
|
||||
handle(std::string(s, len), z, x, y, 0, to_decode, pipeline);
|
||||
handled = 1;
|
||||
}
|
||||
|
||||
@ -515,7 +247,7 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
|
||||
}
|
||||
|
||||
void usage(char **argv) {
|
||||
fprintf(stderr, "Usage: %s [-t projection] [-Z minzoom] [-z maxzoom] [-l layer ...] file.mbtiles [zoom x y]\n", argv[0]);
|
||||
fprintf(stderr, "Usage: %s [-s projection] [-Z minzoom] [-z maxzoom] [-l layer ...] file.mbtiles [zoom x y]\n", argv[0]);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
@ -524,12 +256,14 @@ int main(int argc, char **argv) {
|
||||
extern char *optarg;
|
||||
int i;
|
||||
std::set<std::string> to_decode;
|
||||
bool pipeline = false;
|
||||
|
||||
struct option long_options[] = {
|
||||
{"projection", required_argument, 0, 's'},
|
||||
{"maximum-zoom", required_argument, 0, 'z'},
|
||||
{"minimum-zoom", required_argument, 0, 'Z'},
|
||||
{"layer", required_argument, 0, 'l'},
|
||||
{"tag-layer-and-zoom", no_argument, 0, 'c'},
|
||||
{"force", no_argument, 0, 'f'},
|
||||
{0, 0, 0, 0},
|
||||
};
|
||||
@ -566,6 +300,10 @@ int main(int argc, char **argv) {
|
||||
to_decode.insert(optarg);
|
||||
break;
|
||||
|
||||
case 'c':
|
||||
pipeline = true;
|
||||
break;
|
||||
|
||||
case 'f':
|
||||
force = true;
|
||||
break;
|
||||
@ -576,9 +314,9 @@ int main(int argc, char **argv) {
|
||||
}
|
||||
|
||||
if (argc == optind + 4) {
|
||||
decode(argv[optind], atoi(argv[optind + 1]), atoi(argv[optind + 2]), atoi(argv[optind + 3]), to_decode);
|
||||
decode(argv[optind], atoi(argv[optind + 1]), atoi(argv[optind + 2]), atoi(argv[optind + 3]), to_decode, pipeline);
|
||||
} else if (argc == optind + 1) {
|
||||
decode(argv[optind], -1, -1, -1, to_decode);
|
||||
decode(argv[optind], -1, -1, -1, to_decode, pipeline);
|
||||
} else {
|
||||
usage(argv);
|
||||
}
|
||||
|
27
filters/limit-tiles-to-bbox
Executable file
27
filters/limit-tiles-to-bbox
Executable file
@ -0,0 +1,27 @@
|
||||
#!/usr/bin/perl
|
||||
|
||||
use Math::Trig;
|
||||
use strict;
|
||||
|
||||
# http://wiki.openstreetmap.org/wiki/Slippy_map_tilenames
|
||||
sub getTileNumber {
|
||||
my ($lat, $lon, $zoom) = @_;
|
||||
my $xtile = int(($lon + 180) / 360 * 2 ** $zoom);
|
||||
my $ytile = int((1 - log(tan(deg2rad($lat)) + sec(deg2rad($lat))) / pi) / 2 * 2 ** $zoom);
|
||||
return ($xtile, $ytile);
|
||||
}
|
||||
|
||||
my ($minlon, $minlat, $maxlon, $maxlat, $z, $x, $y) = @ARGV;
|
||||
|
||||
my ($x1, $y1) = getTileNumber($maxlat, $minlon, $z);
|
||||
my ($x2, $y2) = getTileNumber($minlat, $maxlon, $z);
|
||||
|
||||
if ($x >= $x1 && $x <= $x2 && $y >= $y1 && $y <= $y2) {
|
||||
while (<STDIN>) {
|
||||
print;
|
||||
}
|
||||
} else {
|
||||
while (<STDIN>) {
|
||||
|
||||
}
|
||||
}
|
274
geojson.cpp
274
geojson.cpp
@ -35,84 +35,16 @@
|
||||
#include "options.hpp"
|
||||
#include "serial.hpp"
|
||||
#include "text.hpp"
|
||||
#include "read_json.hpp"
|
||||
#include "mvt.hpp"
|
||||
|
||||
#define GEOM_POINT 0 /* array of positions */
|
||||
#define GEOM_MULTIPOINT 1 /* array of arrays of positions */
|
||||
#define GEOM_LINESTRING 2 /* array of arrays of positions */
|
||||
#define GEOM_MULTILINESTRING 3 /* array of arrays of arrays of positions */
|
||||
#define GEOM_POLYGON 4 /* array of arrays of arrays of positions */
|
||||
#define GEOM_MULTIPOLYGON 5 /* array of arrays of arrays of arrays of positions */
|
||||
#define GEOM_TYPES 6
|
||||
static long long parse_geometry1(int t, json_object *j, long long *bbox, drawvec &geom, int op, const char *fname, int line, int *initialized, unsigned *initial_x, unsigned *initial_y, json_object *feature, long long &prev, long long &offset, bool &has_prev) {
|
||||
parse_geometry(t, j, geom, op, fname, line, feature);
|
||||
|
||||
static const char *geometry_names[GEOM_TYPES] = {
|
||||
"Point", "MultiPoint", "LineString", "MultiLineString", "Polygon", "MultiPolygon",
|
||||
};
|
||||
|
||||
static int geometry_within[GEOM_TYPES] = {
|
||||
-1, /* point */
|
||||
GEOM_POINT, /* multipoint */
|
||||
GEOM_POINT, /* linestring */
|
||||
GEOM_LINESTRING, /* multilinestring */
|
||||
GEOM_LINESTRING, /* polygon */
|
||||
GEOM_POLYGON, /* multipolygon */
|
||||
};
|
||||
|
||||
static int mb_geometry[GEOM_TYPES] = {
|
||||
VT_POINT, VT_POINT, VT_LINE, VT_LINE, VT_POLYGON, VT_POLYGON,
|
||||
};
|
||||
|
||||
void json_context(json_object *j) {
|
||||
char *s = json_stringify(j);
|
||||
|
||||
if (strlen(s) >= 500) {
|
||||
sprintf(s + 497, "...");
|
||||
}
|
||||
|
||||
fprintf(stderr, "In JSON object %s\n", s);
|
||||
free(s); // stringify
|
||||
}
|
||||
|
||||
long long parse_geometry(int t, json_object *j, long long *bbox, drawvec &out, int op, const char *fname, int line, int *initialized, unsigned *initial_x, unsigned *initial_y, json_object *feature, long long &prev, long long &offset, bool &has_prev) {
|
||||
long long g = 0;
|
||||
|
||||
if (j == NULL || j->type != JSON_ARRAY) {
|
||||
fprintf(stderr, "%s:%d: expected array for type %d\n", fname, line, t);
|
||||
json_context(feature);
|
||||
return g;
|
||||
}
|
||||
|
||||
int within = geometry_within[t];
|
||||
if (within >= 0) {
|
||||
size_t i;
|
||||
for (i = 0; i < j->length; i++) {
|
||||
if (within == GEOM_POINT) {
|
||||
if (i == 0 || mb_geometry[t] == GEOM_MULTIPOINT) {
|
||||
op = VT_MOVETO;
|
||||
} else {
|
||||
op = VT_LINETO;
|
||||
}
|
||||
}
|
||||
|
||||
g += parse_geometry(within, j->array[i], bbox, out, op, fname, line, initialized, initial_x, initial_y, feature, prev, offset, has_prev);
|
||||
}
|
||||
} else {
|
||||
if (j->length >= 2 && j->array[0]->type == JSON_NUMBER && j->array[1]->type == JSON_NUMBER) {
|
||||
long long x, y;
|
||||
double lon = j->array[0]->number;
|
||||
double lat = j->array[1]->number;
|
||||
projection->project(lon, lat, 32, &x, &y);
|
||||
|
||||
if (j->length > 2) {
|
||||
static int warned = 0;
|
||||
|
||||
if (!warned) {
|
||||
fprintf(stderr, "%s:%d: ignoring dimensions beyond two\n", fname, line);
|
||||
json_context(j);
|
||||
json_context(feature);
|
||||
warned = 1;
|
||||
}
|
||||
}
|
||||
for (size_t i = 0; i < geom.size(); i++) {
|
||||
if (geom[i].op == VT_MOVETO || geom[i].op == VT_LINETO) {
|
||||
long long x = geom[i].x;
|
||||
long long y = geom[i].y;
|
||||
|
||||
if (additional[A_DETECT_WRAPAROUND]) {
|
||||
x += offset;
|
||||
@ -155,33 +87,15 @@ long long parse_geometry(int t, json_object *j, long long *bbox, drawvec &out, i
|
||||
*initialized = 1;
|
||||
}
|
||||
|
||||
draw d(op, (x >> geometry_scale), (y >> geometry_scale));
|
||||
out.push_back(d);
|
||||
g++;
|
||||
} else {
|
||||
fprintf(stderr, "%s:%d: malformed point\n", fname, line);
|
||||
json_context(j);
|
||||
json_context(feature);
|
||||
geom[i].x = x >> geometry_scale;
|
||||
geom[i].y = y >> geometry_scale;
|
||||
}
|
||||
}
|
||||
|
||||
if (t == GEOM_POLYGON) {
|
||||
// Note that this is not using the correct meaning of closepath.
|
||||
//
|
||||
// We are using it here to close an entire Polygon, to distinguish
|
||||
// the Polygons within a MultiPolygon from each other.
|
||||
//
|
||||
// This will be undone in fix_polygon(), which needs to know which
|
||||
// rings come from which Polygons so that it can make the winding order
|
||||
// of the outer ring be the opposite of the order of the inner rings.
|
||||
|
||||
out.push_back(draw(VT_CLOSEPATH, 0, 0));
|
||||
}
|
||||
|
||||
return g;
|
||||
return geom.size();
|
||||
}
|
||||
|
||||
int serialize_geometry(json_object *geometry, json_object *properties, json_object *id, const char *reading, int line, volatile long long *layer_seq, volatile long long *progress_seq, long long *metapos, long long *geompos, long long *indexpos, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, FILE *metafile, FILE *geomfile, FILE *indexfile, struct memfile *poolfile, struct memfile *treefile, const char *fname, int basezoom, int layer, double droprate, long long *file_bbox, json_object *tippecanoe, int segment, int *initialized, unsigned *initial_x, unsigned *initial_y, struct reader *readers, int maxzoom, json_object *feature, std::map<std::string, layermap_entry> *layermap, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, double *dist_sum, size_t *dist_count, bool want_dist) {
|
||||
int serialize_geometry(json_object *geometry, json_object *properties, json_object *id, const char *reading, int line, volatile long long *layer_seq, volatile long long *progress_seq, long long *metapos, long long *geompos, long long *indexpos, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, FILE *metafile, FILE *geomfile, FILE *indexfile, struct memfile *poolfile, struct memfile *treefile, const char *fname, int basezoom, int layer, double droprate, long long *file_bbox, json_object *tippecanoe, int segment, int *initialized, unsigned *initial_x, unsigned *initial_y, struct reader *readers, int maxzoom, json_object *feature, std::map<std::string, layermap_entry> *layermap, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, double *dist_sum, size_t *dist_count, bool want_dist, bool filters) {
|
||||
json_object *geometry_type = json_hash_get(geometry, "type");
|
||||
if (geometry_type == NULL) {
|
||||
static int warned = 0;
|
||||
@ -286,36 +200,38 @@ int serialize_geometry(json_object *geometry, json_object *properties, json_obje
|
||||
|
||||
long long bbox[] = {LLONG_MAX, LLONG_MAX, LLONG_MIN, LLONG_MIN};
|
||||
|
||||
if (tippecanoe_layername.size() != 0) {
|
||||
if (layermap->count(tippecanoe_layername) == 0) {
|
||||
layermap->insert(std::pair<std::string, layermap_entry>(tippecanoe_layername, layermap_entry(layermap->size())));
|
||||
}
|
||||
if (!filters) {
|
||||
if (tippecanoe_layername.size() != 0) {
|
||||
if (layermap->count(tippecanoe_layername) == 0) {
|
||||
layermap->insert(std::pair<std::string, layermap_entry>(tippecanoe_layername, layermap_entry(layermap->size())));
|
||||
}
|
||||
|
||||
auto ai = layermap->find(tippecanoe_layername);
|
||||
if (ai != layermap->end()) {
|
||||
layer = ai->second.id;
|
||||
layername = tippecanoe_layername;
|
||||
auto ai = layermap->find(tippecanoe_layername);
|
||||
if (ai != layermap->end()) {
|
||||
layer = ai->second.id;
|
||||
layername = tippecanoe_layername;
|
||||
|
||||
if (mb_geometry[t] == VT_POINT) {
|
||||
ai->second.points++;
|
||||
} else if (mb_geometry[t] == VT_LINE) {
|
||||
ai->second.lines++;
|
||||
} else if (mb_geometry[t] == VT_POLYGON) {
|
||||
ai->second.polygons++;
|
||||
if (mb_geometry[t] == VT_POINT) {
|
||||
ai->second.points++;
|
||||
} else if (mb_geometry[t] == VT_LINE) {
|
||||
ai->second.lines++;
|
||||
} else if (mb_geometry[t] == VT_POLYGON) {
|
||||
ai->second.polygons++;
|
||||
}
|
||||
} else {
|
||||
fprintf(stderr, "Internal error: can't find layer name %s\n", tippecanoe_layername.c_str());
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
} else {
|
||||
fprintf(stderr, "Internal error: can't find layer name %s\n", tippecanoe_layername.c_str());
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
} else {
|
||||
auto fk = layermap->find(layername);
|
||||
if (fk != layermap->end()) {
|
||||
if (mb_geometry[t] == VT_POINT) {
|
||||
fk->second.points++;
|
||||
} else if (mb_geometry[t] == VT_LINE) {
|
||||
fk->second.lines++;
|
||||
} else if (mb_geometry[t] == VT_POLYGON) {
|
||||
fk->second.polygons++;
|
||||
auto fk = layermap->find(layername);
|
||||
if (fk != layermap->end()) {
|
||||
if (mb_geometry[t] == VT_POINT) {
|
||||
fk->second.points++;
|
||||
} else if (mb_geometry[t] == VT_LINE) {
|
||||
fk->second.lines++;
|
||||
} else if (mb_geometry[t] == VT_POLYGON) {
|
||||
fk->second.polygons++;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -343,98 +259,24 @@ int serialize_geometry(json_object *geometry, json_object *properties, json_obje
|
||||
continue;
|
||||
}
|
||||
|
||||
type_and_string tas;
|
||||
tas.string = s;
|
||||
tas.type = -1;
|
||||
int type = -1;
|
||||
std::string val;
|
||||
stringify_value(properties->values[i], type, val, reading, line, feature, properties->keys[i]->string, attribute_types);
|
||||
|
||||
metakey[m] = properties->keys[i]->string;
|
||||
if (type >= 0) {
|
||||
metakey[m] = properties->keys[i]->string;
|
||||
metatype[m] = type;
|
||||
metaval[m] = val;
|
||||
m++;
|
||||
|
||||
if (properties->values[i] != NULL) {
|
||||
int vt = properties->values[i]->type;
|
||||
std::string val;
|
||||
|
||||
if (vt == JSON_STRING || vt == JSON_NUMBER) {
|
||||
val = properties->values[i]->string;
|
||||
} else if (vt == JSON_TRUE) {
|
||||
val = "true";
|
||||
} else if (vt == JSON_FALSE) {
|
||||
val = "false";
|
||||
} else if (vt == JSON_NULL) {
|
||||
val = "null";
|
||||
} else {
|
||||
const char *v = json_stringify(properties->values[i]);
|
||||
val = std::string(v);
|
||||
free((void *) v); // stringify
|
||||
}
|
||||
|
||||
auto a = (*attribute_types).find(properties->keys[i]->string);
|
||||
if (a != attribute_types->end()) {
|
||||
if (a->second == mvt_string) {
|
||||
vt = JSON_STRING;
|
||||
} else if (a->second == mvt_float) {
|
||||
vt = JSON_NUMBER;
|
||||
val = std::to_string(atof(val.c_str()));
|
||||
} else if (a->second == mvt_int) {
|
||||
vt = JSON_NUMBER;
|
||||
if (val.size() == 0) {
|
||||
val = "0";
|
||||
}
|
||||
|
||||
for (size_t ii = 0; ii < val.size(); ii++) {
|
||||
char c = val[ii];
|
||||
if (c < '0' || c > '9') {
|
||||
val = std::to_string(round(atof(val.c_str())));
|
||||
break;
|
||||
}
|
||||
}
|
||||
} else if (a->second == mvt_bool) {
|
||||
if (val == "false" || val == "0" || val == "null" || val.size() == 0) {
|
||||
vt = JSON_FALSE;
|
||||
val = "false";
|
||||
} else {
|
||||
vt = JSON_TRUE;
|
||||
val = "true";
|
||||
}
|
||||
} else {
|
||||
fprintf(stderr, "Can't happen: attribute type %d\n", a->second);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
|
||||
if (vt == JSON_STRING) {
|
||||
tas.type = metatype[m] = mvt_string;
|
||||
metaval[m] = val;
|
||||
std::string err = check_utf8(metaval[m]);
|
||||
if (err != "") {
|
||||
fprintf(stderr, "%s:%d: %s\n", reading, line, err.c_str());
|
||||
json_context(feature);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
m++;
|
||||
} else if (vt == JSON_NUMBER) {
|
||||
tas.type = metatype[m] = mvt_double;
|
||||
metaval[m] = val;
|
||||
m++;
|
||||
} else if (vt == JSON_TRUE || vt == JSON_FALSE) {
|
||||
tas.type = metatype[m] = mvt_bool;
|
||||
metaval[m] = val;
|
||||
m++;
|
||||
} else if (vt == JSON_NULL) {
|
||||
;
|
||||
} else {
|
||||
tas.type = metatype[m] = mvt_string;
|
||||
metaval[m] = val;
|
||||
m++;
|
||||
}
|
||||
}
|
||||
|
||||
if (tas.type >= 0) {
|
||||
type_and_string attrib;
|
||||
attrib.type = metatype[m - 1];
|
||||
attrib.string = metaval[m - 1];
|
||||
|
||||
auto fk = layermap->find(layername);
|
||||
add_to_file_keys(fk->second.file_keys, metakey[m - 1], attrib);
|
||||
if (!filters) {
|
||||
auto fk = layermap->find(layername);
|
||||
add_to_file_keys(fk->second.file_keys, metakey[m - 1], attrib);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -444,7 +286,7 @@ int serialize_geometry(json_object *geometry, json_object *properties, json_obje
|
||||
long long offset = 0;
|
||||
|
||||
drawvec dv;
|
||||
long long g = parse_geometry(t, coordinates, bbox, dv, VT_MOVETO, fname, line, initialized, initial_x, initial_y, feature, prev, offset, has_prev);
|
||||
long long g = parse_geometry1(t, coordinates, bbox, dv, VT_MOVETO, fname, line, initialized, initial_x, initial_y, feature, prev, offset, has_prev);
|
||||
if (mb_geometry[t] == VT_POLYGON) {
|
||||
dv = fix_polygon(dv);
|
||||
}
|
||||
@ -558,6 +400,8 @@ int serialize_geometry(json_object *geometry, json_object *properties, json_obje
|
||||
|
||||
if (additional[A_DROP_DENSEST_AS_NEEDED] || additional[A_CALCULATE_FEATURE_DENSITY] || additional[A_INCREASE_GAMMA_AS_NEEDED] || uses_gamma) {
|
||||
sf.index = bbox_index;
|
||||
} else {
|
||||
sf.index = 0;
|
||||
}
|
||||
|
||||
if (inline_meta) {
|
||||
@ -626,7 +470,7 @@ void check_crs(json_object *j, const char *reading) {
|
||||
}
|
||||
}
|
||||
|
||||
void parse_json(json_pull *jp, const char *reading, volatile long long *layer_seq, volatile long long *progress_seq, long long *metapos, long long *geompos, long long *indexpos, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, FILE *metafile, FILE *geomfile, FILE *indexfile, struct memfile *poolfile, struct memfile *treefile, char *fname, int basezoom, int layer, double droprate, long long *file_bbox, int segment, int *initialized, unsigned *initial_x, unsigned *initial_y, struct reader *readers, int maxzoom, std::map<std::string, layermap_entry> *layermap, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, double *dist_sum, size_t *dist_count, bool want_dist) {
|
||||
void parse_json(json_pull *jp, const char *reading, volatile long long *layer_seq, volatile long long *progress_seq, long long *metapos, long long *geompos, long long *indexpos, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, FILE *metafile, FILE *geomfile, FILE *indexfile, struct memfile *poolfile, struct memfile *treefile, char *fname, int basezoom, int layer, double droprate, long long *file_bbox, int segment, int *initialized, unsigned *initial_x, unsigned *initial_y, struct reader *readers, int maxzoom, std::map<std::string, layermap_entry> *layermap, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, double *dist_sum, size_t *dist_count, bool want_dist, bool filters) {
|
||||
long long found_hashes = 0;
|
||||
long long found_features = 0;
|
||||
long long found_geometries = 0;
|
||||
@ -694,7 +538,7 @@ void parse_json(json_pull *jp, const char *reading, volatile long long *layer_se
|
||||
}
|
||||
found_geometries++;
|
||||
|
||||
serialize_geometry(j, NULL, NULL, reading, jp->line, layer_seq, progress_seq, metapos, geompos, indexpos, exclude, include, exclude_all, metafile, geomfile, indexfile, poolfile, treefile, fname, basezoom, layer, droprate, file_bbox, NULL, segment, initialized, initial_x, initial_y, readers, maxzoom, j, layermap, layername, uses_gamma, attribute_types, dist_sum, dist_count, want_dist);
|
||||
serialize_geometry(j, NULL, NULL, reading, jp->line, layer_seq, progress_seq, metapos, geompos, indexpos, exclude, include, exclude_all, metafile, geomfile, indexfile, poolfile, treefile, fname, basezoom, layer, droprate, file_bbox, NULL, segment, initialized, initial_x, initial_y, readers, maxzoom, j, layermap, layername, uses_gamma, attribute_types, dist_sum, dist_count, want_dist, filters);
|
||||
json_free(j);
|
||||
continue;
|
||||
}
|
||||
@ -737,10 +581,10 @@ void parse_json(json_pull *jp, const char *reading, volatile long long *layer_se
|
||||
if (geometries != NULL) {
|
||||
size_t g;
|
||||
for (g = 0; g < geometries->length; g++) {
|
||||
serialize_geometry(geometries->array[g], properties, id, reading, jp->line, layer_seq, progress_seq, metapos, geompos, indexpos, exclude, include, exclude_all, metafile, geomfile, indexfile, poolfile, treefile, fname, basezoom, layer, droprate, file_bbox, tippecanoe, segment, initialized, initial_x, initial_y, readers, maxzoom, j, layermap, layername, uses_gamma, attribute_types, dist_sum, dist_count, want_dist);
|
||||
serialize_geometry(geometries->array[g], properties, id, reading, jp->line, layer_seq, progress_seq, metapos, geompos, indexpos, exclude, include, exclude_all, metafile, geomfile, indexfile, poolfile, treefile, fname, basezoom, layer, droprate, file_bbox, tippecanoe, segment, initialized, initial_x, initial_y, readers, maxzoom, j, layermap, layername, uses_gamma, attribute_types, dist_sum, dist_count, want_dist, filters);
|
||||
}
|
||||
} else {
|
||||
serialize_geometry(geometry, properties, id, reading, jp->line, layer_seq, progress_seq, metapos, geompos, indexpos, exclude, include, exclude_all, metafile, geomfile, indexfile, poolfile, treefile, fname, basezoom, layer, droprate, file_bbox, tippecanoe, segment, initialized, initial_x, initial_y, readers, maxzoom, j, layermap, layername, uses_gamma, attribute_types, dist_sum, dist_count, want_dist);
|
||||
serialize_geometry(geometry, properties, id, reading, jp->line, layer_seq, progress_seq, metapos, geompos, indexpos, exclude, include, exclude_all, metafile, geomfile, indexfile, poolfile, treefile, fname, basezoom, layer, droprate, file_bbox, tippecanoe, segment, initialized, initial_x, initial_y, readers, maxzoom, j, layermap, layername, uses_gamma, attribute_types, dist_sum, dist_count, want_dist, filters);
|
||||
}
|
||||
|
||||
json_free(j);
|
||||
@ -752,7 +596,7 @@ void parse_json(json_pull *jp, const char *reading, volatile long long *layer_se
|
||||
void *run_parse_json(void *v) {
|
||||
struct parse_json_args *pja = (struct parse_json_args *) v;
|
||||
|
||||
parse_json(pja->jp, pja->reading, pja->layer_seq, pja->progress_seq, pja->metapos, pja->geompos, pja->indexpos, pja->exclude, pja->include, pja->exclude_all, pja->metafile, pja->geomfile, pja->indexfile, pja->poolfile, pja->treefile, pja->fname, pja->basezoom, pja->layer, pja->droprate, pja->file_bbox, pja->segment, pja->initialized, pja->initial_x, pja->initial_y, pja->readers, pja->maxzoom, pja->layermap, *pja->layername, pja->uses_gamma, pja->attribute_types, pja->dist_sum, pja->dist_count, pja->want_dist);
|
||||
parse_json(pja->jp, pja->reading, pja->layer_seq, pja->progress_seq, pja->metapos, pja->geompos, pja->indexpos, pja->exclude, pja->include, pja->exclude_all, pja->metafile, pja->geomfile, pja->indexfile, pja->poolfile, pja->treefile, pja->fname, pja->basezoom, pja->layer, pja->droprate, pja->file_bbox, pja->segment, pja->initialized, pja->initial_x, pja->initial_y, pja->readers, pja->maxzoom, pja->layermap, *pja->layername, pja->uses_gamma, pja->attribute_types, pja->dist_sum, pja->dist_count, pja->want_dist, pja->filters);
|
||||
|
||||
return NULL;
|
||||
}
|
||||
|
@ -42,12 +42,13 @@ struct parse_json_args {
|
||||
double *dist_sum;
|
||||
size_t *dist_count;
|
||||
bool want_dist;
|
||||
bool filters;
|
||||
};
|
||||
|
||||
struct json_pull *json_begin_map(char *map, long long len);
|
||||
void json_end_map(struct json_pull *jp);
|
||||
|
||||
void parse_json(json_pull *jp, const char *reading, volatile long long *layer_seq, volatile long long *progress_seq, long long *metapos, long long *geompos, long long *indexpos, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, FILE *metafile, FILE *geomfile, FILE *indexfile, struct memfile *poolfile, struct memfile *treefile, char *fname, int basezoom, int layer, double droprate, long long *file_bbox, int segment, int *initialized, unsigned *initial_x, unsigned *initial_y, struct reader *readers, int maxzoom, std::map<std::string, layermap_entry> *layermap, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, double *dist_sum, size_t *dist_count, bool want_dist);
|
||||
void parse_json(json_pull *jp, const char *reading, volatile long long *layer_seq, volatile long long *progress_seq, long long *metapos, long long *geompos, long long *indexpos, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, FILE *metafile, FILE *geomfile, FILE *indexfile, struct memfile *poolfile, struct memfile *treefile, char *fname, int basezoom, int layer, double droprate, long long *file_bbox, int segment, int *initialized, unsigned *initial_x, unsigned *initial_y, struct reader *readers, int maxzoom, std::map<std::string, layermap_entry> *layermap, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, double *dist_sum, size_t *dist_count, bool want_dist, bool filters);
|
||||
void *run_parse_json(void *v);
|
||||
|
||||
#endif
|
||||
|
13
geometry.cpp
13
geometry.cpp
@ -3,6 +3,7 @@
|
||||
#include <string>
|
||||
#include <stack>
|
||||
#include <vector>
|
||||
#include <map>
|
||||
#include <algorithm>
|
||||
#include <cstdio>
|
||||
#include <unistd.h>
|
||||
@ -22,7 +23,7 @@
|
||||
static int pnpoly(drawvec &vert, size_t start, size_t nvert, long long testx, long long testy);
|
||||
static int clip(double *x0, double *y0, double *x1, double *y1, double xmin, double ymin, double xmax, double ymax);
|
||||
|
||||
drawvec decode_geometry(FILE *meta, long long *geompos, int z, unsigned tx, unsigned ty, int detail, long long *bbox, unsigned initial_x, unsigned initial_y) {
|
||||
drawvec decode_geometry(FILE *meta, long long *geompos, int z, unsigned tx, unsigned ty, long long *bbox, unsigned initial_x, unsigned initial_y) {
|
||||
drawvec out;
|
||||
|
||||
bbox[0] = LLONG_MAX;
|
||||
@ -506,7 +507,7 @@ drawvec simple_clip_poly(drawvec &geom, long long minx, long long miny, long lon
|
||||
return out;
|
||||
}
|
||||
|
||||
drawvec simple_clip_poly(drawvec &geom, int z, int detail, int buffer) {
|
||||
drawvec simple_clip_poly(drawvec &geom, int z, int buffer) {
|
||||
long long area = 1LL << (32 - z);
|
||||
long long clip_buffer = buffer * area / 256;
|
||||
|
||||
@ -592,7 +593,7 @@ drawvec reduce_tiny_poly(drawvec &geom, int z, int detail, bool *reduced, double
|
||||
return out;
|
||||
}
|
||||
|
||||
drawvec clip_point(drawvec &geom, int z, int detail, long long buffer) {
|
||||
drawvec clip_point(drawvec &geom, int z, long long buffer) {
|
||||
drawvec out;
|
||||
|
||||
long long min = 0;
|
||||
@ -610,7 +611,7 @@ drawvec clip_point(drawvec &geom, int z, int detail, long long buffer) {
|
||||
return out;
|
||||
}
|
||||
|
||||
int quick_check(long long *bbox, int z, int detail, long long buffer) {
|
||||
int quick_check(long long *bbox, int z, long long buffer) {
|
||||
long long min = 0;
|
||||
long long area = 1LL << (32 - z);
|
||||
|
||||
@ -634,7 +635,7 @@ int quick_check(long long *bbox, int z, int detail, long long buffer) {
|
||||
return 2;
|
||||
}
|
||||
|
||||
bool point_within_tile(long long x, long long y, int z, int detail, long long buffer) {
|
||||
bool point_within_tile(long long x, long long y, int z, long long buffer) {
|
||||
// No adjustment for buffer, because the point must be
|
||||
// strictly within the tile to appear exactly once
|
||||
|
||||
@ -643,7 +644,7 @@ bool point_within_tile(long long x, long long y, int z, int detail, long long bu
|
||||
return x >= 0 && y >= 0 && x < area && y < area;
|
||||
}
|
||||
|
||||
drawvec clip_lines(drawvec &geom, int z, int detail, long long buffer) {
|
||||
drawvec clip_lines(drawvec &geom, int z, long long buffer) {
|
||||
drawvec out;
|
||||
|
||||
long long min = 0;
|
||||
|
12
geometry.hpp
12
geometry.hpp
@ -55,18 +55,18 @@ struct draw {
|
||||
|
||||
typedef std::vector<draw> drawvec;
|
||||
|
||||
drawvec decode_geometry(FILE *meta, long long *geompos, int z, unsigned tx, unsigned ty, int detail, long long *bbox, unsigned initial_x, unsigned initial_y);
|
||||
drawvec decode_geometry(FILE *meta, long long *geompos, int z, unsigned tx, unsigned ty, long long *bbox, unsigned initial_x, unsigned initial_y);
|
||||
void to_tile_scale(drawvec &geom, int z, int detail);
|
||||
drawvec remove_noop(drawvec geom, int type, int shift);
|
||||
drawvec clip_point(drawvec &geom, int z, int detail, long long buffer);
|
||||
drawvec clip_point(drawvec &geom, int z, long long buffer);
|
||||
drawvec clean_or_clip_poly(drawvec &geom, int z, int detail, int buffer, bool clip);
|
||||
drawvec simple_clip_poly(drawvec &geom, int z, int detail, int buffer);
|
||||
drawvec simple_clip_poly(drawvec &geom, int z, int buffer);
|
||||
drawvec close_poly(drawvec &geom);
|
||||
drawvec reduce_tiny_poly(drawvec &geom, int z, int detail, bool *reduced, double *accum_area);
|
||||
drawvec clip_lines(drawvec &geom, int z, int detail, long long buffer);
|
||||
drawvec clip_lines(drawvec &geom, int z, long long buffer);
|
||||
drawvec stairstep(drawvec &geom, int z, int detail);
|
||||
bool point_within_tile(long long x, long long y, int z, int detail, long long buffer);
|
||||
int quick_check(long long *bbox, int z, int detail, long long buffer);
|
||||
bool point_within_tile(long long x, long long y, int z, long long buffer);
|
||||
int quick_check(long long *bbox, int z, long long buffer);
|
||||
drawvec simplify_lines(drawvec &geom, int z, int detail, bool mark_tile_bounds, double simplification, size_t retain);
|
||||
drawvec reorder_lines(drawvec &geom);
|
||||
drawvec fix_polygon(drawvec &geom);
|
||||
|
139
main.cpp
139
main.cpp
@ -2,6 +2,10 @@
|
||||
#include <mcheck.h>
|
||||
#endif
|
||||
|
||||
#ifdef __APPLE__
|
||||
#define _DARWIN_UNLIMITED_STREAMS
|
||||
#endif
|
||||
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <math.h>
|
||||
@ -20,6 +24,7 @@
|
||||
#include <sys/resource.h>
|
||||
#include <pthread.h>
|
||||
#include <getopt.h>
|
||||
#include <signal.h>
|
||||
#include <algorithm>
|
||||
#include <vector>
|
||||
#include <string>
|
||||
@ -156,7 +161,7 @@ void init_cpus() {
|
||||
long long fds[MAX_FILES];
|
||||
long long i;
|
||||
for (i = 0; i < MAX_FILES; i++) {
|
||||
fds[i] = open("/dev/null", O_RDONLY);
|
||||
fds[i] = open("/dev/null", O_RDONLY | O_CLOEXEC);
|
||||
if (fds[i] < 0) {
|
||||
break;
|
||||
}
|
||||
@ -369,7 +374,7 @@ void *run_sort(void *v) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
void do_read_parallel(char *map, long long len, long long initial_offset, const char *reading, struct reader *reader, volatile long long *progress_seq, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, char *fname, int basezoom, int source, int nlayers, std::vector<std::map<std::string, layermap_entry> > *layermaps, double droprate, int *initialized, unsigned *initial_x, unsigned *initial_y, int maxzoom, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, int separator, double *dist_sum, size_t *dist_count, bool want_dist) {
|
||||
void do_read_parallel(char *map, long long len, long long initial_offset, const char *reading, struct reader *reader, volatile long long *progress_seq, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, char *fname, int basezoom, int source, int nlayers, std::vector<std::map<std::string, layermap_entry> > *layermaps, double droprate, int *initialized, unsigned *initial_x, unsigned *initial_y, int maxzoom, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, int separator, double *dist_sum, size_t *dist_count, bool want_dist, bool filters) {
|
||||
long long segs[CPUS + 1];
|
||||
segs[0] = 0;
|
||||
segs[CPUS] = len;
|
||||
@ -435,6 +440,7 @@ void do_read_parallel(char *map, long long len, long long initial_offset, const
|
||||
pja[i].dist_sum = &(dist_sums[i]);
|
||||
pja[i].dist_count = &(dist_counts[i]);
|
||||
pja[i].want_dist = want_dist;
|
||||
pja[i].filters = filters;
|
||||
|
||||
if (pthread_create(&pthreads[i], NULL, run_parse_json, &pja[i]) != 0) {
|
||||
perror("pthread_create");
|
||||
@ -486,6 +492,7 @@ struct read_parallel_arg {
|
||||
double *dist_sum;
|
||||
size_t *dist_count;
|
||||
bool want_dist;
|
||||
bool filters;
|
||||
};
|
||||
|
||||
void *run_read_parallel(void *v) {
|
||||
@ -507,7 +514,7 @@ void *run_read_parallel(void *v) {
|
||||
}
|
||||
madvise(map, rpa->len, MADV_RANDOM); // sequential, but from several pointers at once
|
||||
|
||||
do_read_parallel(map, rpa->len, rpa->offset, rpa->reading, rpa->reader, rpa->progress_seq, rpa->exclude, rpa->include, rpa->exclude_all, rpa->fname, rpa->basezoom, rpa->source, rpa->nlayers, rpa->layermaps, rpa->droprate, rpa->initialized, rpa->initial_x, rpa->initial_y, rpa->maxzoom, rpa->layername, rpa->uses_gamma, rpa->attribute_types, rpa->separator, rpa->dist_sum, rpa->dist_count, rpa->want_dist);
|
||||
do_read_parallel(map, rpa->len, rpa->offset, rpa->reading, rpa->reader, rpa->progress_seq, rpa->exclude, rpa->include, rpa->exclude_all, rpa->fname, rpa->basezoom, rpa->source, rpa->nlayers, rpa->layermaps, rpa->droprate, rpa->initialized, rpa->initial_x, rpa->initial_y, rpa->maxzoom, rpa->layername, rpa->uses_gamma, rpa->attribute_types, rpa->separator, rpa->dist_sum, rpa->dist_count, rpa->want_dist, rpa->filters);
|
||||
|
||||
madvise(map, rpa->len, MADV_DONTNEED);
|
||||
if (munmap(map, rpa->len) != 0) {
|
||||
@ -524,7 +531,7 @@ void *run_read_parallel(void *v) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
void start_parsing(int fd, FILE *fp, long long offset, long long len, volatile int *is_parsing, pthread_t *parallel_parser, bool &parser_created, const char *reading, struct reader *reader, volatile long long *progress_seq, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, char *fname, int basezoom, int source, int nlayers, std::vector<std::map<std::string, layermap_entry> > &layermaps, double droprate, int *initialized, unsigned *initial_x, unsigned *initial_y, int maxzoom, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, int separator, double *dist_sum, size_t *dist_count, bool want_dist) {
|
||||
void start_parsing(int fd, FILE *fp, long long offset, long long len, volatile int *is_parsing, pthread_t *parallel_parser, bool &parser_created, const char *reading, struct reader *reader, volatile long long *progress_seq, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, char *fname, int basezoom, int source, int nlayers, std::vector<std::map<std::string, layermap_entry> > &layermaps, double droprate, int *initialized, unsigned *initial_x, unsigned *initial_y, int maxzoom, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, int separator, double *dist_sum, size_t *dist_count, bool want_dist, bool filters) {
|
||||
// This has to kick off an intermediate thread to start the parser threads,
|
||||
// so the main thread can get back to reading the next input stage while
|
||||
// the intermediate thread waits for the completion of the parser threads.
|
||||
@ -566,6 +573,7 @@ void start_parsing(int fd, FILE *fp, long long offset, long long len, volatile i
|
||||
rpa->dist_sum = dist_sum;
|
||||
rpa->dist_count = dist_count;
|
||||
rpa->want_dist = want_dist;
|
||||
rpa->filters = filters;
|
||||
|
||||
if (pthread_create(parallel_parser, NULL, run_read_parallel, rpa) != 0) {
|
||||
perror("pthread_create");
|
||||
@ -594,23 +602,23 @@ void radix1(int *geomfds_in, int *indexfds_in, int inputs, int prefix, int split
|
||||
char indexname[strlen(tmpdir) + strlen("/index.XXXXXXXX") + 1];
|
||||
sprintf(indexname, "%s%s", tmpdir, "/index.XXXXXXXX");
|
||||
|
||||
geomfds[i] = mkstemp(geomname);
|
||||
geomfds[i] = mkstemp_cloexec(geomname);
|
||||
if (geomfds[i] < 0) {
|
||||
perror(geomname);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
indexfds[i] = mkstemp(indexname);
|
||||
indexfds[i] = mkstemp_cloexec(indexname);
|
||||
if (indexfds[i] < 0) {
|
||||
perror(indexname);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
geomfiles[i] = fopen(geomname, "wb");
|
||||
geomfiles[i] = fopen_oflag(geomname, "wb", O_WRONLY | O_CLOEXEC);
|
||||
if (geomfiles[i] == NULL) {
|
||||
perror(geomname);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
indexfiles[i] = fopen(indexname, "wb");
|
||||
indexfiles[i] = fopen_oflag(indexname, "wb", O_WRONLY | O_CLOEXEC);
|
||||
if (indexfiles[i] == NULL) {
|
||||
perror(indexname);
|
||||
exit(EXIT_FAILURE);
|
||||
@ -1024,7 +1032,7 @@ void choose_first_zoom(long long *file_bbox, struct reader *reader, unsigned *iz
|
||||
}
|
||||
}
|
||||
|
||||
int read_input(std::vector<source> &sources, char *fname, int &maxzoom, int minzoom, int basezoom, double basezoom_marker_width, sqlite3 *outdb, const char *outdir, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, double droprate, int buffer, const char *tmpdir, double gamma, int read_parallel, int forcetable, const char *attribution, bool uses_gamma, long long *file_bbox, const char *description, bool guess_maxzoom, std::map<std::string, int> const *attribute_types, const char *pgm) {
|
||||
int read_input(std::vector<source> &sources, char *fname, int maxzoom, int minzoom, int basezoom, double basezoom_marker_width, sqlite3 *outdb, const char *outdir, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, double droprate, int buffer, const char *tmpdir, double gamma, int read_parallel, int forcetable, const char *attribution, bool uses_gamma, long long *file_bbox, const char *prefilter, const char *postfilter, const char *description, bool guess_maxzoom, std::map<std::string, int> const *attribute_types, const char *pgm) {
|
||||
int ret = EXIT_SUCCESS;
|
||||
|
||||
struct reader reader[CPUS];
|
||||
@ -1043,33 +1051,33 @@ int read_input(std::vector<source> &sources, char *fname, int &maxzoom, int minz
|
||||
sprintf(geomname, "%s%s", tmpdir, "/geom.XXXXXXXX");
|
||||
sprintf(indexname, "%s%s", tmpdir, "/index.XXXXXXXX");
|
||||
|
||||
r->metafd = mkstemp(metaname);
|
||||
r->metafd = mkstemp_cloexec(metaname);
|
||||
if (r->metafd < 0) {
|
||||
perror(metaname);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
r->poolfd = mkstemp(poolname);
|
||||
r->poolfd = mkstemp_cloexec(poolname);
|
||||
if (r->poolfd < 0) {
|
||||
perror(poolname);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
r->treefd = mkstemp(treename);
|
||||
r->treefd = mkstemp_cloexec(treename);
|
||||
if (r->treefd < 0) {
|
||||
perror(treename);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
r->geomfd = mkstemp(geomname);
|
||||
r->geomfd = mkstemp_cloexec(geomname);
|
||||
if (r->geomfd < 0) {
|
||||
perror(geomname);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
r->indexfd = mkstemp(indexname);
|
||||
r->indexfd = mkstemp_cloexec(indexname);
|
||||
if (r->indexfd < 0) {
|
||||
perror(indexname);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
r->metafile = fopen(metaname, "wb");
|
||||
r->metafile = fopen_oflag(metaname, "wb", O_WRONLY | O_CLOEXEC);
|
||||
if (r->metafile == NULL) {
|
||||
perror(metaname);
|
||||
exit(EXIT_FAILURE);
|
||||
@ -1084,12 +1092,12 @@ int read_input(std::vector<source> &sources, char *fname, int &maxzoom, int minz
|
||||
perror(treename);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
r->geomfile = fopen(geomname, "wb");
|
||||
r->geomfile = fopen_oflag(geomname, "wb", O_WRONLY | O_CLOEXEC);
|
||||
if (r->geomfile == NULL) {
|
||||
perror(geomname);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
r->indexfile = fopen(indexname, "wb");
|
||||
r->indexfile = fopen_oflag(indexname, "wb", O_WRONLY | O_CLOEXEC);
|
||||
if (r->indexfile == NULL) {
|
||||
perror(indexname);
|
||||
exit(EXIT_FAILURE);
|
||||
@ -1125,9 +1133,10 @@ int read_input(std::vector<source> &sources, char *fname, int &maxzoom, int minz
|
||||
|
||||
volatile long long progress_seq = 0;
|
||||
|
||||
int initialized[CPUS];
|
||||
unsigned initial_x[CPUS], initial_y[CPUS];
|
||||
for (size_t i = 0; i < CPUS; i++) {
|
||||
// 2 * CPUS: One per reader thread, one per tiling thread
|
||||
int initialized[2 * CPUS];
|
||||
unsigned initial_x[2 * CPUS], initial_y[2 * CPUS];
|
||||
for (size_t i = 0; i < 2 * CPUS; i++) {
|
||||
initialized[i] = initial_x[i] = initial_y[i] = 0;
|
||||
}
|
||||
|
||||
@ -1201,7 +1210,7 @@ int read_input(std::vector<source> &sources, char *fname, int &maxzoom, int minz
|
||||
fd = 0;
|
||||
} else {
|
||||
reading = sources[source].file;
|
||||
fd = open(sources[source].file.c_str(), O_RDONLY);
|
||||
fd = open(sources[source].file.c_str(), O_RDONLY, O_CLOEXEC);
|
||||
if (fd < 0) {
|
||||
perror(sources[source].file.c_str());
|
||||
continue;
|
||||
@ -1252,7 +1261,7 @@ int read_input(std::vector<source> &sources, char *fname, int &maxzoom, int minz
|
||||
}
|
||||
|
||||
if (map != NULL && map != MAP_FAILED && read_parallel_this) {
|
||||
do_read_parallel(map, st.st_size - off, overall_offset, reading.c_str(), reader, &progress_seq, exclude, include, exclude_all, fname, basezoom, layer, nlayers, &layermaps, droprate, initialized, initial_x, initial_y, maxzoom, sources[layer].layer, uses_gamma, attribute_types, read_parallel_this, &dist_sum, &dist_count, guess_maxzoom);
|
||||
do_read_parallel(map, st.st_size - off, overall_offset, reading.c_str(), reader, &progress_seq, exclude, include, exclude_all, fname, basezoom, layer, nlayers, &layermaps, droprate, initialized, initial_x, initial_y, maxzoom, sources[layer].layer, uses_gamma, attribute_types, read_parallel_this, &dist_sum, &dist_count, guess_maxzoom, prefilter != NULL || postfilter != NULL);
|
||||
overall_offset += st.st_size - off;
|
||||
checkdisk(reader, CPUS);
|
||||
|
||||
@ -1284,7 +1293,7 @@ int read_input(std::vector<source> &sources, char *fname, int &maxzoom, int minz
|
||||
|
||||
char readname[strlen(tmpdir) + strlen("/read.XXXXXXXX") + 1];
|
||||
sprintf(readname, "%s%s", tmpdir, "/read.XXXXXXXX");
|
||||
int readfd = mkstemp(readname);
|
||||
int readfd = mkstemp_cloexec(readname);
|
||||
if (readfd < 0) {
|
||||
perror(readname);
|
||||
exit(EXIT_FAILURE);
|
||||
@ -1328,7 +1337,7 @@ int read_input(std::vector<source> &sources, char *fname, int &maxzoom, int minz
|
||||
}
|
||||
|
||||
fflush(readfp);
|
||||
start_parsing(readfd, readfp, initial_offset, ahead, &is_parsing, ¶llel_parser, parser_created, reading.c_str(), reader, &progress_seq, exclude, include, exclude_all, fname, basezoom, layer, nlayers, layermaps, droprate, initialized, initial_x, initial_y, maxzoom, sources[layer].layer, gamma != 0, attribute_types, read_parallel_this, &dist_sum, &dist_count, guess_maxzoom);
|
||||
start_parsing(readfd, readfp, initial_offset, ahead, &is_parsing, ¶llel_parser, parser_created, reading.c_str(), reader, &progress_seq, exclude, include, exclude_all, fname, basezoom, layer, nlayers, layermaps, droprate, initialized, initial_x, initial_y, maxzoom, sources[layer].layer, gamma != 0, attribute_types, read_parallel_this, &dist_sum, &dist_count, guess_maxzoom, prefilter != NULL || postfilter != NULL);
|
||||
|
||||
initial_offset += ahead;
|
||||
overall_offset += ahead;
|
||||
@ -1336,7 +1345,7 @@ int read_input(std::vector<source> &sources, char *fname, int &maxzoom, int minz
|
||||
ahead = 0;
|
||||
|
||||
sprintf(readname, "%s%s", tmpdir, "/read.XXXXXXXX");
|
||||
readfd = mkstemp(readname);
|
||||
readfd = mkstemp_cloexec(readname);
|
||||
if (readfd < 0) {
|
||||
perror(readname);
|
||||
exit(EXIT_FAILURE);
|
||||
@ -1365,7 +1374,7 @@ int read_input(std::vector<source> &sources, char *fname, int &maxzoom, int minz
|
||||
fflush(readfp);
|
||||
|
||||
if (ahead > 0) {
|
||||
start_parsing(readfd, readfp, initial_offset, ahead, &is_parsing, ¶llel_parser, parser_created, reading.c_str(), reader, &progress_seq, exclude, include, exclude_all, fname, basezoom, layer, nlayers, layermaps, droprate, initialized, initial_x, initial_y, maxzoom, sources[layer].layer, gamma != 0, attribute_types, read_parallel_this, &dist_sum, &dist_count, guess_maxzoom);
|
||||
start_parsing(readfd, readfp, initial_offset, ahead, &is_parsing, ¶llel_parser, parser_created, reading.c_str(), reader, &progress_seq, exclude, include, exclude_all, fname, basezoom, layer, nlayers, layermaps, droprate, initialized, initial_x, initial_y, maxzoom, sources[layer].layer, gamma != 0, attribute_types, read_parallel_this, &dist_sum, &dist_count, guess_maxzoom, prefilter != NULL || postfilter != NULL);
|
||||
|
||||
if (parser_created) {
|
||||
if (pthread_join(parallel_parser, NULL) != 0) {
|
||||
@ -1382,7 +1391,7 @@ int read_input(std::vector<source> &sources, char *fname, int &maxzoom, int minz
|
||||
|
||||
long long layer_seq = overall_offset;
|
||||
json_pull *jp = json_begin_file(fp);
|
||||
parse_json(jp, reading.c_str(), &layer_seq, &progress_seq, &reader[0].metapos, &reader[0].geompos, &reader[0].indexpos, exclude, include, exclude_all, reader[0].metafile, reader[0].geomfile, reader[0].indexfile, reader[0].poolfile, reader[0].treefile, fname, basezoom, layer, droprate, reader[0].file_bbox, 0, &initialized[0], &initial_x[0], &initial_y[0], reader, maxzoom, &layermaps[0], sources[layer].layer, uses_gamma, attribute_types, &dist_sum, &dist_count, guess_maxzoom);
|
||||
parse_json(jp, reading.c_str(), &layer_seq, &progress_seq, &reader[0].metapos, &reader[0].geompos, &reader[0].indexpos, exclude, include, exclude_all, reader[0].metafile, reader[0].geomfile, reader[0].indexfile, reader[0].poolfile, reader[0].treefile, fname, basezoom, layer, droprate, reader[0].file_bbox, 0, &initialized[0], &initial_x[0], &initial_y[0], reader, maxzoom, &layermaps[0], sources[layer].layer, uses_gamma, attribute_types, &dist_sum, &dist_count, guess_maxzoom, prefilter != NULL || postfilter != NULL);
|
||||
json_end(jp);
|
||||
overall_offset = layer_seq;
|
||||
checkdisk(reader, CPUS);
|
||||
@ -1434,19 +1443,23 @@ int read_input(std::vector<source> &sources, char *fname, int &maxzoom, int minz
|
||||
// but keep track of the offsets into it since we still need
|
||||
// segment+offset to find the data.
|
||||
|
||||
long long pool_off[CPUS];
|
||||
long long meta_off[CPUS];
|
||||
// 2 * CPUS: One per input thread, one per tiling thread
|
||||
long long pool_off[2 * CPUS];
|
||||
long long meta_off[2 * CPUS];
|
||||
for (size_t i = 0; i < 2 * CPUS; i++) {
|
||||
pool_off[i] = meta_off[i] = 0;
|
||||
}
|
||||
|
||||
char poolname[strlen(tmpdir) + strlen("/pool.XXXXXXXX") + 1];
|
||||
sprintf(poolname, "%s%s", tmpdir, "/pool.XXXXXXXX");
|
||||
|
||||
int poolfd = mkstemp(poolname);
|
||||
int poolfd = mkstemp_cloexec(poolname);
|
||||
if (poolfd < 0) {
|
||||
perror(poolname);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
FILE *poolfile = fopen(poolname, "wb");
|
||||
FILE *poolfile = fopen_oflag(poolname, "wb", O_WRONLY | O_CLOEXEC);
|
||||
if (poolfile == NULL) {
|
||||
perror(poolname);
|
||||
exit(EXIT_FAILURE);
|
||||
@ -1457,13 +1470,13 @@ int read_input(std::vector<source> &sources, char *fname, int &maxzoom, int minz
|
||||
char metaname[strlen(tmpdir) + strlen("/meta.XXXXXXXX") + 1];
|
||||
sprintf(metaname, "%s%s", tmpdir, "/meta.XXXXXXXX");
|
||||
|
||||
int metafd = mkstemp(metaname);
|
||||
int metafd = mkstemp_cloexec(metaname);
|
||||
if (metafd < 0) {
|
||||
perror(metaname);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
FILE *metafile = fopen(metaname, "wb");
|
||||
FILE *metafile = fopen_oflag(metaname, "wb", O_WRONLY | O_CLOEXEC);
|
||||
if (metafile == NULL) {
|
||||
perror(metaname);
|
||||
exit(EXIT_FAILURE);
|
||||
@ -1540,12 +1553,12 @@ int read_input(std::vector<source> &sources, char *fname, int &maxzoom, int minz
|
||||
char indexname[strlen(tmpdir) + strlen("/index.XXXXXXXX") + 1];
|
||||
sprintf(indexname, "%s%s", tmpdir, "/index.XXXXXXXX");
|
||||
|
||||
int indexfd = mkstemp(indexname);
|
||||
int indexfd = mkstemp_cloexec(indexname);
|
||||
if (indexfd < 0) {
|
||||
perror(indexname);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
FILE *indexfile = fopen(indexname, "wb");
|
||||
FILE *indexfile = fopen_oflag(indexname, "wb", O_WRONLY | O_CLOEXEC);
|
||||
if (indexfile == NULL) {
|
||||
perror(indexname);
|
||||
exit(EXIT_FAILURE);
|
||||
@ -1556,12 +1569,12 @@ int read_input(std::vector<source> &sources, char *fname, int &maxzoom, int minz
|
||||
char geomname[strlen(tmpdir) + strlen("/geom.XXXXXXXX") + 1];
|
||||
sprintf(geomname, "%s%s", tmpdir, "/geom.XXXXXXXX");
|
||||
|
||||
int geomfd = mkstemp(geomname);
|
||||
int geomfd = mkstemp_cloexec(geomname);
|
||||
if (geomfd < 0) {
|
||||
perror(geomname);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
FILE *geomfile = fopen(geomname, "wb");
|
||||
FILE *geomfile = fopen_oflag(geomname, "wb", O_WRONLY | O_CLOEXEC);
|
||||
if (geomfile == NULL) {
|
||||
perror(geomname);
|
||||
exit(EXIT_FAILURE);
|
||||
@ -1926,7 +1939,7 @@ int read_input(std::vector<source> &sources, char *fname, int &maxzoom, int minz
|
||||
}
|
||||
|
||||
unsigned midx = 0, midy = 0;
|
||||
int written = traverse_zooms(fd, size, meta, stringpool, &midx, &midy, maxzoom, minzoom, basezoom, outdb, outdir, droprate, buffer, fname, tmpdir, gamma, full_detail, low_detail, min_detail, meta_off, pool_off, initial_x, initial_y, simplification, layermaps);
|
||||
int written = traverse_zooms(fd, size, meta, stringpool, &midx, &midy, maxzoom, minzoom, basezoom, outdb, outdir, droprate, buffer, fname, tmpdir, gamma, full_detail, low_detail, min_detail, meta_off, pool_off, initial_x, initial_y, simplification, layermaps, prefilter, postfilter);
|
||||
|
||||
if (maxzoom != written) {
|
||||
fprintf(stderr, "\n\n\n*** NOTE TILES ONLY COMPLETE THROUGH ZOOM %d ***\n\n\n", written);
|
||||
@ -2064,6 +2077,8 @@ int main(int argc, char **argv) {
|
||||
const char *tmpdir = "/tmp";
|
||||
const char *attribution = NULL;
|
||||
std::vector<source> sources;
|
||||
const char *prefilter = NULL;
|
||||
const char *postfilter = NULL;
|
||||
bool guess_maxzoom = false;
|
||||
|
||||
std::set<std::string> exclude, include;
|
||||
@ -2158,6 +2173,10 @@ int main(int argc, char **argv) {
|
||||
{"Trying to correct bad source geometry", 0, 0, 0},
|
||||
{"detect-longitude-wraparound", no_argument, &additional[A_DETECT_WRAPAROUND], 1},
|
||||
|
||||
{"Filtering tile contents", 0, 0, 0},
|
||||
{"prefilter", required_argument, 0, 'C'},
|
||||
{"postfilter", required_argument, 0, 'c'},
|
||||
|
||||
{"Setting or disabling tile size limits", 0, 0, 0},
|
||||
{"maximum-tile-bytes", required_argument, 0, 'M'},
|
||||
{"no-feature-limit", no_argument, &prevent[P_FEATURE_LIMIT], 1},
|
||||
@ -2432,6 +2451,14 @@ int main(int argc, char **argv) {
|
||||
max_tile_size = atoll(optarg);
|
||||
break;
|
||||
|
||||
case 'c':
|
||||
postfilter = optarg;
|
||||
break;
|
||||
|
||||
case 'C':
|
||||
prefilter = optarg;
|
||||
break;
|
||||
|
||||
case 'T':
|
||||
set_attribute_type(attribute_types, optarg);
|
||||
break;
|
||||
@ -2470,8 +2497,17 @@ int main(int argc, char **argv) {
|
||||
}
|
||||
}
|
||||
|
||||
files_open_at_start = open("/dev/null", O_RDONLY);
|
||||
close(files_open_at_start);
|
||||
signal(SIGPIPE, SIG_IGN);
|
||||
|
||||
files_open_at_start = open("/dev/null", O_RDONLY | O_CLOEXEC);
|
||||
if (files_open_at_start < 0) {
|
||||
perror("open /dev/null");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (close(files_open_at_start) != 0) {
|
||||
perror("close");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
if (full_detail <= 0) {
|
||||
full_detail = 12;
|
||||
@ -2569,7 +2605,7 @@ int main(int argc, char **argv) {
|
||||
|
||||
long long file_bbox[4] = {UINT_MAX, UINT_MAX, 0, 0};
|
||||
|
||||
ret = read_input(sources, name ? name : out_mbtiles ? out_mbtiles : out_dir, maxzoom, minzoom, basezoom, basezoom_marker_width, outdb, out_dir, &exclude, &include, exclude_all, droprate, buffer, tmpdir, gamma, read_parallel, forcetable, attribution, gamma != 0, file_bbox, description, guess_maxzoom, &attribute_types, argv[0]);
|
||||
ret = read_input(sources, name ? name : out_mbtiles ? out_mbtiles : out_dir, maxzoom, minzoom, basezoom, basezoom_marker_width, outdb, out_dir, &exclude, &include, exclude_all, droprate, buffer, tmpdir, gamma, read_parallel, forcetable, attribution, gamma != 0, file_bbox, prefilter, postfilter, description, guess_maxzoom, &attribute_types, argv[0]);
|
||||
|
||||
if (outdb != NULL) {
|
||||
mbtiles_close(outdb, argv[0]);
|
||||
@ -2579,7 +2615,7 @@ int main(int argc, char **argv) {
|
||||
muntrace();
|
||||
#endif
|
||||
|
||||
i = open("/dev/null", O_RDONLY);
|
||||
i = open("/dev/null", O_RDONLY | O_CLOEXEC);
|
||||
// i < files_open_at_start is not an error, because reading from a pipe closes stdin
|
||||
if (i > files_open_at_start) {
|
||||
fprintf(stderr, "Internal error: did not close all files: %d\n", i);
|
||||
@ -2588,3 +2624,22 @@ int main(int argc, char **argv) {
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
// Create a temporary file from the given mkstemp()-style template
// (modified in place) and mark the descriptor close-on-exec so it is
// not inherited by child processes spawned later (e.g. filter pipelines).
// Returns the open descriptor, or a negative value if mkstemp() failed.
// Exits on failure to set the flag, since a leaked descriptor would be
// a silent correctness problem.
int mkstemp_cloexec(char *name) {
	int fd = mkstemp(name);
	if (fd < 0) {
		return fd;
	}
	if (fcntl(fd, F_SETFD, FD_CLOEXEC) < 0) {
		perror("cloexec for temporary file");
		exit(EXIT_FAILURE);
	}
	return fd;
}
|
||||
|
||||
FILE *fopen_oflag(const char *name, const char *mode, int oflag) {
|
||||
int fd = open(name, oflag);
|
||||
if (fd < 0) {
|
||||
return NULL;
|
||||
}
|
||||
return fdopen(fd, mode);
|
||||
}
|
||||
|
3
main.hpp
3
main.hpp
@ -22,6 +22,9 @@ extern size_t TEMP_FILES;
|
||||
|
||||
extern size_t max_tile_size;
|
||||
|
||||
int mkstemp_cloexec(char *name);
|
||||
FILE *fopen_oflag(const char *name, const char *mode, int oflag);
|
||||
|
||||
#define MAX_ZOOM 24
|
||||
|
||||
#endif
|
||||
|
@ -285,6 +285,64 @@ If you don't specify, it will use \fB\fC/tmp\fR\&.
|
||||
.IP \(bu 2
|
||||
\fB\fC\-v\fR or \fB\fC\-\-version\fR: Report Tippecanoe's version number
|
||||
.RE
|
||||
.SS Filters
|
||||
.RS
|
||||
.IP \(bu 2
|
||||
\fB\fC\-C\fR \fIcommand\fP or \fB\fC\-\-prefilter=\fR\fIcommand\fP: Specify a shell filter command to be run at the start of assembling each tile
|
||||
.IP \(bu 2
|
||||
\fB\fC\-c\fR \fIcommand\fP or \fB\fC\-\-postfilter=\fR\fIcommand\fP: Specify a shell filter command to be run at the end of assembling each tile
|
||||
.RE
|
||||
.PP
|
||||
The pre\- and post\-filter commands allow you to do optional filtering or transformation on the features of each tile
|
||||
as it is created. They are shell commands, run with the zoom level, X, and Y as the \fB\fC$1\fR, \fB\fC$2\fR, and \fB\fC$3\fR arguments.
|
||||
Future versions of Tippecanoe may add additional arguments for more context.
|
||||
.PP
|
||||
The features are provided to the filter
|
||||
as a series of newline\-delimited GeoJSON objects on the standard input, and \fB\fCtippecanoe\fR expects to read another
|
||||
set of GeoJSON features from the filter's standard output.
|
||||
.PP
|
||||
The prefilter receives the features at the highest available resolution, before line simplification,
|
||||
polygon topology repair, gamma calculation, dynamic feature dropping, or other internal processing.
|
||||
The postfilter receives the features at tile resolution, after simplification, cleaning, and dropping.
|
||||
.PP
|
||||
The layer name is provided as part of the \fB\fCtippecanoe\fR element of the feature and must be passed through
|
||||
to keep the feature in its correct layer. In the case of the prefilter, the \fB\fCtippecanoe\fR element may also
|
||||
contain \fB\fCindex\fR, \fB\fCsequence\fR, and \fB\fCextent\fR elements, which must be passed through for internal operations like
|
||||
\fB\fC\-\-drop\-densest\-as\-needed\fR, \fB\fC\-\-drop\-smallest\-as\-needed\fR, and \fB\fC\-\-preserve\-input\-order\fR to work.
|
||||
.SS Examples:
|
||||
.RS
|
||||
.IP \(bu 2
|
||||
Make a tileset of the Natural Earth countries to zoom level 5, and also copy the GeoJSON features
|
||||
to files in a \fB\fCtiles/z/x/y.geojson\fR directory hierarchy.
|
||||
.RE
|
||||
.PP
|
||||
.RS
|
||||
.nf
|
||||
tippecanoe \-o countries.mbtiles \-z5 \-C 'mkdir \-p tiles/$1/$2; tee tiles/$1/$2/$3.geojson' ne_10m_admin_0_countries.json
|
||||
.fi
|
||||
.RE
|
||||
.RS
|
||||
.IP \(bu 2
|
||||
Make a tileset of the Natural Earth countries to zoom level 5, but including only those tiles that
|
||||
intersect the bounding box of Germany \[la]https://www.flickr.com/places/info/23424829\[ra]\&.
|
||||
(The \fB\fClimit\-tiles\-to\-bbox\fR script is in the Tippecanoe source directory \[la]filters/limit-tiles-to-bbox\[ra]\&.)
|
||||
.RE
|
||||
.PP
|
||||
.RS
|
||||
.nf
|
||||
tippecanoe \-o countries.mbtiles \-z5 \-C './filters/limit\-tiles\-to\-bbox 5.8662 47.2702 15.0421 55.0581 $*' ne_10m_admin_0_countries.json
|
||||
.fi
|
||||
.RE
|
||||
.RS
|
||||
.IP \(bu 2
|
||||
Make a tileset of TIGER roads in Tippecanoe County, leaving out all but primary and secondary roads (as classified by TIGER \[la]https://www.census.gov/geo/reference/mtfcc.html\[ra]) below zoom level 11.
|
||||
.RE
|
||||
.PP
|
||||
.RS
|
||||
.nf
|
||||
tippecanoe \-o roads.mbtiles \-c 'if [ $1 \-lt 11 ]; then grep "\\"MTFCC\\": \\"S1[12]00\\""; else cat; fi' tl_2016_18157_roads.json
|
||||
.fi
|
||||
.RE
|
||||
.SH Environment
|
||||
.PP
|
||||
Tippecanoe ordinarily uses as many parallel threads as the operating system claims that CPUs are available.
|
||||
@ -600,5 +658,7 @@ resolutions.
|
||||
.IP \(bu 2
|
||||
\fB\fC\-l\fR \fIlayer\fP or \fB\fC\-\-layer=\fR\fIlayer\fP: Decode only layers with the specified names. (Multiple \fB\fC\-l\fR options can be specified.)
|
||||
.IP \(bu 2
|
||||
\fB\fC\-c\fR or \fB\fC\-\-tag\-layer\-and\-zoom\fR: Include each feature's layer and zoom level as part of its \fB\fCtippecanoe\fR object rather than as a FeatureCollection wrapper
|
||||
.IP \(bu 2
|
||||
\fB\fC\-f\fR or \fB\fC\-\-force\fR: Decode tiles even if polygon ring order or closure problems are detected
|
||||
.RE
|
||||
|
45
mbtiles.cpp
45
mbtiles.cpp
@ -85,26 +85,21 @@ void mbtiles_write_tile(sqlite3 *outdb, int z, int tx, int ty, const char *data,
|
||||
}
|
||||
}
|
||||
|
||||
static void quote(std::string *buf, const char *s) {
|
||||
char tmp[strlen(s) * 8 + 1];
|
||||
char *out = tmp;
|
||||
|
||||
for (; *s != '\0'; s++) {
|
||||
unsigned char ch = (unsigned char) *s;
|
||||
static void quote(std::string &buf, std::string const &s) {
|
||||
for (size_t i = 0; i < s.size(); i++) {
|
||||
unsigned char ch = s[i];
|
||||
|
||||
if (ch == '\\' || ch == '\"') {
|
||||
*out++ = '\\';
|
||||
*out++ = ch;
|
||||
buf.push_back('\\');
|
||||
buf.push_back(ch);
|
||||
} else if (ch < ' ') {
|
||||
sprintf(out, "\\u%04x", ch);
|
||||
out = out + strlen(out);
|
||||
char tmp[7];
|
||||
sprintf(tmp, "\\u%04x", ch);
|
||||
buf.append(std::string(tmp));
|
||||
} else {
|
||||
*out++ = ch;
|
||||
buf.push_back(ch);
|
||||
}
|
||||
}
|
||||
|
||||
*out = '\0';
|
||||
buf->append(tmp, strlen(tmp));
|
||||
}
|
||||
|
||||
void aprintf(std::string *buf, const char *format, ...) {
|
||||
@ -166,7 +161,7 @@ std::string tilestats(std::map<std::string, layermap_entry> const &layermap1) {
|
||||
out.append("\t\t{\n");
|
||||
|
||||
out.append("\t\t\t\"layer\": \"");
|
||||
quote(&out, layer.first.c_str());
|
||||
quote(out, layer.first.c_str());
|
||||
out.append("\",\n");
|
||||
|
||||
out.append("\t\t\t\"count\": ");
|
||||
@ -181,7 +176,7 @@ std::string tilestats(std::map<std::string, layermap_entry> const &layermap1) {
|
||||
}
|
||||
|
||||
out.append("\t\t\t\"geometry\": \"");
|
||||
quote(&out, geomtype.c_str());
|
||||
quote(out, geomtype.c_str());
|
||||
out.append("\",\n");
|
||||
|
||||
size_t attrib_count = layer.second.file_keys.size();
|
||||
@ -208,7 +203,7 @@ std::string tilestats(std::map<std::string, layermap_entry> const &layermap1) {
|
||||
out.append("\t\t\t\t{\n");
|
||||
|
||||
out.append("\t\t\t\t\t\"attribute\": \"");
|
||||
quote(&out, attribute.first.c_str());
|
||||
quote(out, attribute.first.c_str());
|
||||
out.append("\",\n");
|
||||
|
||||
size_t val_count = attribute.second.sample_values.size();
|
||||
@ -238,7 +233,7 @@ std::string tilestats(std::map<std::string, layermap_entry> const &layermap1) {
|
||||
}
|
||||
|
||||
out.append("\t\t\t\t\t\"type\": \"");
|
||||
quote(&out, type_str.c_str());
|
||||
quote(out, type_str.c_str());
|
||||
out.append("\",\n");
|
||||
|
||||
out.append("\t\t\t\t\t\"values\": [\n");
|
||||
@ -261,7 +256,7 @@ std::string tilestats(std::map<std::string, layermap_entry> const &layermap1) {
|
||||
|
||||
if (trunc.size() == value.string.size()) {
|
||||
out.append("\t\t\t\t\t\t\"");
|
||||
quote(&out, value.string.c_str());
|
||||
quote(out, value.string.c_str());
|
||||
out.append("\"");
|
||||
}
|
||||
}
|
||||
@ -427,7 +422,7 @@ void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fnam
|
||||
|
||||
auto fk = layermap.find(lnames[i]);
|
||||
aprintf(&buf, "{ \"id\": \"");
|
||||
quote(&buf, lnames[i].c_str());
|
||||
quote(buf, lnames[i]);
|
||||
aprintf(&buf, "\", \"description\": \"\", \"minzoom\": %d, \"maxzoom\": %d, \"fields\": {", fk->second.minzoom, fk->second.maxzoom);
|
||||
|
||||
bool first = true;
|
||||
@ -439,7 +434,7 @@ void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fnam
|
||||
}
|
||||
|
||||
aprintf(&buf, "\"");
|
||||
quote(&buf, j->first.c_str());
|
||||
quote(buf, j->first.c_str());
|
||||
|
||||
int type = 0;
|
||||
for (auto s : j->second.sample_values) {
|
||||
@ -490,8 +485,8 @@ void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fnam
|
||||
while (sqlite3_step(stmt) == SQLITE_ROW) {
|
||||
std::string key, value;
|
||||
|
||||
quote(&key, (const char *) sqlite3_column_text(stmt, 0));
|
||||
quote(&value, (const char *) sqlite3_column_text(stmt, 1));
|
||||
quote(key, (const char *) sqlite3_column_text(stmt, 0));
|
||||
quote(value, (const char *) sqlite3_column_text(stmt, 1));
|
||||
|
||||
if (!first) {
|
||||
fprintf(fp, ",\n");
|
||||
@ -536,6 +531,10 @@ std::map<std::string, layermap_entry> merge_layermaps(std::vector<std::map<std::
|
||||
|
||||
for (size_t i = 0; i < maps.size(); i++) {
|
||||
for (auto map = maps[i].begin(); map != maps[i].end(); ++map) {
|
||||
if (map->second.points + map->second.lines + map->second.polygons == 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
std::string layername = map->first;
|
||||
if (trunc) {
|
||||
layername = truncate16(layername, 256);
|
||||
|
69
mvt.cpp
69
mvt.cpp
@ -4,7 +4,10 @@
|
||||
#include <vector>
|
||||
#include <map>
|
||||
#include <zlib.h>
|
||||
#include <errno.h>
|
||||
#include <limits.h>
|
||||
#include "mvt.hpp"
|
||||
#include "geometry.hpp"
|
||||
#include "protozero/varint.hpp"
|
||||
#include "protozero/pbf_reader.hpp"
|
||||
#include "protozero/pbf_writer.hpp"
|
||||
@ -416,3 +419,69 @@ void mvt_layer::tag(mvt_feature &feature, std::string key, mvt_value value) {
|
||||
feature.tags.push_back(ko);
|
||||
feature.tags.push_back(vo);
|
||||
}
|
||||
|
||||
// Determine whether the string s represents an integer value.
// An integer followed by a decimal point and only zeros (like "12.000")
// still counts as an integer. On success, stores the value in *v and
// returns 1; otherwise returns 0 (and *v is not meaningful).
static int is_integer(const char *s, long long *v) {
	errno = 0;
	char *endptr;

	*v = strtoll(s, &endptr, 0);
	if (endptr == s) {
		// No digits were consumed at all: empty string, or text like
		// "." or "abc". Previously these could be misclassified as the
		// integer 0 because only *endptr was inspected.
		return 0;
	}
	if (*v == 0 && errno != 0) {
		return 0;
	}
	if ((*v == LLONG_MIN || *v == LLONG_MAX) && (errno == ERANGE)) {
		// Out of range for long long
		return 0;
	}
	if (*endptr != '\0') {
		// Special case: If it is an integer followed by .0000 or similar,
		// it is still an integer
		if (*endptr != '.') {
			return 0;
		}
		endptr++;
		for (; *endptr != '\0'; endptr++) {
			if (*endptr != '0') {
				return 0;
			}
		}

		return 1;
	}

	return 1;
}
|
||||
|
||||
// Convert a previously stringified attribute value back into an
// mvt_value of the requested type. Numeric strings are stored in the
// narrowest suitable representation: int/sint for integer values,
// float when the value round-trips exactly through float, double
// otherwise. Booleans are recognized by a leading 't'; anything else
// is kept as a string.
mvt_value stringified_to_mvt_value(int type, const char *s) {
	mvt_value tv;

	if (type == mvt_bool) {
		tv.type = mvt_bool;
		tv.numeric_value.bool_value = (s[0] == 't');
	} else if (type != mvt_double) {
		tv.type = mvt_string;
		tv.string_value = s;
	} else {
		long long iv;
		if (is_integer(s, &iv)) {
			// Non-negative integers are encoded as int, negative as sint
			if (iv >= 0) {
				tv.type = mvt_int;
				tv.numeric_value.int_value = iv;
			} else {
				tv.type = mvt_sint;
				tv.numeric_value.sint_value = iv;
			}
		} else {
			double d = atof(s);

			if (d == (float) d) {
				// Exactly representable as a float, so use the smaller type
				tv.type = mvt_float;
				tv.numeric_value.float_value = d;
			} else {
				tv.type = mvt_double;
				tv.numeric_value.double_value = d;
			}
		}
	}

	return tv;
}
|
||||
|
7
mvt.hpp
7
mvt.hpp
@ -17,8 +17,8 @@ enum mvt_operation {
|
||||
};
|
||||
|
||||
struct mvt_geometry {
|
||||
int x;
|
||||
int y;
|
||||
long long x;
|
||||
long long y;
|
||||
int /* mvt_operation */ op;
|
||||
|
||||
mvt_geometry(int op, long long x, long long y);
|
||||
@ -86,7 +86,7 @@ struct mvt_layer {
|
||||
std::vector<mvt_feature> features;
|
||||
std::vector<std::string> keys;
|
||||
std::vector<mvt_value> values;
|
||||
int extent;
|
||||
long long extent;
|
||||
|
||||
// Add a key-value pair to a feature, using this layer's constant pool
|
||||
void tag(mvt_feature &feature, std::string key, mvt_value value);
|
||||
@ -108,4 +108,5 @@ int decompress(std::string const &input, std::string &output);
|
||||
int compress(std::string const &input, std::string &output);
|
||||
int dezig(unsigned n);
|
||||
|
||||
mvt_value stringified_to_mvt_value(int type, const char *s);
|
||||
#endif
|
||||
|
650
plugin.cpp
Normal file
650
plugin.cpp
Normal file
@ -0,0 +1,650 @@
|
||||
#ifdef __APPLE__
|
||||
#define _DARWIN_UNLIMITED_STREAMS
|
||||
#endif
|
||||
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <vector>
|
||||
#include <string>
|
||||
#include <map>
|
||||
#include <set>
|
||||
#include <pthread.h>
|
||||
#include <unistd.h>
|
||||
#include <fcntl.h>
|
||||
#include <errno.h>
|
||||
#include <cmath>
|
||||
#include <sys/types.h>
|
||||
#include <sys/wait.h>
|
||||
#include <sqlite3.h>
|
||||
#include <limits.h>
|
||||
#include "main.hpp"
|
||||
#include "mvt.hpp"
|
||||
#include "mbtiles.hpp"
|
||||
#include "projection.hpp"
|
||||
#include "geometry.hpp"
|
||||
#include "serial.hpp"
|
||||
|
||||
extern "C" {
|
||||
#include "jsonpull/jsonpull.h"
|
||||
}
|
||||
|
||||
#include "plugin.hpp"
|
||||
#include "write_json.hpp"
|
||||
#include "read_json.hpp"
|
||||
|
||||
struct writer_arg {
|
||||
int write_to;
|
||||
std::vector<mvt_layer> *layers;
|
||||
unsigned z;
|
||||
unsigned x;
|
||||
unsigned y;
|
||||
int extent;
|
||||
};
|
||||
|
||||
void *run_writer(void *a) {
|
||||
writer_arg *wa = (writer_arg *) a;
|
||||
|
||||
FILE *fp = fdopen(wa->write_to, "w");
|
||||
if (fp == NULL) {
|
||||
perror("fdopen (pipe writer)");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
for (size_t i = 0; i < wa->layers->size(); i++) {
|
||||
layer_to_geojson(fp, (*(wa->layers))[i], wa->z, wa->x, wa->y, false, true, false, 0, 0, 0, true);
|
||||
}
|
||||
|
||||
if (fclose(fp) != 0) {
|
||||
if (errno == EPIPE) {
|
||||
static bool warned = false;
|
||||
if (!warned) {
|
||||
fprintf(stderr, "Warning: broken pipe in postfilter\n");
|
||||
warned = true;
|
||||
}
|
||||
} else {
|
||||
perror("fclose output to filter");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
|
||||
return NULL;
|
||||
}
|
||||
|
||||
// XXX deduplicate
|
||||
// Convert internal draw geometry into the equivalent sequence of MVT
// geometry commands, preserving each element's opcode and coordinates.
static std::vector<mvt_geometry> to_feature(drawvec &geom) {
	std::vector<mvt_geometry> out;

	for (auto const &d : geom) {
		out.push_back(mvt_geometry(d.op, d.x, d.y));
	}

	return out;
}
|
||||
|
||||
// Reads from the postfilter
|
||||
std::vector<mvt_layer> parse_layers(int fd, int z, unsigned x, unsigned y, std::vector<std::map<std::string, layermap_entry>> *layermaps, size_t tiling_seg, std::vector<std::vector<std::string>> *layer_unmaps, int extent) {
|
||||
std::map<std::string, mvt_layer> ret;
|
||||
|
||||
FILE *f = fdopen(fd, "r");
|
||||
if (f == NULL) {
|
||||
perror("fdopen filter output");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
json_pull *jp = json_begin_file(f);
|
||||
|
||||
while (1) {
|
||||
json_object *j = json_read(jp);
|
||||
if (j == NULL) {
|
||||
if (jp->error != NULL) {
|
||||
fprintf(stderr, "Filter output:%d: %s\n", jp->line, jp->error);
|
||||
if (jp->root != NULL) {
|
||||
json_context(jp->root);
|
||||
}
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
json_free(jp->root);
|
||||
break;
|
||||
}
|
||||
|
||||
json_object *type = json_hash_get(j, "type");
|
||||
if (type == NULL || type->type != JSON_STRING) {
|
||||
continue;
|
||||
}
|
||||
if (strcmp(type->string, "Feature") != 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
json_object *geometry = json_hash_get(j, "geometry");
|
||||
if (geometry == NULL) {
|
||||
fprintf(stderr, "Filter output:%d: filtered feature with no geometry\n", jp->line);
|
||||
json_context(j);
|
||||
json_free(j);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
json_object *properties = json_hash_get(j, "properties");
|
||||
if (properties == NULL || (properties->type != JSON_HASH && properties->type != JSON_NULL)) {
|
||||
fprintf(stderr, "Filter output:%d: feature without properties hash\n", jp->line);
|
||||
json_context(j);
|
||||
json_free(j);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
json_object *geometry_type = json_hash_get(geometry, "type");
|
||||
if (geometry_type == NULL) {
|
||||
fprintf(stderr, "Filter output:%d: null geometry (additional not reported)\n", jp->line);
|
||||
json_context(j);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
if (geometry_type->type != JSON_STRING) {
|
||||
fprintf(stderr, "Filter output:%d: geometry type is not a string\n", jp->line);
|
||||
json_context(j);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
json_object *coordinates = json_hash_get(geometry, "coordinates");
|
||||
if (coordinates == NULL || coordinates->type != JSON_ARRAY) {
|
||||
fprintf(stderr, "Filter output:%d: feature without coordinates array\n", jp->line);
|
||||
json_context(j);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
int t;
|
||||
for (t = 0; t < GEOM_TYPES; t++) {
|
||||
if (strcmp(geometry_type->string, geometry_names[t]) == 0) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (t >= GEOM_TYPES) {
|
||||
fprintf(stderr, "Filter output:%d: Can't handle geometry type %s\n", jp->line, geometry_type->string);
|
||||
json_context(j);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
std::string layername = "unknown";
|
||||
json_object *tippecanoe = json_hash_get(j, "tippecanoe");
|
||||
json_object *layer = NULL;
|
||||
if (tippecanoe != NULL) {
|
||||
layer = json_hash_get(tippecanoe, "layer");
|
||||
if (layer != NULL && layer->type == JSON_STRING) {
|
||||
layername = std::string(layer->string);
|
||||
}
|
||||
}
|
||||
|
||||
if (ret.count(layername) == 0) {
|
||||
mvt_layer l;
|
||||
l.name = layername;
|
||||
l.version = 2;
|
||||
l.extent = extent;
|
||||
|
||||
ret.insert(std::pair<std::string, mvt_layer>(layername, l));
|
||||
}
|
||||
auto l = ret.find(layername);
|
||||
|
||||
drawvec dv;
|
||||
parse_geometry(t, coordinates, dv, VT_MOVETO, "Filter output", jp->line, j);
|
||||
if (mb_geometry[t] == VT_POLYGON) {
|
||||
dv = fix_polygon(dv);
|
||||
}
|
||||
|
||||
// Scale and offset geometry from global to tile
|
||||
for (size_t i = 0; i < dv.size(); i++) {
|
||||
long long scale = 1LL << (32 - z);
|
||||
dv[i].x = std::round((dv[i].x - scale * x) * extent / (double) scale);
|
||||
dv[i].y = std::round((dv[i].y - scale * y) * extent / (double) scale);
|
||||
}
|
||||
|
||||
if (mb_geometry[t] == VT_POLYGON) {
|
||||
dv = clean_or_clip_poly(dv, 0, 0, 0, false);
|
||||
if (dv.size() < 3) {
|
||||
dv.clear();
|
||||
}
|
||||
}
|
||||
dv = remove_noop(dv, mb_geometry[t], 0);
|
||||
if (mb_geometry[t] == VT_POLYGON) {
|
||||
dv = close_poly(dv);
|
||||
}
|
||||
|
||||
if (dv.size() > 0) {
|
||||
mvt_feature feature;
|
||||
feature.type = mb_geometry[t];
|
||||
feature.geometry = to_feature(dv);
|
||||
|
||||
json_object *id = json_hash_get(j, "id");
|
||||
if (id != NULL) {
|
||||
feature.id = atoll(id->string);
|
||||
feature.has_id = true;
|
||||
}
|
||||
|
||||
std::map<std::string, layermap_entry> &layermap = (*layermaps)[tiling_seg];
|
||||
if (layermap.count(layername) == 0) {
|
||||
layermap_entry lme = layermap_entry(layermap.size());
|
||||
lme.minzoom = z;
|
||||
lme.maxzoom = z;
|
||||
|
||||
layermap.insert(std::pair<std::string, layermap_entry>(layername, lme));
|
||||
|
||||
if (lme.id >= (*layer_unmaps)[tiling_seg].size()) {
|
||||
(*layer_unmaps)[tiling_seg].resize(lme.id + 1);
|
||||
(*layer_unmaps)[tiling_seg][lme.id] = layername;
|
||||
}
|
||||
}
|
||||
|
||||
auto fk = layermap.find(layername);
|
||||
if (fk == layermap.end()) {
|
||||
fprintf(stderr, "Internal error: layer %s not found\n", layername.c_str());
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (z < fk->second.minzoom) {
|
||||
fk->second.minzoom = z;
|
||||
}
|
||||
if (z > fk->second.maxzoom) {
|
||||
fk->second.maxzoom = z;
|
||||
}
|
||||
|
||||
if (feature.type == mvt_point) {
|
||||
fk->second.points++;
|
||||
} else if (feature.type == mvt_linestring) {
|
||||
fk->second.lines++;
|
||||
} else if (feature.type == mvt_polygon) {
|
||||
fk->second.polygons++;
|
||||
}
|
||||
|
||||
for (size_t i = 0; i < properties->length; i++) {
|
||||
int tp = -1;
|
||||
std::string s;
|
||||
|
||||
std::map<std::string, int> nullmap;
|
||||
stringify_value(properties->values[i], tp, s, "Filter output", jp->line, j, "", &nullmap);
|
||||
if (tp >= 0) {
|
||||
mvt_value v = stringified_to_mvt_value(tp, s.c_str());
|
||||
l->second.tag(feature, std::string(properties->keys[i]->string), v);
|
||||
|
||||
type_and_string attrib;
|
||||
attrib.type = tp;
|
||||
attrib.string = s;
|
||||
|
||||
add_to_file_keys(fk->second.file_keys, std::string(properties->keys[i]->string), attrib);
|
||||
}
|
||||
}
|
||||
|
||||
l->second.features.push_back(feature);
|
||||
}
|
||||
|
||||
json_free(j);
|
||||
}
|
||||
|
||||
json_end(jp);
|
||||
if (fclose(f) != 0) {
|
||||
perror("fclose postfilter output");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
std::vector<mvt_layer> final;
|
||||
for (auto a : ret) {
|
||||
final.push_back(a.second);
|
||||
}
|
||||
return final;
|
||||
}
|
||||
|
||||
// Reads from the prefilter
|
||||
serial_feature parse_feature(json_pull *jp, int z, unsigned x, unsigned y, std::vector<std::map<std::string, layermap_entry>> *layermaps, size_t tiling_seg, std::vector<std::vector<std::string>> *layer_unmaps, bool postfilter) {
|
||||
serial_feature sf;
|
||||
|
||||
while (1) {
|
||||
json_object *j = json_read(jp);
|
||||
if (j == NULL) {
|
||||
if (jp->error != NULL) {
|
||||
fprintf(stderr, "Filter output:%d: %s\n", jp->line, jp->error);
|
||||
if (jp->root != NULL) {
|
||||
json_context(jp->root);
|
||||
}
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
json_free(jp->root);
|
||||
sf.t = -1;
|
||||
return sf;
|
||||
}
|
||||
|
||||
json_object *type = json_hash_get(j, "type");
|
||||
if (type == NULL || type->type != JSON_STRING) {
|
||||
continue;
|
||||
}
|
||||
if (strcmp(type->string, "Feature") != 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
json_object *geometry = json_hash_get(j, "geometry");
|
||||
if (geometry == NULL) {
|
||||
fprintf(stderr, "Filter output:%d: filtered feature with no geometry\n", jp->line);
|
||||
json_context(j);
|
||||
json_free(j);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
json_object *properties = json_hash_get(j, "properties");
|
||||
if (properties == NULL || (properties->type != JSON_HASH && properties->type != JSON_NULL)) {
|
||||
fprintf(stderr, "Filter output:%d: feature without properties hash\n", jp->line);
|
||||
json_context(j);
|
||||
json_free(j);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
json_object *geometry_type = json_hash_get(geometry, "type");
|
||||
if (geometry_type == NULL) {
|
||||
fprintf(stderr, "Filter output:%d: null geometry (additional not reported)\n", jp->line);
|
||||
json_context(j);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
if (geometry_type->type != JSON_STRING) {
|
||||
fprintf(stderr, "Filter output:%d: geometry type is not a string\n", jp->line);
|
||||
json_context(j);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
json_object *coordinates = json_hash_get(geometry, "coordinates");
|
||||
if (coordinates == NULL || coordinates->type != JSON_ARRAY) {
|
||||
fprintf(stderr, "Filter output:%d: feature without coordinates array\n", jp->line);
|
||||
json_context(j);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
int t;
|
||||
for (t = 0; t < GEOM_TYPES; t++) {
|
||||
if (strcmp(geometry_type->string, geometry_names[t]) == 0) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (t >= GEOM_TYPES) {
|
||||
fprintf(stderr, "Filter output:%d: Can't handle geometry type %s\n", jp->line, geometry_type->string);
|
||||
json_context(j);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
drawvec dv;
|
||||
parse_geometry(t, coordinates, dv, VT_MOVETO, "Filter output", jp->line, j);
|
||||
if (mb_geometry[t] == VT_POLYGON) {
|
||||
dv = fix_polygon(dv);
|
||||
}
|
||||
|
||||
// Scale and offset geometry from global to tile
|
||||
double scale = 1LL << geometry_scale;
|
||||
for (size_t i = 0; i < dv.size(); i++) {
|
||||
unsigned sx = 0, sy = 0;
|
||||
if (z != 0) {
|
||||
sx = x << (32 - z);
|
||||
sy = y << (32 - z);
|
||||
}
|
||||
dv[i].x = std::round(dv[i].x / scale) * scale - sx;
|
||||
dv[i].y = std::round(dv[i].y / scale) * scale - sy;
|
||||
}
|
||||
|
||||
if (dv.size() > 0) {
|
||||
sf.t = mb_geometry[t];
|
||||
sf.segment = tiling_seg;
|
||||
sf.geometry = dv;
|
||||
sf.seq = 0;
|
||||
sf.index = 0;
|
||||
sf.bbox[0] = sf.bbox[1] = LLONG_MAX;
|
||||
sf.bbox[2] = sf.bbox[3] = LLONG_MIN;
|
||||
sf.extent = 0;
|
||||
sf.m = 0;
|
||||
sf.metapos = 0;
|
||||
sf.has_id = false;
|
||||
|
||||
std::string layername = "unknown";
|
||||
json_object *tippecanoe = json_hash_get(j, "tippecanoe");
|
||||
if (tippecanoe != NULL) {
|
||||
json_object *layer = json_hash_get(tippecanoe, "layer");
|
||||
if (layer != NULL && layer->type == JSON_STRING) {
|
||||
layername = std::string(layer->string);
|
||||
}
|
||||
|
||||
json_object *index = json_hash_get(tippecanoe, "index");
|
||||
if (index != NULL && index->type == JSON_NUMBER) {
|
||||
sf.index = index->number;
|
||||
}
|
||||
|
||||
json_object *sequence = json_hash_get(tippecanoe, "sequence");
|
||||
if (sequence != NULL && sequence->type == JSON_NUMBER) {
|
||||
sf.seq = sequence->number;
|
||||
}
|
||||
|
||||
json_object *extent = json_hash_get(tippecanoe, "extent");
|
||||
if (extent != NULL && sequence->type == JSON_NUMBER) {
|
||||
sf.extent = extent->number;
|
||||
}
|
||||
}
|
||||
|
||||
for (size_t i = 0; i < dv.size(); i++) {
|
||||
if (dv[i].op == VT_MOVETO || dv[i].op == VT_LINETO) {
|
||||
if (dv[i].x < sf.bbox[0]) {
|
||||
sf.bbox[0] = dv[i].x;
|
||||
}
|
||||
if (dv[i].y < sf.bbox[1]) {
|
||||
sf.bbox[1] = dv[i].y;
|
||||
}
|
||||
if (dv[i].x > sf.bbox[2]) {
|
||||
sf.bbox[2] = dv[i].x;
|
||||
}
|
||||
if (dv[i].y > sf.bbox[3]) {
|
||||
sf.bbox[3] = dv[i].y;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
json_object *id = json_hash_get(j, "id");
|
||||
if (id != NULL) {
|
||||
sf.id = atoll(id->string);
|
||||
sf.has_id = true;
|
||||
}
|
||||
|
||||
std::map<std::string, layermap_entry> &layermap = (*layermaps)[tiling_seg];
|
||||
|
||||
if (layermap.count(layername) == 0) {
|
||||
layermap_entry lme = layermap_entry(layermap.size());
|
||||
lme.minzoom = z;
|
||||
lme.maxzoom = z;
|
||||
|
||||
layermap.insert(std::pair<std::string, layermap_entry>(layername, lme));
|
||||
|
||||
if (lme.id >= (*layer_unmaps)[tiling_seg].size()) {
|
||||
(*layer_unmaps)[tiling_seg].resize(lme.id + 1);
|
||||
(*layer_unmaps)[tiling_seg][lme.id] = layername;
|
||||
}
|
||||
}
|
||||
|
||||
auto fk = layermap.find(layername);
|
||||
if (fk == layermap.end()) {
|
||||
fprintf(stderr, "Internal error: layer %s not found\n", layername.c_str());
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
sf.layer = fk->second.id;
|
||||
|
||||
if (z < fk->second.minzoom) {
|
||||
fk->second.minzoom = z;
|
||||
}
|
||||
if (z > fk->second.maxzoom) {
|
||||
fk->second.maxzoom = z;
|
||||
}
|
||||
|
||||
if (!postfilter) {
|
||||
if (sf.t == mvt_point) {
|
||||
fk->second.points++;
|
||||
} else if (sf.t == mvt_linestring) {
|
||||
fk->second.lines++;
|
||||
} else if (sf.t == mvt_polygon) {
|
||||
fk->second.polygons++;
|
||||
}
|
||||
}
|
||||
|
||||
for (size_t i = 0; i < properties->length; i++) {
|
||||
serial_val v;
|
||||
v.type = -1;
|
||||
|
||||
std::map<std::string, int> nullmap;
|
||||
stringify_value(properties->values[i], v.type, v.s, "Filter output", jp->line, j, "", &nullmap);
|
||||
|
||||
if (v.type >= 0) {
|
||||
sf.full_keys.push_back(std::string(properties->keys[i]->string));
|
||||
sf.full_values.push_back(v);
|
||||
|
||||
type_and_string attrib;
|
||||
attrib.string = v.s;
|
||||
attrib.type = v.type;
|
||||
|
||||
if (!postfilter) {
|
||||
add_to_file_keys(fk->second.file_keys, std::string(properties->keys[i]->string), attrib);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
json_free(j);
|
||||
return sf;
|
||||
}
|
||||
|
||||
json_free(j);
|
||||
}
|
||||
}
|
||||
|
||||
static pthread_mutex_t pipe_lock = PTHREAD_MUTEX_INITIALIZER;
|
||||
|
||||
void setup_filter(const char *filter, int *write_to, int *read_from, pid_t *pid, unsigned z, unsigned x, unsigned y) {
|
||||
// This will create two pipes, a new thread, and a new process.
|
||||
//
|
||||
// The new process will read from one pipe and write to the other, and execute the filter.
|
||||
// The new thread will write the GeoJSON to the pipe that leads to the filter.
|
||||
// The original thread will read the GeoJSON from the filter and convert it back into vector tiles.
|
||||
|
||||
if (pthread_mutex_lock(&pipe_lock) != 0) {
|
||||
perror("pthread_mutex_lock (pipe)");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
int pipe_orig[2], pipe_filtered[2];
|
||||
if (pipe(pipe_orig) < 0) {
|
||||
perror("pipe (original features)");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (pipe(pipe_filtered) < 0) {
|
||||
perror("pipe (filtered features)");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
std::string z_str = std::to_string(z);
|
||||
std::string x_str = std::to_string(x);
|
||||
std::string y_str = std::to_string(y);
|
||||
|
||||
*pid = fork();
|
||||
if (*pid < 0) {
|
||||
perror("fork");
|
||||
exit(EXIT_FAILURE);
|
||||
} else if (*pid == 0) {
|
||||
// child
|
||||
|
||||
if (dup2(pipe_orig[0], 0) < 0) {
|
||||
perror("dup child stdin");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (dup2(pipe_filtered[1], 1) < 0) {
|
||||
perror("dup child stdout");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (close(pipe_orig[1]) != 0) {
|
||||
perror("close output to filter");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (close(pipe_filtered[0]) != 0) {
|
||||
perror("close input from filter");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (close(pipe_orig[0]) != 0) {
|
||||
perror("close dup input of filter");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (close(pipe_filtered[1]) != 0) {
|
||||
perror("close dup output of filter");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
// XXX close other fds?
|
||||
|
||||
if (execlp("sh", "sh", "-c", filter, "sh", z_str.c_str(), x_str.c_str(), y_str.c_str(), NULL) != 0) {
|
||||
perror("exec");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
} else {
|
||||
// parent
|
||||
|
||||
if (close(pipe_orig[0]) != 0) {
|
||||
perror("close filter-side reader");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (close(pipe_filtered[1]) != 0) {
|
||||
perror("close filter-side writer");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (fcntl(pipe_orig[1], F_SETFD, FD_CLOEXEC) != 0) {
|
||||
perror("cloxec output to filter");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (fcntl(pipe_filtered[0], F_SETFD, FD_CLOEXEC) != 0) {
|
||||
perror("cloxec input from filter");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
if (pthread_mutex_unlock(&pipe_lock) != 0) {
|
||||
perror("pthread_mutex_unlock (pipe_lock)");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
*write_to = pipe_orig[1];
|
||||
*read_from = pipe_filtered[0];
|
||||
}
|
||||
}
|
||||
|
||||
std::vector<mvt_layer> filter_layers(const char *filter, std::vector<mvt_layer> &layers, unsigned z, unsigned x, unsigned y, std::vector<std::map<std::string, layermap_entry>> *layermaps, size_t tiling_seg, std::vector<std::vector<std::string>> *layer_unmaps, int extent) {
|
||||
int write_to, read_from;
|
||||
pid_t pid;
|
||||
setup_filter(filter, &write_to, &read_from, &pid, z, x, y);
|
||||
|
||||
writer_arg wa;
|
||||
wa.write_to = write_to;
|
||||
wa.layers = &layers;
|
||||
wa.z = z;
|
||||
wa.x = x;
|
||||
wa.y = y;
|
||||
wa.extent = extent;
|
||||
|
||||
pthread_t writer;
|
||||
if (pthread_create(&writer, NULL, run_writer, &wa) != 0) {
|
||||
perror("pthread_create (filter writer)");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
std::vector<mvt_layer> nlayers = parse_layers(read_from, z, x, y, layermaps, tiling_seg, layer_unmaps, extent);
|
||||
|
||||
while (1) {
|
||||
int stat_loc;
|
||||
if (waitpid(pid, &stat_loc, 0) < 0) {
|
||||
perror("waitpid for filter\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (WIFEXITED(stat_loc) || WIFSIGNALED(stat_loc)) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
void *ret;
|
||||
if (pthread_join(writer, &ret) != 0) {
|
||||
perror("pthread_join filter writer");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
return nlayers;
|
||||
}
|
3
plugin.hpp
Normal file
3
plugin.hpp
Normal file
@ -0,0 +1,3 @@
|
||||
std::vector<mvt_layer> filter_layers(const char *filter, std::vector<mvt_layer> &layer, unsigned z, unsigned x, unsigned y, std::vector<std::map<std::string, layermap_entry>> *layermaps, size_t tiling_seg, std::vector<std::vector<std::string>> *layer_unmaps, int extent);
|
||||
void setup_filter(const char *filter, int *write_to, int *read_from, pid_t *pid, unsigned z, unsigned x, unsigned y);
|
||||
serial_feature parse_feature(json_pull *jp, int z, unsigned x, unsigned y, std::vector<std::map<std::string, layermap_entry>> *layermaps, size_t tiling_seg, std::vector<std::vector<std::string>> *layer_unmaps, bool filters);
|
184
read_json.cpp
Normal file
184
read_json.cpp
Normal file
@ -0,0 +1,184 @@
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <math.h>
|
||||
#include <vector>
|
||||
#include <string>
|
||||
#include <map>
|
||||
|
||||
extern "C" {
|
||||
#include "jsonpull/jsonpull.h"
|
||||
}
|
||||
|
||||
#include "geometry.hpp"
|
||||
#include "projection.hpp"
|
||||
#include "read_json.hpp"
|
||||
#include "text.hpp"
|
||||
#include "mvt.hpp"
|
||||
|
||||
// GeoJSON geometry type names, indexed by the GEOM_* constants.
const char *geometry_names[GEOM_TYPES] = {
	"Point", "MultiPoint", "LineString", "MultiLineString", "Polygon", "MultiPolygon",
};

// For each GEOM_* type, the GEOM_* type of its array elements,
// or -1 when the elements are bare coordinate positions.
int geometry_within[GEOM_TYPES] = {
	-1,		 /* point */
	GEOM_POINT,	 /* multipoint */
	GEOM_POINT,	 /* linestring */
	GEOM_LINESTRING, /* multilinestring */
	GEOM_LINESTRING, /* polygon */
	GEOM_POLYGON,	 /* multipolygon */
};

// Mapping from GEOM_* geometry types to VT_* drawing types.
int mb_geometry[GEOM_TYPES] = {
	VT_POINT, VT_POINT, VT_LINE, VT_LINE, VT_POLYGON, VT_POLYGON,
};
|
||||
|
||||
// Prints (to stderr) the JSON object that an error message refers to,
// truncating very long renderings to about 500 characters.
void json_context(json_object *j) {
	char *rendered = json_stringify(j);

	if (strlen(rendered) >= 500) {
		// Replace the tail with "..." (plus terminator) in place
		memcpy(rendered + 497, "...", 4);
	}

	fprintf(stderr, "In JSON object %s\n", rendered);
	free(rendered);	 // allocated by json_stringify
}
|
||||
|
||||
void parse_geometry(int t, json_object *j, drawvec &out, int op, const char *fname, int line, json_object *feature) {
|
||||
if (j == NULL || j->type != JSON_ARRAY) {
|
||||
fprintf(stderr, "%s:%d: expected array for type %d\n", fname, line, t);
|
||||
json_context(feature);
|
||||
return;
|
||||
}
|
||||
|
||||
int within = geometry_within[t];
|
||||
if (within >= 0) {
|
||||
size_t i;
|
||||
for (i = 0; i < j->length; i++) {
|
||||
if (within == GEOM_POINT) {
|
||||
if (i == 0 || mb_geometry[t] == GEOM_MULTIPOINT) {
|
||||
op = VT_MOVETO;
|
||||
} else {
|
||||
op = VT_LINETO;
|
||||
}
|
||||
}
|
||||
|
||||
parse_geometry(within, j->array[i], out, op, fname, line, feature);
|
||||
}
|
||||
} else {
|
||||
if (j->length >= 2 && j->array[0]->type == JSON_NUMBER && j->array[1]->type == JSON_NUMBER) {
|
||||
long long x, y;
|
||||
double lon = j->array[0]->number;
|
||||
double lat = j->array[1]->number;
|
||||
projection->project(lon, lat, 32, &x, &y);
|
||||
|
||||
if (j->length > 2) {
|
||||
static int warned = 0;
|
||||
|
||||
if (!warned) {
|
||||
fprintf(stderr, "%s:%d: ignoring dimensions beyond two\n", fname, line);
|
||||
json_context(j);
|
||||
json_context(feature);
|
||||
warned = 1;
|
||||
}
|
||||
}
|
||||
|
||||
draw d(op, x, y);
|
||||
out.push_back(draw(op, x, y));
|
||||
} else {
|
||||
fprintf(stderr, "%s:%d: malformed point\n", fname, line);
|
||||
json_context(j);
|
||||
json_context(feature);
|
||||
}
|
||||
}
|
||||
|
||||
if (t == GEOM_POLYGON) {
|
||||
// Note that this is not using the correct meaning of closepath.
|
||||
//
|
||||
// We are using it here to close an entire Polygon, to distinguish
|
||||
// the Polygons within a MultiPolygon from each other.
|
||||
//
|
||||
// This will be undone in fix_polygon(), which needs to know which
|
||||
// rings come from which Polygons so that it can make the winding order
|
||||
// of the outer ring be the opposite of the order of the inner rings.
|
||||
|
||||
out.push_back(draw(VT_CLOSEPATH, 0, 0));
|
||||
}
|
||||
}
|
||||
|
||||
// Converts a JSON property value into the textual form plus mvt_* type tag
// that tippecanoe uses for attribute handling.
//
// On return, `type` is set to an mvt_* type (and left unchanged for JSON
// nulls and NULL values) and `stringified` holds the textual value.
// `attribute_types` maps attribute keys to a forced mvt type; if `key` is
// found there, the value is coerced to that type before being tagged.
// `reading` and `line` identify the input source for error messages, and
// `feature` is the enclosing object to print for error context.
// NOTE(review): assumes attribute_types is non-NULL (callers pass at
// least an empty map) — confirm before passing NULL.
void stringify_value(json_object *value, int &type, std::string &stringified, const char *reading, int line, json_object *feature, std::string const &key, std::map<std::string, int> const *attribute_types) {
	if (value != NULL) {
		int vt = value->type;
		std::string val;

		// First render the raw JSON value as text
		if (vt == JSON_STRING || vt == JSON_NUMBER) {
			val = value->string;
		} else if (vt == JSON_TRUE) {
			val = "true";
		} else if (vt == JSON_FALSE) {
			val = "false";
		} else if (vt == JSON_NULL) {
			val = "null";
		} else {
			// Arrays and hashes are carried as their JSON text
			const char *v = json_stringify(value);
			val = std::string(v);
			free((void *) v);  // stringify
		}

		// Apply any forced attribute type for this key
		auto a = (*attribute_types).find(key);
		if (a != attribute_types->end()) {
			if (a->second == mvt_string) {
				vt = JSON_STRING;
			} else if (a->second == mvt_float) {
				vt = JSON_NUMBER;
				val = std::to_string(atof(val.c_str()));
			} else if (a->second == mvt_int) {
				vt = JSON_NUMBER;
				if (val.size() == 0) {
					val = "0";
				}

				// If the text is not purely digits, round it to an integer
				for (size_t ii = 0; ii < val.size(); ii++) {
					char c = val[ii];
					if (c < '0' || c > '9') {
						val = std::to_string(round(atof(val.c_str())));
						break;
					}
				}
			} else if (a->second == mvt_bool) {
				// Falsy spellings become false; anything else is true
				if (val == "false" || val == "0" || val == "null" || val.size() == 0) {
					vt = JSON_FALSE;
					val = "false";
				} else {
					vt = JSON_TRUE;
					val = "true";
				}
			} else {
				fprintf(stderr, "Can't happen: attribute type %d\n", a->second);
				exit(EXIT_FAILURE);
			}
		}

		// Translate the (possibly coerced) JSON type into an mvt type
		if (vt == JSON_STRING) {
			type = mvt_string;
			stringified = val;
			// String attributes must be valid UTF-8
			std::string err = check_utf8(val);
			if (err != "") {
				fprintf(stderr, "%s:%d: %s\n", reading, line, err.c_str());
				json_context(feature);
				exit(EXIT_FAILURE);
			}
		} else if (vt == JSON_NUMBER) {
			type = mvt_double;
			stringified = val;
		} else if (vt == JSON_TRUE || vt == JSON_FALSE) {
			type = mvt_bool;
			stringified = val;
		} else if (vt == JSON_NULL) {
			;  // nulls are dropped: type is left unchanged
		} else {
			// Arrays/hashes are tagged as strings of their JSON text
			type = mvt_string;
			stringified = val;
		}
	}
}
|
16
read_json.hpp
Normal file
16
read_json.hpp
Normal file
@ -0,0 +1,16 @@
|
||||
#define GEOM_POINT 0 /* array of positions */
|
||||
#define GEOM_MULTIPOINT 1 /* array of arrays of positions */
|
||||
#define GEOM_LINESTRING 2 /* array of arrays of positions */
|
||||
#define GEOM_MULTILINESTRING 3 /* array of arrays of arrays of positions */
|
||||
#define GEOM_POLYGON 4 /* array of arrays of arrays of positions */
|
||||
#define GEOM_MULTIPOLYGON 5 /* array of arrays of arrays of arrays of positions */
|
||||
#define GEOM_TYPES 6
|
||||
|
||||
extern const char *geometry_names[GEOM_TYPES];
|
||||
extern int geometry_within[GEOM_TYPES];
|
||||
extern int mb_geometry[GEOM_TYPES];
|
||||
|
||||
void json_context(json_object *j);
|
||||
void parse_geometry(int t, json_object *j, drawvec &out, int op, const char *fname, int line, json_object *feature);
|
||||
|
||||
void stringify_value(json_object *value, int &type, std::string &stringified, const char *reading, int line, json_object *feature, std::string const &key, std::map<std::string, int> const *attribute_types);
|
80
serial.cpp
80
serial.cpp
@ -230,3 +230,83 @@ void serialize_feature(FILE *geomfile, serial_feature *sf, long long *geompos, c
|
||||
serialize_byte(geomfile, sf->feature_minzoom, geompos, fname);
|
||||
}
|
||||
}
|
||||
|
||||
// Reads one serialized feature back from the geometry stream `geoms`,
// advancing *geompos_in. Returns a feature with t < 0 at end of stream.
//
// The low bits of the serialized layer number are flag bits saying which
// optional fields follow (see the masks below); the true layer id is
// recovered by shifting them off. Attribute key/value references are read
// either from `metabase` (at sf.metapos, offset by the segment's
// meta_off) or inline from the stream when metapos < 0.
serial_feature deserialize_feature(FILE *geoms, long long *geompos_in, char *metabase, long long *meta_off, unsigned z, unsigned tx, unsigned ty, unsigned *initial_x, unsigned *initial_y) {
	serial_feature sf;

	deserialize_byte_io(geoms, &sf.t, geompos_in);
	if (sf.t < 0) {
		// End of stream marker
		return sf;
	}

	deserialize_long_long_io(geoms, &sf.layer, geompos_in);

	// Flag bit 5: a sequence number follows
	sf.seq = 0;
	if (sf.layer & (1 << 5)) {
		deserialize_long_long_io(geoms, &sf.seq, geompos_in);
	}

	sf.tippecanoe_minzoom = -1;
	sf.tippecanoe_maxzoom = -1;
	sf.id = 0;
	sf.has_id = false;
	// Flag bit 1: per-feature minzoom; bit 0: per-feature maxzoom;
	// bit 2: a feature id
	if (sf.layer & (1 << 1)) {
		deserialize_int_io(geoms, &sf.tippecanoe_minzoom, geompos_in);
	}
	if (sf.layer & (1 << 0)) {
		deserialize_int_io(geoms, &sf.tippecanoe_maxzoom, geompos_in);
	}
	if (sf.layer & (1 << 2)) {
		sf.has_id = true;
		deserialize_ulong_long_io(geoms, &sf.id, geompos_in);
	}

	deserialize_int_io(geoms, &sf.segment, geompos_in);

	sf.index = 0;
	sf.extent = 0;

	sf.geometry = decode_geometry(geoms, geompos_in, z, tx, ty, sf.bbox, initial_x[sf.segment], initial_y[sf.segment]);
	// Flag bit 4: a spatial index follows; bit 3: an extent
	if (sf.layer & (1 << 4)) {
		deserialize_ulong_long_io(geoms, &sf.index, geompos_in);
	}
	if (sf.layer & (1 << 3)) {
		deserialize_long_long_io(geoms, &sf.extent, geompos_in);
	}

	// Shift the six flag bits off to recover the actual layer id
	sf.layer >>= 6;

	sf.metapos = 0;
	{
		int m;
		deserialize_int_io(geoms, &m, geompos_in);
		sf.m = m;
	}
	if (sf.m != 0) {
		deserialize_long_long_io(geoms, &sf.metapos, geompos_in);
	}

	if (sf.metapos >= 0) {
		// Attributes live in the metadata pool at this position
		char *meta = metabase + sf.metapos + meta_off[sf.segment];

		for (size_t i = 0; i < sf.m; i++) {
			long long k, v;
			deserialize_long_long(&meta, &k);
			deserialize_long_long(&meta, &v);
			sf.keys.push_back(k);
			sf.values.push_back(v);
		}
	} else {
		// Attributes were serialized inline in the geometry stream
		for (size_t i = 0; i < sf.m; i++) {
			long long k, v;
			deserialize_long_long_io(geoms, &k, geompos_in);
			deserialize_long_long_io(geoms, &v, geompos_in);
			sf.keys.push_back(k);
			sf.values.push_back(v);
		}
	}

	deserialize_byte_io(geoms, &sf.feature_minzoom, geompos_in);

	return sf;
}
|
||||
|
11
serial.hpp
11
serial.hpp
@ -27,6 +27,11 @@ int deserialize_ulong_long_io(FILE *f, unsigned long long *n, long long *geompos
|
||||
int deserialize_uint_io(FILE *f, unsigned *n, long long *geompos);
|
||||
int deserialize_byte_io(FILE *f, signed char *n, long long *geompos);
|
||||
|
||||
struct serial_val {
|
||||
int type;
|
||||
std::string s;
|
||||
};
|
||||
|
||||
struct serial_feature {
|
||||
long long layer;
|
||||
int segment;
|
||||
@ -52,8 +57,14 @@ struct serial_feature {
|
||||
std::vector<long long> keys;
|
||||
std::vector<long long> values;
|
||||
long long metapos;
|
||||
|
||||
// XXX This isn't serialized. Should it be here?
|
||||
long long bbox[4];
|
||||
std::vector<std::string> full_keys;
|
||||
std::vector<serial_val> full_values;
|
||||
};
|
||||
|
||||
void serialize_feature(FILE *geomfile, serial_feature *sf, long long *geompos, const char *fname, long long wx, long long wy, bool include_minzoom);
|
||||
serial_feature deserialize_feature(FILE *geoms, long long *geompos_in, char *metabase, long long *meta_off, unsigned z, unsigned tx, unsigned ty, unsigned *initial_x, unsigned *initial_y);
|
||||
|
||||
#endif
|
||||
|
3
tests/filter/remove
Executable file
3
tests/filter/remove
Executable file
@ -0,0 +1,3 @@
|
||||
#!/bin/sh
|
||||
|
||||
sed 's/"layer": "[^"]*",*//'
|
3
tests/filter/rename
Executable file
3
tests/filter/rename
Executable file
@ -0,0 +1,3 @@
|
||||
#!/bin/sh
|
||||
|
||||
sed 's/"layer": "[^"]*"/"layer": "renamed"/'
|
3
tests/filter/rename2
Executable file
3
tests/filter/rename2
Executable file
@ -0,0 +1,3 @@
|
||||
#!/bin/sh
|
||||
|
||||
sed 's/"layer": "[^"]*"/"layer": "renamed_again"/'
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@ -4,7 +4,7 @@
|
||||
"center": "-122.299805,37.892187,12",
|
||||
"description": "macarthur description",
|
||||
"format": "pbf",
|
||||
"json": "{\"vector_layers\": [ { \"id\": \"macarthur\", \"description\": \"\", \"minzoom\": 5, \"maxzoom\": 11, \"fields\": {\"FULLNAME\": \"String\", \"LINEARID\": \"String\", \"MTFCC\": \"String\", \"RTTYP\": \"String\"} } ],\"tilestats\": {\"layerCount\": 1,\"layers\": [{\"layer\": \"macarthur\",\"count\": 0,\"geometry\": \"Point\",\"attributeCount\": 4,\"attributes\": [{\"attribute\": \"FULLNAME\",\"count\": 5,\"type\": \"string\",\"values\": [\"Macarthur\",\"Macarthur Blvd\",\"Macarthur Fwy\",\"W Macarthur\",\"W Macarthur Blvd\"]},{\"attribute\": \"LINEARID\",\"count\": 43,\"type\": \"string\",\"values\": [\"1102155930810\",\"1102156217102\",\"1102156241736\",\"1102156248968\",\"1102156510290\",\"1102157509691\",\"1102157651658\",\"1102406970092\",\"1102406970093\",\"1102406970094\",\"1102406970095\",\"1102407366406\",\"1102638069562\",\"1102638078801\",\"1102654601627\",\"1102654601663\",\"1102654602215\",\"1102954189105\",\"1102954918511\",\"1103690383700\",\"1103690474249\",\"1103690474250\",\"1103690483026\",\"1103690483032\",\"1103717593123\",\"1104469713187\",\"1104469713198\",\"1104474748623\",\"1104475134288\",\"1104475134436\",\"1104485605278\",\"1104485645649\",\"1104485773833\",\"1104486090991\",\"1104486392881\",\"1105089436004\",\"1105089465114\",\"1105089465116\",\"1105281275434\",\"1105281275687\",\"1105281275688\",\"1105281275689\",\"1105281275692\"]},{\"attribute\": \"MTFCC\",\"count\": 2,\"type\": \"string\",\"values\": [\"S1100\",\"S1400\"]},{\"attribute\": \"RTTYP\",\"count\": 1,\"type\": \"string\",\"values\": [\"M\"]}]}]}}",
|
||||
"json": "{\"vector_layers\": [ { \"id\": \"macarthur\", \"description\": \"\", \"minzoom\": 5, \"maxzoom\": 11, \"fields\": {\"FULLNAME\": \"String\", \"LINEARID\": \"String\", \"MTFCC\": \"String\", \"RTTYP\": \"String\"} } ],\"tilestats\": {\"layerCount\": 1,\"layers\": [{\"layer\": \"macarthur\",\"count\": 169,\"geometry\": \"LineString\",\"attributeCount\": 4,\"attributes\": [{\"attribute\": \"FULLNAME\",\"count\": 5,\"type\": \"string\",\"values\": [\"Macarthur\",\"Macarthur Blvd\",\"Macarthur Fwy\",\"W Macarthur\",\"W Macarthur Blvd\"]},{\"attribute\": \"LINEARID\",\"count\": 43,\"type\": \"string\",\"values\": [\"1102155930810\",\"1102156217102\",\"1102156241736\",\"1102156248968\",\"1102156510290\",\"1102157509691\",\"1102157651658\",\"1102406970092\",\"1102406970093\",\"1102406970094\",\"1102406970095\",\"1102407366406\",\"1102638069562\",\"1102638078801\",\"1102654601627\",\"1102654601663\",\"1102654602215\",\"1102954189105\",\"1102954918511\",\"1103690383700\",\"1103690474249\",\"1103690474250\",\"1103690483026\",\"1103690483032\",\"1103717593123\",\"1104469713187\",\"1104469713198\",\"1104474748623\",\"1104475134288\",\"1104475134436\",\"1104485605278\",\"1104485645649\",\"1104485773833\",\"1104486090991\",\"1104486392881\",\"1105089436004\",\"1105089465114\",\"1105089465116\",\"1105281275434\",\"1105281275687\",\"1105281275688\",\"1105281275689\",\"1105281275692\"]},{\"attribute\": \"MTFCC\",\"count\": 2,\"type\": \"string\",\"values\": [\"S1100\",\"S1400\"]},{\"attribute\": \"RTTYP\",\"count\": 1,\"type\": \"string\",\"values\": [\"M\"]}]}]}}",
|
||||
"maxzoom": "12",
|
||||
"minzoom": "0",
|
||||
"name": "macarthur name",
|
||||
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@ -3,7 +3,7 @@
|
||||
"center": "-122.167969,37.833010,10",
|
||||
"description": "tests/join-population/macarthur.mbtiles",
|
||||
"format": "pbf",
|
||||
"json": "{\"vector_layers\": [ { \"id\": \"macarthur\", \"description\": \"\", \"minzoom\": 5, \"maxzoom\": 10, \"fields\": {\"FULLNAME\": \"String\", \"LINEARID\": \"String\", \"MTFCC\": \"String\", \"RTTYP\": \"String\"} } ],\"tilestats\": {\"layerCount\": 1,\"layers\": [{\"layer\": \"macarthur\",\"count\": 0,\"geometry\": \"Point\",\"attributeCount\": 4,\"attributes\": [{\"attribute\": \"FULLNAME\",\"count\": 3,\"type\": \"string\",\"values\": [\"Macarthur\",\"Macarthur Fwy\",\"W Macarthur\"]},{\"attribute\": \"LINEARID\",\"count\": 4,\"type\": \"string\",\"values\": [\"1102156510290\",\"1104486392881\",\"first\",\"second\"]},{\"attribute\": \"MTFCC\",\"count\": 2,\"type\": \"string\",\"values\": [\"S1100\",\"S1400\"]},{\"attribute\": \"RTTYP\",\"count\": 1,\"type\": \"string\",\"values\": [\"M\"]}]}]}}",
|
||||
"json": "{\"vector_layers\": [ { \"id\": \"macarthur\", \"description\": \"\", \"minzoom\": 5, \"maxzoom\": 10, \"fields\": {\"FULLNAME\": \"String\", \"LINEARID\": \"String\", \"MTFCC\": \"String\", \"RTTYP\": \"String\"} } ],\"tilestats\": {\"layerCount\": 1,\"layers\": [{\"layer\": \"macarthur\",\"count\": 90,\"geometry\": \"LineString\",\"attributeCount\": 4,\"attributes\": [{\"attribute\": \"FULLNAME\",\"count\": 3,\"type\": \"string\",\"values\": [\"Macarthur\",\"Macarthur Fwy\",\"W Macarthur\"]},{\"attribute\": \"LINEARID\",\"count\": 4,\"type\": \"string\",\"values\": [\"1102156510290\",\"1104486392881\",\"first\",\"second\"]},{\"attribute\": \"MTFCC\",\"count\": 2,\"type\": \"string\",\"values\": [\"S1100\",\"S1400\"]},{\"attribute\": \"RTTYP\",\"count\": 1,\"type\": \"string\",\"values\": [\"M\"]}]}]}}",
|
||||
"maxzoom": "10",
|
||||
"minzoom": "5",
|
||||
"name": "tests/join-population/macarthur.mbtiles",
|
||||
|
4858
tests/muni/decode/multi.mbtiles.pipeline.json
Normal file
4858
tests/muni/decode/multi.mbtiles.pipeline.json
Normal file
File diff suppressed because it is too large
Load Diff
1772
tests/ne_110m_populated_places/out/-yNAME_-Ccat_-z5.json
Normal file
1772
tests/ne_110m_populated_places/out/-yNAME_-Ccat_-z5.json
Normal file
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
1772
tests/ne_110m_populated_places/out/-yNAME_-z5_-ccat.json
Normal file
1772
tests/ne_110m_populated_places/out/-yNAME_-z5_-ccat.json
Normal file
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -92,12 +92,6 @@ void handle(std::string message, int z, unsigned x, unsigned y, std::map<std::st
|
||||
}
|
||||
}
|
||||
|
||||
if (layermap.count(layer.name) == 0) {
|
||||
layermap.insert(std::pair<std::string, layermap_entry>(layer.name, layermap_entry(layermap.size())));
|
||||
auto file_keys = layermap.find(layer.name);
|
||||
file_keys->second.minzoom = z;
|
||||
file_keys->second.maxzoom = z;
|
||||
}
|
||||
auto file_keys = layermap.find(layer.name);
|
||||
|
||||
for (size_t f = 0; f < layer.features.size(); f++) {
|
||||
@ -207,6 +201,13 @@ void handle(std::string message, int z, unsigned x, unsigned y, std::map<std::st
|
||||
}
|
||||
|
||||
if (matched || !ifmatched) {
|
||||
if (file_keys == layermap.end()) {
|
||||
layermap.insert(std::pair<std::string, layermap_entry>(layer.name, layermap_entry(layermap.size())));
|
||||
file_keys = layermap.find(layer.name);
|
||||
file_keys->second.minzoom = z;
|
||||
file_keys->second.maxzoom = z;
|
||||
}
|
||||
|
||||
// To keep attributes in their original order instead of alphabetical
|
||||
for (auto k : key_order) {
|
||||
auto fa = attributes.find(k);
|
||||
@ -237,6 +238,14 @@ void handle(std::string message, int z, unsigned x, unsigned y, std::map<std::st
|
||||
if (z > file_keys->second.maxzoom) {
|
||||
file_keys->second.maxzoom = z;
|
||||
}
|
||||
|
||||
if (feat.type == mvt_point) {
|
||||
file_keys->second.points++;
|
||||
} else if (feat.type == mvt_linestring) {
|
||||
file_keys->second.lines++;
|
||||
} else if (feat.type == mvt_polygon) {
|
||||
file_keys->second.polygons++;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -976,7 +985,10 @@ void readcsv(char *fn, std::vector<std::string> &header, std::map<std::string, s
|
||||
}
|
||||
}
|
||||
|
||||
fclose(f);
|
||||
if (fclose(f) != 0) {
|
||||
perror("fclose");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
|
||||
int main(int argc, char **argv) {
|
||||
|
660
tile.cpp
660
tile.cpp
@ -1,3 +1,7 @@
|
||||
#ifdef __APPLE__
|
||||
#define _DARWIN_UNLIMITED_STREAMS
|
||||
#endif
|
||||
|
||||
#include <iostream>
|
||||
#include <fstream>
|
||||
#include <string>
|
||||
@ -20,6 +24,8 @@
|
||||
#include <pthread.h>
|
||||
#include <errno.h>
|
||||
#include <time.h>
|
||||
#include <fcntl.h>
|
||||
#include <sys/wait.h>
|
||||
#include "mvt.hpp"
|
||||
#include "mbtiles.hpp"
|
||||
#include "dirtiles.hpp"
|
||||
@ -30,6 +36,13 @@
|
||||
#include "serial.hpp"
|
||||
#include "options.hpp"
|
||||
#include "main.hpp"
|
||||
#include "write_json.hpp"
|
||||
|
||||
extern "C" {
|
||||
#include "jsonpull/jsonpull.h"
|
||||
}
|
||||
|
||||
#include "plugin.hpp"
|
||||
|
||||
#define CMD_BITS 3
|
||||
|
||||
@ -61,13 +74,13 @@ bool draws_something(drawvec &geom) {
|
||||
|
||||
int metacmp(int m1, const std::vector<long long> &keys1, const std::vector<long long> &values1, char *stringpool1, int m2, const std::vector<long long> &keys2, const std::vector<long long> &values2, char *stringpool2);
|
||||
int coalindexcmp(const struct coalesce *c1, const struct coalesce *c2);
|
||||
static int is_integer(const char *s, long long *v);
|
||||
|
||||
struct coalesce {
|
||||
char *meta;
|
||||
char *stringpool;
|
||||
std::vector<long long> keys;
|
||||
std::vector<long long> values;
|
||||
std::vector<std::string> full_keys;
|
||||
std::vector<serial_val> full_values;
|
||||
drawvec geom;
|
||||
unsigned long long index;
|
||||
unsigned long long index2;
|
||||
@ -135,40 +148,10 @@ mvt_value retrieve_string(long long off, char *stringpool, int *otype) {
|
||||
*otype = type;
|
||||
}
|
||||
|
||||
mvt_value tv;
|
||||
if (type == mvt_double) {
|
||||
long long v;
|
||||
if (is_integer(s, &v)) {
|
||||
if (v >= 0) {
|
||||
tv.type = mvt_int;
|
||||
tv.numeric_value.int_value = v;
|
||||
} else {
|
||||
tv.type = mvt_sint;
|
||||
tv.numeric_value.sint_value = v;
|
||||
}
|
||||
} else {
|
||||
double d = atof(s);
|
||||
|
||||
if (d == (float) d) {
|
||||
tv.type = mvt_float;
|
||||
tv.numeric_value.float_value = d;
|
||||
} else {
|
||||
tv.type = mvt_double;
|
||||
tv.numeric_value.double_value = d;
|
||||
}
|
||||
}
|
||||
} else if (type == mvt_bool) {
|
||||
tv.type = mvt_bool;
|
||||
tv.numeric_value.bool_value = (s[0] == 't');
|
||||
} else {
|
||||
tv.type = mvt_string;
|
||||
tv.string_value = s;
|
||||
}
|
||||
|
||||
return tv;
|
||||
return stringified_to_mvt_value(type, s);
|
||||
}
|
||||
|
||||
void decode_meta(int m, std::vector<long long> &metakeys, std::vector<long long> &metavals, char *stringpool, mvt_layer &layer, mvt_feature &feature) {
|
||||
void decode_meta(int m, std::vector<long long> const &metakeys, std::vector<long long> const &metavals, char *stringpool, mvt_layer &layer, mvt_feature &feature) {
|
||||
int i;
|
||||
for (i = 0; i < m; i++) {
|
||||
int otype;
|
||||
@ -222,37 +205,6 @@ int metacmp(int m1, const std::vector<long long> &keys1, const std::vector<long
|
||||
}
|
||||
}
|
||||
|
||||
static int is_integer(const char *s, long long *v) {
|
||||
errno = 0;
|
||||
char *endptr;
|
||||
|
||||
*v = strtoll(s, &endptr, 0);
|
||||
if (*v == 0 && errno != 0) {
|
||||
return 0;
|
||||
}
|
||||
if ((*v == LLONG_MIN || *v == LLONG_MAX) && (errno == ERANGE)) {
|
||||
return 0;
|
||||
}
|
||||
if (*endptr != '\0') {
|
||||
// Special case: If it is an integer followed by .0000 or similar,
|
||||
// it is still an integer
|
||||
|
||||
if (*endptr != '.') {
|
||||
return 0;
|
||||
}
|
||||
endptr++;
|
||||
for (; *endptr != '\0'; endptr++) {
|
||||
if (*endptr != '0') {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
void rewrite(drawvec &geom, int z, int nextzoom, int maxzoom, long long *bbox, unsigned tx, unsigned ty, int buffer, int line_detail, int *within, long long *geompos, FILE **geomfile, const char *fname, signed char t, int layer, long long metastart, signed char feature_minzoom, int child_shards, int max_zoom_increment, long long seq, int tippecanoe_minzoom, int tippecanoe_maxzoom, int segment, unsigned *initial_x, unsigned *initial_y, int m, std::vector<long long> &metakeys, std::vector<long long> &metavals, bool has_id, unsigned long long id, unsigned long long index, long long extent) {
|
||||
if (geom.size() > 0 && (nextzoom <= maxzoom || additional[A_EXTEND_ZOOMS])) {
|
||||
int xo, yo;
|
||||
@ -365,8 +317,9 @@ struct partial {
|
||||
std::vector<drawvec> geoms;
|
||||
std::vector<long long> keys;
|
||||
std::vector<long long> values;
|
||||
std::vector<std::string> full_keys;
|
||||
std::vector<serial_val> full_values;
|
||||
std::vector<ssize_t> arc_polygon;
|
||||
char *meta;
|
||||
long long layer;
|
||||
long long original_seq;
|
||||
unsigned long long index;
|
||||
@ -1221,16 +1174,236 @@ struct write_tile_args {
|
||||
long long minextent_out;
|
||||
double fraction;
|
||||
double fraction_out;
|
||||
const char *prefilter;
|
||||
const char *postfilter;
|
||||
bool still_dropping;
|
||||
int wrote_zoom;
|
||||
size_t tiling_seg;
|
||||
};
|
||||
|
||||
long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *stringpool, int z, unsigned tx, unsigned ty, int detail, int min_detail, int basezoom, sqlite3 *outdb, const char *outdir, double droprate, int buffer, const char *fname, FILE **geomfile, int minzoom, int maxzoom, double todo, volatile long long *along, long long alongminus, double gamma, int child_shards, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, volatile int *running, double simplification, std::vector<std::map<std::string, layermap_entry>> *layermaps, std::vector<std::vector<std::string>> *layer_unmaps, size_t pass, size_t passes, unsigned long long mingap, long long minextent, double fraction, write_tile_args *arg) {
|
||||
bool clip_to_tile(serial_feature &sf, int z, long long buffer) {
|
||||
int quick = quick_check(sf.bbox, z, buffer);
|
||||
if (quick == 0) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (z == 0) {
|
||||
if (sf.bbox[0] < 0 || sf.bbox[2] > 1LL << 32) {
|
||||
// If the geometry extends off the edge of the world, concatenate on another copy
|
||||
// shifted by 360 degrees, and then make sure both copies get clipped down to size.
|
||||
|
||||
size_t n = sf.geometry.size();
|
||||
|
||||
if (sf.bbox[0] < 0) {
|
||||
for (size_t i = 0; i < n; i++) {
|
||||
sf.geometry.push_back(draw(sf.geometry[i].op, sf.geometry[i].x + (1LL << 32), sf.geometry[i].y));
|
||||
}
|
||||
}
|
||||
|
||||
if (sf.bbox[2] > 1LL << 32) {
|
||||
for (size_t i = 0; i < n; i++) {
|
||||
sf.geometry.push_back(draw(sf.geometry[i].op, sf.geometry[i].x - (1LL << 32), sf.geometry[i].y));
|
||||
}
|
||||
}
|
||||
|
||||
sf.bbox[0] = 0;
|
||||
sf.bbox[2] = 1LL << 32;
|
||||
|
||||
quick = -1;
|
||||
}
|
||||
}
|
||||
|
||||
// Can't accept the quick check if guaranteeing no duplication, since the
|
||||
// overlap might have been in the buffer.
|
||||
if (quick != 1 || prevent[P_DUPLICATION]) {
|
||||
drawvec clipped;
|
||||
|
||||
// Do the clipping, even if we are going to include the whole feature,
|
||||
// so that we can know whether the feature itself, or only the feature's
|
||||
// bounding box, touches the tile.
|
||||
|
||||
if (sf.t == VT_LINE) {
|
||||
clipped = clip_lines(sf.geometry, z, buffer);
|
||||
}
|
||||
if (sf.t == VT_POLYGON) {
|
||||
clipped = simple_clip_poly(sf.geometry, z, buffer);
|
||||
}
|
||||
if (sf.t == VT_POINT) {
|
||||
clipped = clip_point(sf.geometry, z, buffer);
|
||||
}
|
||||
|
||||
clipped = remove_noop(clipped, sf.t, 0);
|
||||
|
||||
// Must clip at z0 even if we don't want clipping, to handle features
|
||||
// that are duplicated across the date line
|
||||
|
||||
if (prevent[P_DUPLICATION] && z != 0) {
|
||||
if (point_within_tile((sf.bbox[0] + sf.bbox[2]) / 2, (sf.bbox[1] + sf.bbox[3]) / 2, z, buffer)) {
|
||||
// sf.geometry is unchanged
|
||||
} else {
|
||||
sf.geometry.clear();
|
||||
}
|
||||
} else if (prevent[P_CLIPPING] && z != 0) {
|
||||
if (clipped.size() == 0) {
|
||||
sf.geometry.clear();
|
||||
} else {
|
||||
// sf.geometry is unchanged
|
||||
}
|
||||
} else {
|
||||
sf.geometry = clipped;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
serial_feature next_feature(FILE *geoms, long long *geompos_in, char *metabase, long long *meta_off, int z, unsigned tx, unsigned ty, unsigned *initial_x, unsigned *initial_y, long long *original_features, long long *unclipped_features, int nextzoom, int maxzoom, int minzoom, int max_zoom_increment, size_t pass, size_t passes, volatile long long *along, long long alongminus, int buffer, int *within, bool *first_time, int line_detail, FILE **geomfile, long long *geompos, volatile double *oprogress, double todo, const char *fname, int child_shards) {
|
||||
while (1) {
|
||||
serial_feature sf = deserialize_feature(geoms, geompos_in, metabase, meta_off, z, tx, ty, initial_x, initial_y);
|
||||
if (sf.t < 0) {
|
||||
return sf;
|
||||
}
|
||||
|
||||
double progress = floor(((((*geompos_in + *along - alongminus) / (double) todo) + (pass - (2 - passes))) / passes + z) / (maxzoom + 1) * 1000) / 10;
|
||||
if (progress >= *oprogress + 0.1) {
|
||||
if (!quiet) {
|
||||
fprintf(stderr, " %3.1f%% %d/%u/%u \r", progress, z, tx, ty);
|
||||
}
|
||||
*oprogress = progress;
|
||||
}
|
||||
|
||||
(*original_features)++;
|
||||
|
||||
if (clip_to_tile(sf, z, buffer)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (sf.geometry.size() > 0) {
|
||||
(*unclipped_features)++;
|
||||
}
|
||||
|
||||
if (*first_time && pass == 1) { /* only write out the next zoom once, even if we retry */
|
||||
if (sf.tippecanoe_maxzoom == -1 || sf.tippecanoe_maxzoom >= nextzoom) {
|
||||
rewrite(sf.geometry, z, nextzoom, maxzoom, sf.bbox, tx, ty, buffer, line_detail, within, geompos, geomfile, fname, sf.t, sf.layer, sf.metapos, sf.feature_minzoom, child_shards, max_zoom_increment, sf.seq, sf.tippecanoe_minzoom, sf.tippecanoe_maxzoom, sf.segment, initial_x, initial_y, sf.m, sf.keys, sf.values, sf.has_id, sf.id, sf.index, sf.extent);
|
||||
}
|
||||
}
|
||||
|
||||
if (z < minzoom) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (sf.tippecanoe_minzoom != -1 && z < sf.tippecanoe_minzoom) {
|
||||
continue;
|
||||
}
|
||||
if (sf.tippecanoe_maxzoom != -1 && z > sf.tippecanoe_maxzoom) {
|
||||
continue;
|
||||
}
|
||||
if (sf.tippecanoe_minzoom == -1 && z < sf.feature_minzoom) {
|
||||
continue;
|
||||
}
|
||||
|
||||
return sf;
|
||||
}
|
||||
}
|
||||
|
||||
struct run_prefilter_args {
|
||||
FILE *geoms;
|
||||
long long *geompos_in;
|
||||
char *metabase;
|
||||
long long *meta_off;
|
||||
int z;
|
||||
unsigned tx;
|
||||
unsigned ty;
|
||||
unsigned *initial_x;
|
||||
unsigned *initial_y;
|
||||
long long *original_features;
|
||||
long long *unclipped_features;
|
||||
int nextzoom;
|
||||
int maxzoom;
|
||||
int minzoom;
|
||||
int max_zoom_increment;
|
||||
size_t pass;
|
||||
size_t passes;
|
||||
volatile long long *along;
|
||||
long long alongminus;
|
||||
int buffer;
|
||||
int *within;
|
||||
bool *first_time;
|
||||
int line_detail;
|
||||
FILE **geomfile;
|
||||
long long *geompos;
|
||||
volatile double *oprogress;
|
||||
double todo;
|
||||
const char *fname;
|
||||
int child_shards;
|
||||
std::vector<std::vector<std::string>> *layer_unmaps;
|
||||
char *stringpool;
|
||||
long long *pool_off;
|
||||
FILE *prefilter_fp;
|
||||
};
|
||||
|
||||
void *run_prefilter(void *v) {
|
||||
run_prefilter_args *rpa = (run_prefilter_args *) v;
|
||||
|
||||
while (1) {
|
||||
serial_feature sf = next_feature(rpa->geoms, rpa->geompos_in, rpa->metabase, rpa->meta_off, rpa->z, rpa->tx, rpa->ty, rpa->initial_x, rpa->initial_y, rpa->original_features, rpa->unclipped_features, rpa->nextzoom, rpa->maxzoom, rpa->minzoom, rpa->max_zoom_increment, rpa->pass, rpa->passes, rpa->along, rpa->alongminus, rpa->buffer, rpa->within, rpa->first_time, rpa->line_detail, rpa->geomfile, rpa->geompos, rpa->oprogress, rpa->todo, rpa->fname, rpa->child_shards);
|
||||
if (sf.t < 0) {
|
||||
break;
|
||||
}
|
||||
|
||||
mvt_layer tmp_layer;
|
||||
tmp_layer.extent = 1LL << 32;
|
||||
tmp_layer.name = (*(rpa->layer_unmaps))[sf.segment][sf.layer];
|
||||
|
||||
if (sf.t == VT_POLYGON) {
|
||||
sf.geometry = close_poly(sf.geometry);
|
||||
}
|
||||
|
||||
mvt_feature tmp_feature;
|
||||
tmp_feature.type = sf.t;
|
||||
tmp_feature.geometry = to_feature(sf.geometry);
|
||||
tmp_feature.id = sf.id;
|
||||
tmp_feature.has_id = sf.has_id;
|
||||
|
||||
// Offset from tile coordinates back to world coordinates
|
||||
unsigned sx = 0, sy = 0;
|
||||
if (rpa->z != 0) {
|
||||
sx = rpa->tx << (32 - rpa->z);
|
||||
sy = rpa->ty << (32 - rpa->z);
|
||||
}
|
||||
for (size_t i = 0; i < tmp_feature.geometry.size(); i++) {
|
||||
tmp_feature.geometry[i].x += sx;
|
||||
tmp_feature.geometry[i].y += sy;
|
||||
}
|
||||
|
||||
decode_meta(sf.m, sf.keys, sf.values, rpa->stringpool + rpa->pool_off[sf.segment], tmp_layer, tmp_feature);
|
||||
tmp_layer.features.push_back(tmp_feature);
|
||||
|
||||
layer_to_geojson(rpa->prefilter_fp, tmp_layer, 0, 0, 0, false, true, false, sf.index, sf.seq, sf.extent, true);
|
||||
}
|
||||
|
||||
if (fclose(rpa->prefilter_fp) != 0) {
|
||||
if (errno == EPIPE) {
|
||||
static bool warned = false;
|
||||
if (!warned) {
|
||||
fprintf(stderr, "Warning: broken pipe in prefilter\n");
|
||||
warned = true;
|
||||
}
|
||||
} else {
|
||||
perror("fclose output to prefilter");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
return NULL;
|
||||
}
|
||||
|
||||
long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *stringpool, int z, unsigned tx, unsigned ty, int detail, int min_detail, int basezoom, sqlite3 *outdb, const char *outdir, double droprate, int buffer, const char *fname, FILE **geomfile, int minzoom, int maxzoom, double todo, volatile long long *along, long long alongminus, double gamma, int child_shards, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, volatile int *running, double simplification, std::vector<std::map<std::string, layermap_entry>> *layermaps, std::vector<std::vector<std::string>> *layer_unmaps, size_t tiling_seg, size_t pass, size_t passes, unsigned long long mingap, long long minextent, double fraction, const char *prefilter, const char *postfilter, write_tile_args *arg) {
|
||||
int line_detail;
|
||||
double merge_fraction = 1;
|
||||
double mingap_fraction = 1;
|
||||
double minextent_fraction = 1;
|
||||
|
||||
static volatile double oprogress = 0;
|
||||
long long og = *geompos_in;
|
||||
|
||||
// XXX is there a way to do this without floating point?
|
||||
@ -1253,7 +1426,6 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
|
||||
}
|
||||
}
|
||||
|
||||
static volatile double oprogress = 0;
|
||||
bool has_polygons = false;
|
||||
|
||||
bool first_time = true;
|
||||
@ -1291,204 +1463,97 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
|
||||
*geompos_in = og;
|
||||
}
|
||||
|
||||
int prefilter_write = -1, prefilter_read = -1;
|
||||
pid_t prefilter_pid = 0;
|
||||
FILE *prefilter_fp = NULL;
|
||||
pthread_t prefilter_writer;
|
||||
run_prefilter_args rpa; // here so it stays in scope until joined
|
||||
FILE *prefilter_read_fp = NULL;
|
||||
json_pull *prefilter_jp = NULL;
|
||||
|
||||
if (prefilter != NULL) {
|
||||
setup_filter(prefilter, &prefilter_write, &prefilter_read, &prefilter_pid, z, tx, ty);
|
||||
prefilter_fp = fdopen(prefilter_write, "w");
|
||||
if (prefilter_fp == NULL) {
|
||||
perror("freopen prefilter");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
rpa.geoms = geoms;
|
||||
rpa.geompos_in = geompos_in;
|
||||
rpa.metabase = metabase;
|
||||
rpa.meta_off = meta_off;
|
||||
rpa.z = z;
|
||||
rpa.tx = tx;
|
||||
rpa.ty = ty;
|
||||
rpa.initial_x = initial_x;
|
||||
rpa.initial_y = initial_y;
|
||||
rpa.original_features = &original_features;
|
||||
rpa.unclipped_features = &unclipped_features;
|
||||
rpa.nextzoom = nextzoom;
|
||||
rpa.maxzoom = maxzoom;
|
||||
rpa.minzoom = minzoom;
|
||||
rpa.max_zoom_increment = max_zoom_increment;
|
||||
rpa.pass = pass;
|
||||
rpa.passes = passes;
|
||||
rpa.along = along;
|
||||
rpa.alongminus = alongminus;
|
||||
rpa.buffer = buffer;
|
||||
rpa.within = within;
|
||||
rpa.first_time = &first_time;
|
||||
rpa.line_detail = line_detail;
|
||||
rpa.geomfile = geomfile;
|
||||
rpa.geompos = geompos;
|
||||
rpa.oprogress = &oprogress;
|
||||
rpa.todo = todo;
|
||||
rpa.fname = fname;
|
||||
rpa.child_shards = child_shards;
|
||||
rpa.prefilter_fp = prefilter_fp;
|
||||
rpa.layer_unmaps = layer_unmaps;
|
||||
rpa.stringpool = stringpool;
|
||||
rpa.pool_off = pool_off;
|
||||
|
||||
if (pthread_create(&prefilter_writer, NULL, run_prefilter, &rpa) != 0) {
|
||||
perror("pthread_create (prefilter writer)");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
prefilter_read_fp = fdopen(prefilter_read, "r");
|
||||
if (prefilter_read_fp == NULL) {
|
||||
perror("fdopen prefilter output");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
prefilter_jp = json_begin_file(prefilter_read_fp);
|
||||
}
|
||||
|
||||
while (1) {
|
||||
signed char t;
|
||||
deserialize_byte_io(geoms, &t, geompos_in);
|
||||
if (t < 0) {
|
||||
serial_feature sf;
|
||||
|
||||
if (prefilter == NULL) {
|
||||
sf = next_feature(geoms, geompos_in, metabase, meta_off, z, tx, ty, initial_x, initial_y, &original_features, &unclipped_features, nextzoom, maxzoom, minzoom, max_zoom_increment, pass, passes, along, alongminus, buffer, within, &first_time, line_detail, geomfile, geompos, &oprogress, todo, fname, child_shards);
|
||||
} else {
|
||||
sf = parse_feature(prefilter_jp, z, tx, ty, layermaps, tiling_seg, layer_unmaps, postfilter != NULL);
|
||||
}
|
||||
|
||||
if (sf.t < 0) {
|
||||
break;
|
||||
}
|
||||
|
||||
long long xlayer;
|
||||
deserialize_long_long_io(geoms, &xlayer, geompos_in);
|
||||
|
||||
long long original_seq = 0;
|
||||
if (xlayer & (1 << 5)) {
|
||||
deserialize_long_long_io(geoms, &original_seq, geompos_in);
|
||||
}
|
||||
|
||||
int tippecanoe_minzoom = -1, tippecanoe_maxzoom = -1;
|
||||
unsigned long long id = 0;
|
||||
bool has_id = false;
|
||||
if (xlayer & (1 << 1)) {
|
||||
deserialize_int_io(geoms, &tippecanoe_minzoom, geompos_in);
|
||||
}
|
||||
if (xlayer & (1 << 0)) {
|
||||
deserialize_int_io(geoms, &tippecanoe_maxzoom, geompos_in);
|
||||
}
|
||||
if (xlayer & (1 << 2)) {
|
||||
has_id = true;
|
||||
deserialize_ulong_long_io(geoms, &id, geompos_in);
|
||||
}
|
||||
long long layer = xlayer >> 6;
|
||||
|
||||
int segment;
|
||||
deserialize_int_io(geoms, &segment, geompos_in);
|
||||
|
||||
long long bbox[4];
|
||||
unsigned long long index = 0;
|
||||
long long extent = 0;
|
||||
|
||||
drawvec geom = decode_geometry(geoms, geompos_in, z, tx, ty, line_detail, bbox, initial_x[segment], initial_y[segment]);
|
||||
if (xlayer & (1 << 4)) {
|
||||
deserialize_ulong_long_io(geoms, &index, geompos_in);
|
||||
}
|
||||
if (xlayer & (1 << 3)) {
|
||||
deserialize_long_long_io(geoms, &extent, geompos_in);
|
||||
}
|
||||
|
||||
long long metastart = 0;
|
||||
int m;
|
||||
deserialize_int_io(geoms, &m, geompos_in);
|
||||
if (m != 0) {
|
||||
deserialize_long_long_io(geoms, &metastart, geompos_in);
|
||||
}
|
||||
char *meta = NULL;
|
||||
std::vector<long long> metakeys, metavals;
|
||||
|
||||
if (metastart >= 0) {
|
||||
meta = metabase + metastart + meta_off[segment];
|
||||
|
||||
for (int i = 0; i < m; i++) {
|
||||
long long k, v;
|
||||
deserialize_long_long(&meta, &k);
|
||||
deserialize_long_long(&meta, &v);
|
||||
metakeys.push_back(k);
|
||||
metavals.push_back(v);
|
||||
}
|
||||
} else {
|
||||
for (int i = 0; i < m; i++) {
|
||||
long long k, v;
|
||||
deserialize_long_long_io(geoms, &k, geompos_in);
|
||||
deserialize_long_long_io(geoms, &v, geompos_in);
|
||||
metakeys.push_back(k);
|
||||
metavals.push_back(v);
|
||||
}
|
||||
}
|
||||
|
||||
signed char feature_minzoom;
|
||||
deserialize_byte_io(geoms, &feature_minzoom, geompos_in);
|
||||
|
||||
double progress = floor(((((*geompos_in + *along - alongminus) / (double) todo) + (pass - (2 - passes))) / passes + z) / (maxzoom + 1) * 1000) / 10;
|
||||
if (progress >= oprogress + 0.1) {
|
||||
if (!quiet) {
|
||||
fprintf(stderr, " %3.1f%% %d/%u/%u \r", progress, z, tx, ty);
|
||||
}
|
||||
oprogress = progress;
|
||||
}
|
||||
|
||||
original_features++;
|
||||
|
||||
int quick = quick_check(bbox, z, line_detail, buffer);
|
||||
if (quick == 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (z == 0) {
|
||||
if (bbox[0] < 0 || bbox[2] > 1LL << 32) {
|
||||
// If the geometry extends off the edge of the world, concatenate on another copy
|
||||
// shifted by 360 degrees, and then make sure both copies get clipped down to size.
|
||||
|
||||
size_t n = geom.size();
|
||||
|
||||
if (bbox[0] < 0) {
|
||||
for (size_t i = 0; i < n; i++) {
|
||||
geom.push_back(draw(geom[i].op, geom[i].x + (1LL << 32), geom[i].y));
|
||||
}
|
||||
}
|
||||
|
||||
if (bbox[2] > 1LL << 32) {
|
||||
for (size_t i = 0; i < n; i++) {
|
||||
geom.push_back(draw(geom[i].op, geom[i].x - (1LL << 32), geom[i].y));
|
||||
}
|
||||
}
|
||||
|
||||
bbox[0] = 0;
|
||||
bbox[2] = 1LL << 32;
|
||||
|
||||
quick = -1;
|
||||
}
|
||||
}
|
||||
|
||||
// Can't accept the quick check if guaranteeing no duplication, since the
|
||||
// overlap might have been in the buffer.
|
||||
if (quick != 1 || prevent[P_DUPLICATION]) {
|
||||
drawvec clipped;
|
||||
|
||||
// Do the clipping, even if we are going to include the whole feature,
|
||||
// so that we can know whether the feature itself, or only the feature's
|
||||
// bounding box, touches the tile.
|
||||
|
||||
if (t == VT_LINE) {
|
||||
clipped = clip_lines(geom, z, line_detail, buffer);
|
||||
}
|
||||
if (t == VT_POLYGON) {
|
||||
clipped = simple_clip_poly(geom, z, line_detail, buffer);
|
||||
}
|
||||
if (t == VT_POINT) {
|
||||
clipped = clip_point(geom, z, line_detail, buffer);
|
||||
}
|
||||
|
||||
clipped = remove_noop(clipped, t, 0);
|
||||
|
||||
// Must clip at z0 even if we don't want clipping, to handle features
|
||||
// that are duplicated across the date line
|
||||
|
||||
if (prevent[P_DUPLICATION] && z != 0) {
|
||||
if (point_within_tile((bbox[0] + bbox[2]) / 2, (bbox[1] + bbox[3]) / 2, z, line_detail, buffer)) {
|
||||
// geom is unchanged
|
||||
} else {
|
||||
geom.clear();
|
||||
}
|
||||
} else if (prevent[P_CLIPPING] && z != 0) {
|
||||
if (clipped.size() == 0) {
|
||||
geom.clear();
|
||||
} else {
|
||||
// geom is unchanged
|
||||
}
|
||||
} else {
|
||||
geom = clipped;
|
||||
}
|
||||
}
|
||||
|
||||
if (geom.size() > 0) {
|
||||
unclipped_features++;
|
||||
}
|
||||
|
||||
if (first_time && pass == 1) { /* only write out the next zoom once, even if we retry */
|
||||
if (tippecanoe_maxzoom == -1 || tippecanoe_maxzoom >= nextzoom) {
|
||||
rewrite(geom, z, nextzoom, maxzoom, bbox, tx, ty, buffer, line_detail, within, geompos, geomfile, fname, t, layer, metastart, feature_minzoom, child_shards, max_zoom_increment, original_seq, tippecanoe_minzoom, tippecanoe_maxzoom, segment, initial_x, initial_y, m, metakeys, metavals, has_id, id, index, extent);
|
||||
}
|
||||
}
|
||||
|
||||
if (z < minzoom) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (tippecanoe_minzoom != -1 && z < tippecanoe_minzoom) {
|
||||
continue;
|
||||
}
|
||||
if (tippecanoe_maxzoom != -1 && z > tippecanoe_maxzoom) {
|
||||
continue;
|
||||
}
|
||||
if (tippecanoe_minzoom == -1 && z < feature_minzoom) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (gamma > 0) {
|
||||
if (manage_gap(index, &previndex, scale, gamma, &gap)) {
|
||||
if (manage_gap(sf.index, &previndex, scale, gamma, &gap)) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
if (additional[A_DROP_DENSEST_AS_NEEDED]) {
|
||||
indices.push_back(index);
|
||||
if (index - merge_previndex < mingap) {
|
||||
indices.push_back(sf.index);
|
||||
if (sf.index - merge_previndex < mingap) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
if (additional[A_DROP_SMALLEST_AS_NEEDED]) {
|
||||
extents.push_back(extent);
|
||||
if (extent <= minextent && t != VT_POINT) {
|
||||
extents.push_back(sf.extent);
|
||||
if (sf.extent <= minextent && sf.t != VT_POINT) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
@ -1500,8 +1565,8 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
|
||||
// that standard, so that duplicates aren't reported as infinitely dense.
|
||||
|
||||
double o_density_previndex = density_previndex;
|
||||
if (!manage_gap(index, &density_previndex, scale, 1, &density_gap)) {
|
||||
spacing = (index - o_density_previndex) / scale;
|
||||
if (!manage_gap(sf.index, &density_previndex, scale, 1, &density_gap)) {
|
||||
spacing = (sf.index - o_density_previndex) / scale;
|
||||
}
|
||||
}
|
||||
|
||||
@ -1512,39 +1577,63 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
|
||||
fraction_accum -= 1;
|
||||
|
||||
bool reduced = false;
|
||||
if (t == VT_POLYGON) {
|
||||
if (sf.t == VT_POLYGON) {
|
||||
if (!prevent[P_TINY_POLYGON_REDUCTION] && !additional[A_GRID_LOW_ZOOMS]) {
|
||||
geom = reduce_tiny_poly(geom, z, line_detail, &reduced, &accum_area);
|
||||
sf.geometry = reduce_tiny_poly(sf.geometry, z, line_detail, &reduced, &accum_area);
|
||||
}
|
||||
has_polygons = true;
|
||||
}
|
||||
|
||||
if (geom.size() > 0) {
|
||||
if (sf.geometry.size() > 0) {
|
||||
partial p;
|
||||
p.geoms.push_back(geom);
|
||||
p.layer = layer;
|
||||
p.m = m;
|
||||
p.meta = meta;
|
||||
p.t = t;
|
||||
p.segment = segment;
|
||||
p.original_seq = original_seq;
|
||||
p.geoms.push_back(sf.geometry);
|
||||
p.layer = sf.layer;
|
||||
p.m = sf.m;
|
||||
p.t = sf.t;
|
||||
p.segment = sf.segment;
|
||||
p.original_seq = sf.seq;
|
||||
p.reduced = reduced;
|
||||
p.z = z;
|
||||
p.line_detail = line_detail;
|
||||
p.maxzoom = maxzoom;
|
||||
p.keys = metakeys;
|
||||
p.values = metavals;
|
||||
p.keys = sf.keys;
|
||||
p.values = sf.values;
|
||||
p.full_keys = sf.full_keys;
|
||||
p.full_values = sf.full_values;
|
||||
p.spacing = spacing;
|
||||
p.simplification = simplification;
|
||||
p.id = id;
|
||||
p.has_id = has_id;
|
||||
p.id = sf.id;
|
||||
p.has_id = sf.has_id;
|
||||
p.index2 = merge_previndex;
|
||||
p.index = index;
|
||||
p.index = sf.index;
|
||||
p.renamed = -1;
|
||||
partials.push_back(p);
|
||||
}
|
||||
|
||||
merge_previndex = index;
|
||||
merge_previndex = sf.index;
|
||||
}
|
||||
|
||||
if (prefilter != NULL) {
|
||||
json_end(prefilter_jp);
|
||||
if (fclose(prefilter_read_fp) != 0) {
|
||||
perror("close output from prefilter");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
while (1) {
|
||||
int stat_loc;
|
||||
if (waitpid(prefilter_pid, &stat_loc, 0) < 0) {
|
||||
perror("waitpid for prefilter\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (WIFEXITED(stat_loc) || WIFSIGNALED(stat_loc)) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
void *ret;
|
||||
if (pthread_join(prefilter_writer, &ret) != 0) {
|
||||
perror("pthread_join prefilter writer");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
|
||||
first_time = false;
|
||||
@ -1605,10 +1694,11 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
|
||||
c.coalesced = false;
|
||||
c.original_seq = original_seq;
|
||||
c.m = partials[i].m;
|
||||
c.meta = partials[i].meta;
|
||||
c.stringpool = stringpool + pool_off[partials[i].segment];
|
||||
c.keys = partials[i].keys;
|
||||
c.values = partials[i].values;
|
||||
c.full_keys = partials[i].full_keys;
|
||||
c.full_values = partials[i].full_values;
|
||||
c.spacing = partials[i].spacing;
|
||||
c.id = partials[i].id;
|
||||
c.has_id = partials[i].has_id;
|
||||
@ -1731,6 +1821,11 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
|
||||
feature.has_id = layer_features[x].has_id;
|
||||
|
||||
decode_meta(layer_features[x].m, layer_features[x].keys, layer_features[x].values, layer_features[x].stringpool, layer, feature);
|
||||
for (size_t a = 0; a < layer_features[x].full_keys.size(); a++) {
|
||||
serial_val sv = layer_features[x].full_values[a];
|
||||
mvt_value v = stringified_to_mvt_value(sv.type, sv.s.c_str());
|
||||
layer.tag(feature, layer_features[x].full_keys[a], v);
|
||||
}
|
||||
|
||||
if (additional[A_CALCULATE_FEATURE_DENSITY]) {
|
||||
int glow = 255;
|
||||
@ -1754,6 +1849,10 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
|
||||
}
|
||||
}
|
||||
|
||||
if (postfilter != NULL) {
|
||||
tile.layers = filter_layers(postfilter, tile.layers, z, tx, ty, layermaps, tiling_seg, layer_unmaps, 1 << line_detail);
|
||||
}
|
||||
|
||||
if (z == 0 && unclipped_features < original_features / 2) {
|
||||
fprintf(stderr, "\n\nMore than half the features were clipped away at zoom level 0.\n");
|
||||
fprintf(stderr, "Is your data in the wrong projection? It should be in WGS84/EPSG:4326.\n");
|
||||
@ -2004,7 +2103,7 @@ void *run_thread(void *vargs) {
|
||||
|
||||
// fprintf(stderr, "%d/%u/%u\n", z, x, y);
|
||||
|
||||
long long len = write_tile(geom, &geompos, arg->metabase, arg->stringpool, z, x, y, z == arg->maxzoom ? arg->full_detail : arg->low_detail, arg->min_detail, arg->basezoom, arg->outdb, arg->outdir, arg->droprate, arg->buffer, arg->fname, arg->geomfile, arg->minzoom, arg->maxzoom, arg->todo, arg->along, geompos, arg->gamma, arg->child_shards, arg->meta_off, arg->pool_off, arg->initial_x, arg->initial_y, arg->running, arg->simplification, arg->layermaps, arg->layer_unmaps, arg->pass, arg->passes, arg->mingap, arg->minextent, arg->fraction, arg);
|
||||
long long len = write_tile(geom, &geompos, arg->metabase, arg->stringpool, z, x, y, z == arg->maxzoom ? arg->full_detail : arg->low_detail, arg->min_detail, arg->basezoom, arg->outdb, arg->outdir, arg->droprate, arg->buffer, arg->fname, arg->geomfile, arg->minzoom, arg->maxzoom, arg->todo, arg->along, geompos, arg->gamma, arg->child_shards, arg->meta_off, arg->pool_off, arg->initial_x, arg->initial_y, arg->running, arg->simplification, arg->layermaps, arg->layer_unmaps, arg->tiling_seg, arg->pass, arg->passes, arg->mingap, arg->minextent, arg->fraction, arg->prefilter, arg->postfilter, arg);
|
||||
|
||||
if (len < 0) {
|
||||
int *err = &arg->err;
|
||||
@ -2069,7 +2168,15 @@ void *run_thread(void *vargs) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpool, unsigned *midx, unsigned *midy, int &maxzoom, int minzoom, int basezoom, sqlite3 *outdb, const char *outdir, double droprate, int buffer, const char *fname, const char *tmpdir, double gamma, int full_detail, int low_detail, int min_detail, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, double simplification, std::vector<std::map<std::string, layermap_entry>> &layermaps) {
|
||||
int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpool, unsigned *midx, unsigned *midy, int &maxzoom, int minzoom, int basezoom, sqlite3 *outdb, const char *outdir, double droprate, int buffer, const char *fname, const char *tmpdir, double gamma, int full_detail, int low_detail, int min_detail, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, double simplification, std::vector<std::map<std::string, layermap_entry>> &layermaps, const char *prefilter, const char *postfilter) {
|
||||
// The existing layermaps are one table per input thread.
|
||||
// We need to add another one per *tiling* thread so that it can be
|
||||
// safely changed during tiling.
|
||||
size_t layermaps_off = layermaps.size();
|
||||
for (size_t i = 0; i < CPUS; i++) {
|
||||
layermaps.push_back(std::map<std::string, layermap_entry>());
|
||||
}
|
||||
|
||||
// Table to map segment and layer number back to layer name
|
||||
std::vector<std::vector<std::string>> layer_unmaps;
|
||||
for (size_t seg = 0; seg < layermaps.size(); seg++) {
|
||||
@ -2092,13 +2199,13 @@ int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpo
|
||||
for (size_t j = 0; j < TEMP_FILES; j++) {
|
||||
char geomname[strlen(tmpdir) + strlen("/geom.XXXXXXXX" XSTRINGIFY(INT_MAX)) + 1];
|
||||
sprintf(geomname, "%s/geom%zu.XXXXXXXX", tmpdir, j);
|
||||
subfd[j] = mkstemp(geomname);
|
||||
subfd[j] = mkstemp_cloexec(geomname);
|
||||
// printf("%s\n", geomname);
|
||||
if (subfd[j] < 0) {
|
||||
perror(geomname);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
sub[j] = fopen(geomname, "wb");
|
||||
sub[j] = fopen_oflag(geomname, "wb", O_WRONLY | O_CLOEXEC);
|
||||
if (sub[j] == NULL) {
|
||||
perror(geomname);
|
||||
exit(EXIT_FAILURE);
|
||||
@ -2237,6 +2344,9 @@ int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpo
|
||||
args[thread].initial_y = initial_y;
|
||||
args[thread].layermaps = &layermaps;
|
||||
args[thread].layer_unmaps = &layer_unmaps;
|
||||
args[thread].tiling_seg = thread + layermaps_off;
|
||||
args[thread].prefilter = prefilter;
|
||||
args[thread].postfilter = postfilter;
|
||||
|
||||
args[thread].tasks = dispatches[thread].tasks;
|
||||
args[thread].running = &running;
|
||||
|
2
tile.hpp
2
tile.hpp
@ -9,7 +9,7 @@
|
||||
|
||||
long long write_tile(char **geom, char *metabase, char *stringpool, unsigned *file_bbox, int z, unsigned x, unsigned y, int detail, int min_detail, int basezoom, sqlite3 *outdb, const char *outdir, double droprate, int buffer, const char *fname, FILE **geomfile, int file_minzoom, int file_maxzoom, double todo, char *geomstart, long long along, double gamma, int nlayers);
|
||||
|
||||
int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpool, unsigned *midx, unsigned *midy, int &maxzoom, int minzoom, int basezoom, sqlite3 *outdb, const char *outdir, double droprate, int buffer, const char *fname, const char *tmpdir, double gamma, int full_detail, int low_detail, int min_detail, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, double simplification, std::vector<std::map<std::string, layermap_entry> > &layermap);
|
||||
int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpool, unsigned *midx, unsigned *midy, int &maxzoom, int minzoom, int basezoom, sqlite3 *outdb, const char *outdir, double droprate, int buffer, const char *fname, const char *tmpdir, double gamma, int full_detail, int low_detail, int min_detail, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, double simplification, std::vector<std::map<std::string, layermap_entry> > &layermap, const char *prefilter, const char *postfilter);
|
||||
|
||||
int manage_gap(unsigned long long index, unsigned long long *previndex, double scale, double gamma, double *gap);
|
||||
|
||||
|
@ -1,6 +1,6 @@
|
||||
#ifndef VERSION_HPP
|
||||
#define VERSION_HPP
|
||||
|
||||
#define VERSION "tippecanoe v1.21.0\n"
|
||||
#define VERSION "tippecanoe v1.22.0\n"
|
||||
|
||||
#endif
|
||||
|
348
write_json.cpp
Normal file
348
write_json.cpp
Normal file
@ -0,0 +1,348 @@
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <vector>
|
||||
#include <map>
|
||||
#include <string>
|
||||
#include "projection.hpp"
|
||||
#include "geometry.hpp"
|
||||
#include "mvt.hpp"
|
||||
#include "write_json.hpp"
|
||||
|
||||
struct lonlat {
|
||||
int op;
|
||||
double lon;
|
||||
double lat;
|
||||
long long x;
|
||||
long long y;
|
||||
|
||||
lonlat(int nop, double nlon, double nlat, long long nx, long long ny) {
|
||||
this->op = nop;
|
||||
this->lon = nlon;
|
||||
this->lat = nlat;
|
||||
this->x = nx;
|
||||
this->y = ny;
|
||||
}
|
||||
};
|
||||
|
||||
void layer_to_geojson(FILE *fp, mvt_layer const &layer, unsigned z, unsigned x, unsigned y, bool comma, bool name, bool zoom, unsigned long long index, long long sequence, long long extent, bool complain) {
|
||||
for (size_t f = 0; f < layer.features.size(); f++) {
|
||||
mvt_feature const &feat = layer.features[f];
|
||||
|
||||
if (comma && f != 0) {
|
||||
fprintf(fp, ",\n");
|
||||
}
|
||||
|
||||
fprintf(fp, "{ \"type\": \"Feature\"");
|
||||
|
||||
if (feat.has_id) {
|
||||
fprintf(fp, ", \"id\": %llu", feat.id);
|
||||
}
|
||||
|
||||
if (name || zoom || index != 0 || sequence != 0 || extent != 0) {
|
||||
bool need_comma = false;
|
||||
|
||||
fprintf(fp, ", \"tippecanoe\": { ");
|
||||
|
||||
if (name) {
|
||||
if (need_comma) {
|
||||
fprintf(fp, ", ");
|
||||
}
|
||||
fprintf(fp, "\"layer\": ");
|
||||
fprintq(fp, layer.name.c_str());
|
||||
need_comma = true;
|
||||
}
|
||||
|
||||
if (zoom) {
|
||||
if (need_comma) {
|
||||
fprintf(fp, ", ");
|
||||
}
|
||||
fprintf(fp, "\"minzoom\": %u, ", z);
|
||||
fprintf(fp, "\"maxzoom\": %u", z);
|
||||
need_comma = true;
|
||||
}
|
||||
|
||||
if (index != 0) {
|
||||
if (need_comma) {
|
||||
fprintf(fp, ", ");
|
||||
}
|
||||
fprintf(fp, "\"index\": %llu", index);
|
||||
need_comma = true;
|
||||
}
|
||||
|
||||
if (sequence != 0) {
|
||||
if (need_comma) {
|
||||
fprintf(fp, ", ");
|
||||
}
|
||||
fprintf(fp, "\"sequence\": %lld", sequence);
|
||||
need_comma = true;
|
||||
}
|
||||
|
||||
if (extent != 0) {
|
||||
if (need_comma) {
|
||||
fprintf(fp, ", ");
|
||||
}
|
||||
fprintf(fp, "\"extent\": %lld", extent);
|
||||
need_comma = true;
|
||||
}
|
||||
|
||||
fprintf(fp, " }");
|
||||
}
|
||||
|
||||
fprintf(fp, ", \"properties\": { ");
|
||||
|
||||
for (size_t t = 0; t + 1 < feat.tags.size(); t += 2) {
|
||||
if (t != 0) {
|
||||
fprintf(fp, ", ");
|
||||
}
|
||||
|
||||
if (feat.tags[t] >= layer.keys.size()) {
|
||||
fprintf(stderr, "Error: out of bounds feature key (%u in %zu)\n", feat.tags[t], layer.keys.size());
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (feat.tags[t + 1] >= layer.values.size()) {
|
||||
fprintf(stderr, "Error: out of bounds feature value (%u in %zu)\n", feat.tags[t + 1], layer.values.size());
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
const char *key = layer.keys[feat.tags[t]].c_str();
|
||||
mvt_value const &val = layer.values[feat.tags[t + 1]];
|
||||
|
||||
if (val.type == mvt_string) {
|
||||
fprintq(fp, key);
|
||||
fprintf(fp, ": ");
|
||||
fprintq(fp, val.string_value.c_str());
|
||||
} else if (val.type == mvt_int) {
|
||||
fprintq(fp, key);
|
||||
fprintf(fp, ": %lld", (long long) val.numeric_value.int_value);
|
||||
} else if (val.type == mvt_double) {
|
||||
fprintq(fp, key);
|
||||
double v = val.numeric_value.double_value;
|
||||
if (v == (long long) v) {
|
||||
fprintf(fp, ": %lld", (long long) v);
|
||||
} else {
|
||||
fprintf(fp, ": %g", v);
|
||||
}
|
||||
} else if (val.type == mvt_float) {
|
||||
fprintq(fp, key);
|
||||
double v = val.numeric_value.float_value;
|
||||
if (v == (long long) v) {
|
||||
fprintf(fp, ": %lld", (long long) v);
|
||||
} else {
|
||||
fprintf(fp, ": %g", v);
|
||||
}
|
||||
} else if (val.type == mvt_sint) {
|
||||
fprintq(fp, key);
|
||||
fprintf(fp, ": %lld", (long long) val.numeric_value.sint_value);
|
||||
} else if (val.type == mvt_uint) {
|
||||
fprintq(fp, key);
|
||||
fprintf(fp, ": %lld", (long long) val.numeric_value.uint_value);
|
||||
} else if (val.type == mvt_bool) {
|
||||
fprintq(fp, key);
|
||||
fprintf(fp, ": %s", val.numeric_value.bool_value ? "true" : "false");
|
||||
}
|
||||
}
|
||||
|
||||
fprintf(fp, " }, \"geometry\": { ");
|
||||
|
||||
std::vector<lonlat> ops;
|
||||
|
||||
for (size_t g = 0; g < feat.geometry.size(); g++) {
|
||||
int op = feat.geometry[g].op;
|
||||
long long px = feat.geometry[g].x;
|
||||
long long py = feat.geometry[g].y;
|
||||
|
||||
if (op == VT_MOVETO || op == VT_LINETO) {
|
||||
long long scale = 1LL << (32 - z);
|
||||
long long wx = scale * x + (scale / layer.extent) * px;
|
||||
long long wy = scale * y + (scale / layer.extent) * py;
|
||||
|
||||
double lat, lon;
|
||||
projection->unproject(wx, wy, 32, &lon, &lat);
|
||||
|
||||
ops.push_back(lonlat(op, lon, lat, px, py));
|
||||
} else {
|
||||
ops.push_back(lonlat(op, 0, 0, 0, 0));
|
||||
}
|
||||
}
|
||||
|
||||
if (feat.type == VT_POINT) {
|
||||
if (ops.size() == 1) {
|
||||
fprintf(fp, "\"type\": \"Point\", \"coordinates\": [ %f, %f ]", ops[0].lon, ops[0].lat);
|
||||
} else {
|
||||
fprintf(fp, "\"type\": \"MultiPoint\", \"coordinates\": [ ");
|
||||
for (size_t i = 0; i < ops.size(); i++) {
|
||||
if (i != 0) {
|
||||
fprintf(fp, ", ");
|
||||
}
|
||||
fprintf(fp, "[ %f, %f ]", ops[i].lon, ops[i].lat);
|
||||
}
|
||||
fprintf(fp, " ]");
|
||||
}
|
||||
} else if (feat.type == VT_LINE) {
|
||||
int movetos = 0;
|
||||
for (size_t i = 0; i < ops.size(); i++) {
|
||||
if (ops[i].op == VT_MOVETO) {
|
||||
movetos++;
|
||||
}
|
||||
}
|
||||
|
||||
if (movetos < 2) {
|
||||
fprintf(fp, "\"type\": \"LineString\", \"coordinates\": [ ");
|
||||
for (size_t i = 0; i < ops.size(); i++) {
|
||||
if (i != 0) {
|
||||
fprintf(fp, ", ");
|
||||
}
|
||||
fprintf(fp, "[ %f, %f ]", ops[i].lon, ops[i].lat);
|
||||
}
|
||||
fprintf(fp, " ]");
|
||||
} else {
|
||||
fprintf(fp, "\"type\": \"MultiLineString\", \"coordinates\": [ [ ");
|
||||
int state = 0;
|
||||
for (size_t i = 0; i < ops.size(); i++) {
|
||||
if (ops[i].op == VT_MOVETO) {
|
||||
if (state == 0) {
|
||||
fprintf(fp, "[ %f, %f ]", ops[i].lon, ops[i].lat);
|
||||
state = 1;
|
||||
} else {
|
||||
fprintf(fp, " ], [ ");
|
||||
fprintf(fp, "[ %f, %f ]", ops[i].lon, ops[i].lat);
|
||||
state = 1;
|
||||
}
|
||||
} else {
|
||||
fprintf(fp, ", [ %f, %f ]", ops[i].lon, ops[i].lat);
|
||||
}
|
||||
}
|
||||
fprintf(fp, " ] ]");
|
||||
}
|
||||
} else if (feat.type == VT_POLYGON) {
|
||||
std::vector<std::vector<lonlat> > rings;
|
||||
std::vector<double> areas;
|
||||
|
||||
for (size_t i = 0; i < ops.size(); i++) {
|
||||
if (ops[i].op == VT_MOVETO) {
|
||||
rings.push_back(std::vector<lonlat>());
|
||||
areas.push_back(0);
|
||||
}
|
||||
|
||||
int n = rings.size() - 1;
|
||||
if (n >= 0) {
|
||||
if (ops[i].op == VT_CLOSEPATH) {
|
||||
rings[n].push_back(rings[n][0]);
|
||||
} else {
|
||||
rings[n].push_back(ops[i]);
|
||||
}
|
||||
}
|
||||
|
||||
if (i + 1 >= ops.size() || ops[i + 1].op == VT_MOVETO) {
|
||||
if (ops[i].op != VT_CLOSEPATH) {
|
||||
static bool warned = false;
|
||||
|
||||
if (!warned) {
|
||||
fprintf(stderr, "Ring does not end with closepath (ends with %d)\n", ops[i].op);
|
||||
if (complain) {
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
warned = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
int outer = 0;
|
||||
|
||||
for (size_t i = 0; i < rings.size(); i++) {
|
||||
long double area = 0;
|
||||
for (size_t k = 0; k < rings[i].size(); k++) {
|
||||
if (rings[i][k].op != VT_CLOSEPATH) {
|
||||
area += (long double) rings[i][k].x * (long double) rings[i][(k + 1) % rings[i].size()].y;
|
||||
area -= (long double) rings[i][k].y * (long double) rings[i][(k + 1) % rings[i].size()].x;
|
||||
}
|
||||
}
|
||||
area /= 2;
|
||||
|
||||
areas[i] = area;
|
||||
if (areas[i] >= 0 || i == 0) {
|
||||
outer++;
|
||||
}
|
||||
|
||||
// fprintf(fp, "\"area\": %Lf,", area);
|
||||
}
|
||||
|
||||
if (outer > 1) {
|
||||
fprintf(fp, "\"type\": \"MultiPolygon\", \"coordinates\": [ [ [ ");
|
||||
} else {
|
||||
fprintf(fp, "\"type\": \"Polygon\", \"coordinates\": [ [ ");
|
||||
}
|
||||
|
||||
int state = 0;
|
||||
for (size_t i = 0; i < rings.size(); i++) {
|
||||
if (i == 0 && areas[i] < 0) {
|
||||
static bool warned = false;
|
||||
|
||||
if (!warned) {
|
||||
fprintf(stderr, "Polygon begins with an inner ring\n");
|
||||
if (complain) {
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
warned = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (areas[i] >= 0) {
|
||||
if (state != 0) {
|
||||
// new multipolygon
|
||||
fprintf(fp, " ] ], [ [ ");
|
||||
}
|
||||
state = 1;
|
||||
}
|
||||
|
||||
if (state == 2) {
|
||||
// new ring in the same polygon
|
||||
fprintf(fp, " ], [ ");
|
||||
}
|
||||
|
||||
for (size_t j = 0; j < rings[i].size(); j++) {
|
||||
if (rings[i][j].op != VT_CLOSEPATH) {
|
||||
if (j != 0) {
|
||||
fprintf(fp, ", ");
|
||||
}
|
||||
|
||||
fprintf(fp, "[ %f, %f ]", rings[i][j].lon, rings[i][j].lat);
|
||||
} else {
|
||||
if (j != 0) {
|
||||
fprintf(fp, ", ");
|
||||
}
|
||||
|
||||
fprintf(fp, "[ %f, %f ]", rings[i][0].lon, rings[i][0].lat);
|
||||
}
|
||||
}
|
||||
|
||||
state = 2;
|
||||
}
|
||||
|
||||
if (outer > 1) {
|
||||
fprintf(fp, " ] ] ]");
|
||||
} else {
|
||||
fprintf(fp, " ] ]");
|
||||
}
|
||||
}
|
||||
|
||||
fprintf(fp, " } }\n");
|
||||
}
|
||||
}
|
||||
|
||||
void fprintq(FILE *fp, const char *s) {
|
||||
fputc('"', fp);
|
||||
for (; *s; s++) {
|
||||
if (*s == '\\' || *s == '"') {
|
||||
fprintf(fp, "\\%c", *s);
|
||||
} else if (*s >= 0 && *s < ' ') {
|
||||
fprintf(fp, "\\u%04x", *s);
|
||||
} else {
|
||||
fputc(*s, fp);
|
||||
}
|
||||
}
|
||||
fputc('"', fp);
|
||||
}
|
2
write_json.hpp
Normal file
2
write_json.hpp
Normal file
@ -0,0 +1,2 @@
|
||||
void layer_to_geojson(FILE *fp, mvt_layer const &layer, unsigned z, unsigned x, unsigned y, bool comma, bool name, bool zoom, unsigned long long index, long long sequence, long long extent, bool complain);
|
||||
void fprintq(FILE *f, const char *s);
|
Loading…
x
Reference in New Issue
Block a user