Mirror of https://github.com/mapbox/tippecanoe.git (synced 2025-02-22 18:12:40 +00:00)

Commit c64a39cc58: Merge branch 'master' into object-attributes
@ -1,3 +1,8 @@
+## 1.27.11
+
+* Always include tile and layer in tippecanoe-decode, fixing corrupt JSON.
+* Clean up writing of JSON in general.
+
 ## 1.27.10
 
 * Add --progress-interval setting to reduce progress indicator frequency
Makefile (6 changed lines)
@ -144,11 +144,13 @@ decode-test:
 	./tippecanoe -z11 -Z11 -f -o tests/muni/decode/multi.mbtiles tests/muni/*.json
 	./tippecanoe-decode -l subway tests/muni/decode/multi.mbtiles > tests/muni/decode/multi.mbtiles.json.check
 	./tippecanoe-decode -c tests/muni/decode/multi.mbtiles > tests/muni/decode/multi.mbtiles.pipeline.json.check
+	./tippecanoe-decode tests/muni/decode/multi.mbtiles 11 327 791 > tests/muni/decode/multi.mbtiles.onetile.json.check
 	./tippecanoe-decode --stats tests/muni/decode/multi.mbtiles > tests/muni/decode/multi.mbtiles.stats.json.check
 	cmp tests/muni/decode/multi.mbtiles.json.check tests/muni/decode/multi.mbtiles.json
 	cmp tests/muni/decode/multi.mbtiles.pipeline.json.check tests/muni/decode/multi.mbtiles.pipeline.json
+	cmp tests/muni/decode/multi.mbtiles.onetile.json.check tests/muni/decode/multi.mbtiles.onetile.json
 	cmp tests/muni/decode/multi.mbtiles.stats.json.check tests/muni/decode/multi.mbtiles.stats.json
-	rm -f tests/muni/decode/multi.mbtiles.json.check tests/muni/decode/multi.mbtiles tests/muni/decode/multi.mbtiles.pipeline.json.check tests/muni/decode/multi.mbtiles.stats.json.check
+	rm -f tests/muni/decode/multi.mbtiles.json.check tests/muni/decode/multi.mbtiles tests/muni/decode/multi.mbtiles.pipeline.json.check tests/muni/decode/multi.mbtiles.stats.json.check tests/muni/decode/multi.mbtiles.onetile.json.check
 
 pbf-test:
 	./tippecanoe-decode tests/pbf/11-328-791.vector.pbf 11 328 791 > tests/pbf/11-328-791.vector.pbf.out
@ -164,7 +166,7 @@ enumerate-test:
 	cmp tests/ne_110m_admin_0_countries/out/enum tests/ne_110m_admin_0_countries/out/enum.check
 	rm tests/ne_110m_admin_0_countries/out/enum.mbtiles tests/ne_110m_admin_0_countries/out/enum.check
 
-join-test:
+join-test: tile-join
 	./tippecanoe -f -z12 -o tests/join-population/tabblock_06001420.mbtiles tests/join-population/tabblock_06001420.json
 	./tippecanoe -f -Z5 -z10 -o tests/join-population/macarthur.mbtiles -l macarthur tests/join-population/macarthur.json
 	./tile-join -f -Z6 -z9 -o tests/join-population/macarthur-6-9.mbtiles tests/join-population/macarthur.mbtiles
@ -604,7 +604,7 @@ or on an individual tile:
     tippecanoe-decode file.mbtiles zoom x y
     tippecanoe-decode file.vector.pbf zoom x y
 
-If you decode an entire file, you get a nested `FeatureCollection` identifying each
+Unless you use `-c`, the output is a set of nested FeatureCollections identifying each
 tile and layer separately. Note that the same features generally appear at all zooms,
 so the output for the file will have many copies of the same features at different
 resolutions.
decode.cpp (201 changed lines)
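Throughout the decode.cpp hunks below, hand-formatted output such as printf("{ \"type\": \"FeatureCollection\"") is replaced by calls on a shared json_writer that tracks nesting, commas, and colons itself. As orientation only, here is a minimal sketch of the new calling style (not a line of this diff; it assumes the json_writer declared in write_json.hpp later in this commit, and main() is just a hypothetical driver):

#include <stdio.h>
#include <stdlib.h>
#include "write_json.hpp"

int main() {
	json_writer state(stdout);	// could also target a std::string *

	state.json_write_hash();	// emits "{"
	state.json_write_string("type");	// emits the key; the writer adds ": " before the next value
	state.json_write_string("FeatureCollection");

	state.json_write_string("features");	// the writer inserts the "," separator automatically
	state.json_write_array();
	state.json_end_array();

	state.json_end_hash();
	state.json_write_newline();	// prints: { "type": "FeatureCollection", "features": [ ] }
	return 0;
}

Because every open hash or array is pushed onto the writer's internal state stack, unbalanced output is caught by the json_writer destructor rather than silently producing corrupt JSON.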
@ -27,17 +27,31 @@ int minzoom = 0;
|
||||
int maxzoom = 32;
|
||||
bool force = false;
|
||||
|
||||
void do_stats(mvt_tile &tile, size_t size, bool compressed, int z, unsigned x, unsigned y) {
|
||||
printf("{ \"zoom\": %d, \"x\": %u, \"y\": %u, \"bytes\": %zu, \"compressed\": %s", z, x, y, size, compressed ? "true" : "false");
|
||||
void do_stats(mvt_tile &tile, size_t size, bool compressed, int z, unsigned x, unsigned y, json_writer &state) {
|
||||
state.json_write_hash();
|
||||
|
||||
state.json_write_string("zoom");
|
||||
state.json_write_signed(z);
|
||||
|
||||
state.json_write_string("x");
|
||||
state.json_write_unsigned(x);
|
||||
|
||||
state.json_write_string("y");
|
||||
state.json_write_unsigned(y);
|
||||
|
||||
state.json_write_string("bytes");
|
||||
state.json_write_unsigned(size);
|
||||
|
||||
state.json_write_string("compressed");
|
||||
state.json_write_bool(compressed);
|
||||
|
||||
state.json_write_string("layers");
|
||||
state.json_write_hash();
|
||||
|
||||
printf(", \"layers\": { ");
|
||||
for (size_t i = 0; i < tile.layers.size(); i++) {
|
||||
if (i != 0) {
|
||||
printf(", ");
|
||||
}
|
||||
fprintq(stdout, tile.layers[i].name.c_str());
|
||||
state.json_write_string(tile.layers[i].name);
|
||||
|
||||
int points = 0, lines = 0, polygons = 0;
|
||||
size_t points = 0, lines = 0, polygons = 0;
|
||||
for (size_t j = 0; j < tile.layers[i].features.size(); j++) {
|
||||
if (tile.layers[i].features[j].type == mvt_point) {
|
||||
points++;
|
||||
@ -48,13 +62,30 @@ void do_stats(mvt_tile &tile, size_t size, bool compressed, int z, unsigned x, u
|
||||
}
|
||||
}
|
||||
|
||||
printf(": { \"points\": %d, \"lines\": %d, \"polygons\": %d, \"extent\": %lld }", points, lines, polygons, tile.layers[i].extent);
|
||||
state.json_write_hash();
|
||||
|
||||
state.json_write_string("points");
|
||||
state.json_write_unsigned(points);
|
||||
|
||||
state.json_write_string("lines");
|
||||
state.json_write_unsigned(lines);
|
||||
|
||||
state.json_write_string("polygons");
|
||||
state.json_write_unsigned(polygons);
|
||||
|
||||
state.json_write_string("extent");
|
||||
state.json_write_signed(tile.layers[i].extent);
|
||||
|
||||
state.json_end_hash();
|
||||
}
|
||||
|
||||
printf(" } }\n");
|
||||
state.json_end_hash();
|
||||
state.json_end_hash();
|
||||
|
||||
state.json_write_newline();
|
||||
}
|
||||
|
||||
void handle(std::string message, int z, unsigned x, unsigned y, int describe, std::set<std::string> const &to_decode, bool pipeline, bool stats) {
|
||||
void handle(std::string message, int z, unsigned x, unsigned y, std::set<std::string> const &to_decode, bool pipeline, bool stats, json_writer &state) {
|
||||
mvt_tile tile;
|
||||
bool was_compressed;
|
||||
|
||||
@ -69,28 +100,57 @@ void handle(std::string message, int z, unsigned x, unsigned y, int describe, st
|
||||
}
|
||||
|
||||
if (stats) {
|
||||
do_stats(tile, message.size(), was_compressed, z, x, y);
|
||||
do_stats(tile, message.size(), was_compressed, z, x, y, state);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!pipeline) {
|
||||
printf("{ \"type\": \"FeatureCollection\"");
|
||||
state.json_write_hash();
|
||||
|
||||
state.json_write_string("type");
|
||||
state.json_write_string("FeatureCollection");
|
||||
|
||||
if (true) {
|
||||
state.json_write_string("properties");
|
||||
state.json_write_hash();
|
||||
|
||||
state.json_write_string("zoom");
|
||||
state.json_write_signed(z);
|
||||
|
||||
state.json_write_string("x");
|
||||
state.json_write_signed(x);
|
||||
|
||||
state.json_write_string("y");
|
||||
state.json_write_signed(y);
|
||||
|
||||
if (describe) {
|
||||
printf(", \"properties\": { \"zoom\": %d, \"x\": %d, \"y\": %d", z, x, y);
|
||||
if (!was_compressed) {
|
||||
printf(", \"compressed\": false");
|
||||
state.json_write_string("compressed");
|
||||
state.json_write_bool(false);
|
||||
}
|
||||
printf(" }");
|
||||
|
||||
state.json_end_hash();
|
||||
|
||||
if (projection != projections) {
|
||||
printf(", \"crs\": { \"type\": \"name\", \"properties\": { \"name\": ");
|
||||
fprintq(stdout, projection->alias);
|
||||
printf(" } }");
|
||||
state.json_write_string("crs");
|
||||
state.json_write_hash();
|
||||
|
||||
state.json_write_string("type");
|
||||
state.json_write_string("name");
|
||||
|
||||
state.json_write_string("properties");
|
||||
state.json_write_hash();
|
||||
|
||||
state.json_write_string("name");
|
||||
state.json_write_string(projection->alias);
|
||||
|
||||
state.json_end_hash();
|
||||
state.json_end_hash();
|
||||
}
|
||||
}
|
||||
|
||||
printf(", \"features\": [\n");
|
||||
state.json_write_string("features");
|
||||
state.json_write_array();
|
||||
state.json_write_newline();
|
||||
}
|
||||
|
||||
bool first_layer = true;
|
||||
@ -107,18 +167,34 @@ void handle(std::string message, int z, unsigned x, unsigned y, int describe, st
|
||||
}
|
||||
|
||||
if (!pipeline) {
|
||||
if (describe) {
|
||||
if (true) {
|
||||
if (!first_layer) {
|
||||
printf(",\n");
|
||||
state.json_comma_newline();
|
||||
}
|
||||
|
||||
printf("{ \"type\": \"FeatureCollection\"");
|
||||
printf(", \"properties\": { \"layer\": ");
|
||||
fprintq(stdout, layer.name.c_str());
|
||||
printf(", \"version\": %d, \"extent\": %lld", layer.version, layer.extent);
|
||||
printf(" }");
|
||||
printf(", \"features\": [\n");
|
||||
state.json_write_hash();
|
||||
|
||||
state.json_write_string("type");
|
||||
state.json_write_string("FeatureCollection");
|
||||
|
||||
state.json_write_string("properties");
|
||||
state.json_write_hash();
|
||||
|
||||
state.json_write_string("layer");
|
||||
state.json_write_string(layer.name);
|
||||
|
||||
state.json_write_string("version");
|
||||
state.json_write_signed(layer.version);
|
||||
|
||||
state.json_write_string("extent");
|
||||
state.json_write_signed(layer.extent);
|
||||
|
||||
state.json_end_hash();
|
||||
|
||||
state.json_write_string("features");
|
||||
state.json_write_array();
|
||||
|
||||
state.json_write_newline();
|
||||
first_layer = false;
|
||||
}
|
||||
}
|
||||
@ -129,17 +205,21 @@ void handle(std::string message, int z, unsigned x, unsigned y, int describe, st
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
layer_to_geojson(stdout, layer, z, x, y, !pipeline, pipeline, pipeline, false, 0, 0, 0, !force);
|
||||
layer_to_geojson(layer, z, x, y, !pipeline, pipeline, pipeline, false, 0, 0, 0, !force, state);
|
||||
|
||||
if (!pipeline) {
|
||||
if (describe) {
|
||||
printf("] }\n");
|
||||
if (true) {
|
||||
state.json_end_array();
|
||||
state.json_end_hash();
|
||||
state.json_write_newline();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!pipeline) {
|
||||
printf("] }\n");
|
||||
state.json_end_array();
|
||||
state.json_end_hash();
|
||||
state.json_write_newline();
|
||||
}
|
||||
}
|
||||
|
||||
@ -148,6 +228,7 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
|
||||
bool isdir = false;
|
||||
int oz = z;
|
||||
unsigned ox = x, oy = y;
|
||||
json_writer state(stdout);
|
||||
|
||||
int fd = open(fname, O_RDONLY | O_CLOEXEC);
|
||||
if (fd >= 0) {
|
||||
@ -159,7 +240,7 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
|
||||
if (strcmp(map, "SQLite format 3") != 0) {
|
||||
if (z >= 0) {
|
||||
std::string s = std::string(map, st.st_size);
|
||||
handle(s, z, x, y, 1, to_decode, pipeline, stats);
|
||||
handle(s, z, x, y, to_decode, pipeline, stats, state);
|
||||
munmap(map, st.st_size);
|
||||
return;
|
||||
} else {
|
||||
@ -199,7 +280,14 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
|
||||
int within = 0;
|
||||
|
||||
if (!pipeline && !stats) {
|
||||
printf("{ \"type\": \"FeatureCollection\", \"properties\": {\n");
|
||||
state.json_write_hash();
|
||||
|
||||
state.json_write_string("type");
|
||||
state.json_write_string("FeatureCollection");
|
||||
|
||||
state.json_write_string("properties");
|
||||
state.json_write_hash();
|
||||
state.json_write_newline();
|
||||
|
||||
const char *sql2 = "SELECT name, value from metadata order by name;";
|
||||
sqlite3_stmt *stmt2;
|
||||
@ -210,7 +298,7 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
|
||||
|
||||
while (sqlite3_step(stmt2) == SQLITE_ROW) {
|
||||
if (within) {
|
||||
printf(",\n");
|
||||
state.json_comma_newline();
|
||||
}
|
||||
within = 1;
|
||||
|
||||
@ -222,20 +310,27 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
fprintq(stdout, (char *) name);
|
||||
printf(": ");
|
||||
fprintq(stdout, (char *) value);
|
||||
state.json_write_string((char *) name);
|
||||
state.json_write_string((char *) value);
|
||||
}
|
||||
|
||||
state.json_write_newline();
|
||||
state.wantnl = false; // XXX
|
||||
|
||||
sqlite3_finalize(stmt2);
|
||||
}
|
||||
|
||||
if (stats) {
|
||||
printf("[\n");
|
||||
state.json_write_array();
|
||||
state.json_write_newline();
|
||||
}
|
||||
|
||||
if (!pipeline && !stats) {
|
||||
printf("\n}, \"features\": [\n");
|
||||
state.json_end_hash();
|
||||
|
||||
state.json_write_string("features");
|
||||
state.json_write_array();
|
||||
state.json_write_newline();
|
||||
}
|
||||
|
||||
if (isdir) {
|
||||
@ -243,13 +338,13 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
|
||||
for (size_t i = 0; i < tiles.size(); i++) {
|
||||
if (!pipeline && !stats) {
|
||||
if (within) {
|
||||
printf(",\n");
|
||||
state.json_comma_newline();
|
||||
}
|
||||
within = 1;
|
||||
}
|
||||
if (stats) {
|
||||
if (within) {
|
||||
printf(",\n");
|
||||
state.json_comma_newline();
|
||||
}
|
||||
within = 1;
|
||||
}
|
||||
@ -269,7 +364,7 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
|
||||
}
|
||||
fclose(f);
|
||||
|
||||
handle(s, tiles[i].z, tiles[i].x, tiles[i].y, 1, to_decode, pipeline, stats);
|
||||
handle(s, tiles[i].z, tiles[i].x, tiles[i].y, to_decode, pipeline, stats, state);
|
||||
}
|
||||
} else {
|
||||
const char *sql = "SELECT tile_data, zoom_level, tile_column, tile_row from tiles where zoom_level between ? and ? order by zoom_level, tile_column, tile_row;";
|
||||
@ -286,13 +381,13 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
|
||||
while (sqlite3_step(stmt) == SQLITE_ROW) {
|
||||
if (!pipeline && !stats) {
|
||||
if (within) {
|
||||
printf(",\n");
|
||||
state.json_comma_newline();
|
||||
}
|
||||
within = 1;
|
||||
}
|
||||
if (stats) {
|
||||
if (within) {
|
||||
printf(",\n");
|
||||
state.json_comma_newline();
|
||||
}
|
||||
within = 1;
|
||||
}
|
||||
@ -310,17 +405,23 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
|
||||
ty = (1LL << tz) - 1 - ty;
|
||||
const char *s = (const char *) sqlite3_column_blob(stmt, 0);
|
||||
|
||||
handle(std::string(s, len), tz, tx, ty, 1, to_decode, pipeline, stats);
|
||||
handle(std::string(s, len), tz, tx, ty, to_decode, pipeline, stats, state);
|
||||
}
|
||||
|
||||
sqlite3_finalize(stmt);
|
||||
}
|
||||
|
||||
if (!pipeline && !stats) {
|
||||
printf("] }\n");
|
||||
state.json_end_array();
|
||||
state.json_end_hash();
|
||||
state.json_write_newline();
|
||||
}
|
||||
if (stats) {
|
||||
printf("]\n");
|
||||
state.json_end_array();
|
||||
state.json_write_newline();
|
||||
}
|
||||
if (pipeline) {
|
||||
state.json_write_newline();
|
||||
}
|
||||
} else {
|
||||
int handled = 0;
|
||||
@ -344,7 +445,7 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
|
||||
fprintf(stderr, "%s: Warning: using tile %d/%u/%u instead of %d/%u/%u\n", fname, z, x, y, oz, ox, oy);
|
||||
}
|
||||
|
||||
handle(std::string(s, len), z, x, y, 0, to_decode, pipeline, stats);
|
||||
handle(std::string(s, len), z, x, y, to_decode, pipeline, stats, state);
|
||||
handled = 1;
|
||||
}
|
||||
|
||||
|
@ -723,7 +723,7 @@ tippecanoe\-decode file.vector.pbf zoom x y
 .fi
 .RE
 .PP
-If you decode an entire file, you get a nested \fB\fCFeatureCollection\fR identifying each
+Unless you use \fB\fC\-c\fR, the output is a set of nested FeatureCollections identifying each
 tile and layer separately. Note that the same features generally appear at all zooms,
 so the output for the file will have many copies of the same features at different
 resolutions.
mbtiles.cpp (232 changed lines)
@ -11,6 +11,7 @@
|
||||
#include "mbtiles.hpp"
|
||||
#include "text.hpp"
|
||||
#include "milo/dtoa_milo.h"
|
||||
#include "write_json.hpp"
|
||||
|
||||
sqlite3 *mbtiles_open(char *dbname, char **argv, int forcetable) {
|
||||
sqlite3 *outdb;
|
||||
@ -82,23 +83,6 @@ void mbtiles_write_tile(sqlite3 *outdb, int z, int tx, int ty, const char *data,
|
||||
}
|
||||
}
|
||||
|
||||
static void quote(std::string &buf, std::string const &s) {
|
||||
for (size_t i = 0; i < s.size(); i++) {
|
||||
unsigned char ch = s[i];
|
||||
|
||||
if (ch == '\\' || ch == '\"') {
|
||||
buf.push_back('\\');
|
||||
buf.push_back(ch);
|
||||
} else if (ch < ' ') {
|
||||
char tmp[7];
|
||||
sprintf(tmp, "\\u%04x", ch);
|
||||
buf.append(std::string(tmp));
|
||||
} else {
|
||||
buf.push_back(ch);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
bool type_and_string::operator<(const type_and_string &o) const {
|
||||
if (string < o.string) {
|
||||
return true;
|
||||
@ -119,36 +103,36 @@ bool type_and_string::operator!=(const type_and_string &o) const {
|
||||
return false;
|
||||
}
|
||||
|
||||
std::string tilestats(std::map<std::string, layermap_entry> const &layermap1, size_t elements) {
|
||||
void tilestats(std::map<std::string, layermap_entry> const &layermap1, size_t elements, json_writer &state) {
|
||||
// Consolidate layers/attributes whose names are truncated
|
||||
std::vector<std::map<std::string, layermap_entry>> lmv;
|
||||
lmv.push_back(layermap1);
|
||||
std::map<std::string, layermap_entry> layermap = merge_layermaps(lmv, true);
|
||||
|
||||
std::string out = "{\n";
|
||||
state.json_write_hash();
|
||||
|
||||
out.append("\t\"layerCount\": ");
|
||||
out.append(std::to_string(layermap.size()));
|
||||
out.append(",\n");
|
||||
state.nospace = true;
|
||||
state.json_write_string("layerCount");
|
||||
state.json_write_unsigned(layermap.size());
|
||||
|
||||
out.append("\t\"layers\": [\n");
|
||||
state.nospace = true;
|
||||
state.json_write_string("layers");
|
||||
state.json_write_array();
|
||||
|
||||
bool first = true;
|
||||
for (auto layer : layermap) {
|
||||
if (!first) {
|
||||
out.append(",\n");
|
||||
}
|
||||
first = false;
|
||||
|
||||
out.append("\t\t{\n");
|
||||
state.nospace = true;
|
||||
state.json_write_hash();
|
||||
|
||||
out.append("\t\t\t\"layer\": \"");
|
||||
quote(out, layer.first.c_str());
|
||||
out.append("\",\n");
|
||||
state.nospace = true;
|
||||
state.json_write_string("layer");
|
||||
state.json_write_string(layer.first);
|
||||
|
||||
out.append("\t\t\t\"count\": ");
|
||||
out.append(std::to_string(layer.second.points + layer.second.lines + layer.second.polygons));
|
||||
out.append(",\n");
|
||||
state.nospace = true;
|
||||
state.json_write_string("count");
|
||||
state.json_write_unsigned(layer.second.points + layer.second.lines + layer.second.polygons);
|
||||
|
||||
std::string geomtype = "Polygon";
|
||||
if (layer.second.points >= layer.second.lines && layer.second.points >= layer.second.polygons) {
|
||||
@ -157,45 +141,46 @@ std::string tilestats(std::map<std::string, layermap_entry> const &layermap1, si
|
||||
geomtype = "LineString";
|
||||
}
|
||||
|
||||
out.append("\t\t\t\"geometry\": \"");
|
||||
quote(out, geomtype.c_str());
|
||||
out.append("\",\n");
|
||||
state.nospace = true;
|
||||
state.json_write_string("geometry");
|
||||
state.json_write_string(geomtype);
|
||||
|
||||
size_t attrib_count = layer.second.file_keys.size();
|
||||
if (attrib_count > 1000) {
|
||||
attrib_count = 1000;
|
||||
}
|
||||
|
||||
out.append("\t\t\t\"attributeCount\": ");
|
||||
out.append(std::to_string(attrib_count));
|
||||
out.append(",\n");
|
||||
state.nospace = true;
|
||||
state.json_write_string("attributeCount");
|
||||
state.json_write_unsigned(attrib_count);
|
||||
|
||||
out.append("\t\t\t\"attributes\": [\n");
|
||||
state.nospace = true;
|
||||
state.json_write_string("attributes");
|
||||
state.nospace = true;
|
||||
state.json_write_array();
|
||||
|
||||
size_t attrs = 0;
|
||||
for (auto attribute : layer.second.file_keys) {
|
||||
if (attrs == elements) {
|
||||
break;
|
||||
}
|
||||
if (attrs != 0) {
|
||||
out.append(",\n");
|
||||
}
|
||||
attrs++;
|
||||
|
||||
out.append("\t\t\t\t{\n");
|
||||
state.nospace = true;
|
||||
state.json_write_hash();
|
||||
|
||||
out.append("\t\t\t\t\t\"attribute\": \"");
|
||||
quote(out, attribute.first.c_str());
|
||||
out.append("\",\n");
|
||||
state.nospace = true;
|
||||
state.json_write_string("attribute");
|
||||
state.json_write_string(attribute.first);
|
||||
|
||||
size_t val_count = attribute.second.sample_values.size();
|
||||
if (val_count > 1000) {
|
||||
val_count = 1000;
|
||||
}
|
||||
|
||||
out.append("\t\t\t\t\t\"count\": ");
|
||||
out.append(std::to_string(val_count));
|
||||
out.append(",\n");
|
||||
state.nospace = true;
|
||||
state.json_write_string("count");
|
||||
state.json_write_unsigned(val_count);
|
||||
|
||||
int type = 0;
|
||||
for (auto s : attribute.second.sample_values) {
|
||||
@ -214,11 +199,13 @@ std::string tilestats(std::map<std::string, layermap_entry> const &layermap1, si
|
||||
type_str = "mixed";
|
||||
}
|
||||
|
||||
out.append("\t\t\t\t\t\"type\": \"");
|
||||
quote(out, type_str.c_str());
|
||||
out.append("\",\n");
|
||||
state.nospace = true;
|
||||
state.json_write_string("type");
|
||||
state.json_write_string(type_str);
|
||||
|
||||
out.append("\t\t\t\t\t\"values\": [\n");
|
||||
state.nospace = true;
|
||||
state.json_write_string("values");
|
||||
state.json_write_array();
|
||||
|
||||
size_t vals = 0;
|
||||
for (auto value : attribute.second.sample_values) {
|
||||
@ -226,65 +213,50 @@ std::string tilestats(std::map<std::string, layermap_entry> const &layermap1, si
|
||||
break;
|
||||
}
|
||||
|
||||
state.nospace = true;
|
||||
|
||||
if (value.type == mvt_double || value.type == mvt_bool) {
|
||||
if (vals != 0) {
|
||||
out.append(",\n");
|
||||
}
|
||||
vals++;
|
||||
|
||||
out.append("\t\t\t\t\t\t");
|
||||
out.append(value.string);
|
||||
state.json_write_stringified(value.string);
|
||||
} else {
|
||||
std::string trunc = truncate16(value.string, 256);
|
||||
|
||||
if (trunc.size() == value.string.size()) {
|
||||
if (vals != 0) {
|
||||
out.append(",\n");
|
||||
}
|
||||
vals++;
|
||||
|
||||
out.append("\t\t\t\t\t\t\"");
|
||||
quote(out, value.string.c_str());
|
||||
out.append("\"");
|
||||
state.json_write_string(value.string);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
out.append("\n");
|
||||
out.append("\t\t\t\t\t]");
|
||||
state.nospace = true;
|
||||
state.json_end_array();
|
||||
|
||||
if ((type & (1 << mvt_double)) != 0) {
|
||||
out.append(",\n");
|
||||
state.nospace = true;
|
||||
state.json_write_string("min");
|
||||
state.json_write_number(attribute.second.min);
|
||||
|
||||
out.append("\t\t\t\t\t\"min\": ");
|
||||
out.append(milo::dtoa_milo(attribute.second.min));
|
||||
out.append(",\n");
|
||||
|
||||
out.append("\t\t\t\t\t\"max\": ");
|
||||
out.append(milo::dtoa_milo(attribute.second.max));
|
||||
state.nospace = true;
|
||||
state.json_write_string("max");
|
||||
state.json_write_number(attribute.second.max);
|
||||
}
|
||||
|
||||
out.append("\n");
|
||||
out.append("\t\t\t\t}");
|
||||
state.nospace = true;
|
||||
state.json_end_hash();
|
||||
}
|
||||
|
||||
out.append("\n\t\t\t]\n");
|
||||
out.append("\t\t}");
|
||||
state.nospace = true;
|
||||
state.json_end_array();
|
||||
state.nospace = true;
|
||||
state.json_end_hash();
|
||||
}
|
||||
|
||||
out.append("\n");
|
||||
out.append("\t]\n");
|
||||
out.append("}");
|
||||
|
||||
std::string out2;
|
||||
|
||||
for (size_t i = 0; i < out.size(); i++) {
|
||||
if (out[i] != '\t' && out[i] != '\n') {
|
||||
out2.push_back(out[i]);
|
||||
}
|
||||
}
|
||||
|
||||
return out2;
|
||||
state.nospace = true;
|
||||
state.json_end_array();
|
||||
state.nospace = true;
|
||||
state.json_end_hash();
|
||||
}
|
||||
|
||||
void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fname, int minzoom, int maxzoom, double minlat, double minlon, double maxlat, double maxlon, double midlat, double midlon, int forcetable, const char *attribution, std::map<std::string, layermap_entry> const &layermap, bool vector, const char *description, bool do_tilestats) {
|
||||
@ -399,8 +371,13 @@ void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fnam
|
||||
std::string buf;
|
||||
|
||||
{
|
||||
buf = "{";
|
||||
aprintf(&buf, "\"vector_layers\": [ ");
|
||||
json_writer state(&buf);
|
||||
|
||||
state.json_write_hash();
|
||||
state.nospace = true;
|
||||
|
||||
state.json_write_string("vector_layers");
|
||||
state.json_write_array();
|
||||
|
||||
std::vector<std::string> lnames;
|
||||
for (auto ai = layermap.begin(); ai != layermap.end(); ++ai) {
|
||||
@ -408,25 +385,32 @@ void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fnam
|
||||
}
|
||||
|
||||
for (size_t i = 0; i < lnames.size(); i++) {
|
||||
if (i != 0) {
|
||||
aprintf(&buf, ", ");
|
||||
}
|
||||
|
||||
auto fk = layermap.find(lnames[i]);
|
||||
aprintf(&buf, "{ \"id\": \"");
|
||||
quote(buf, lnames[i]);
|
||||
aprintf(&buf, "\", \"description\": \"\", \"minzoom\": %d, \"maxzoom\": %d, \"fields\": {", fk->second.minzoom, fk->second.maxzoom);
|
||||
state.json_write_hash();
|
||||
|
||||
state.json_write_string("id");
|
||||
state.json_write_string(lnames[i]);
|
||||
|
||||
state.json_write_string("description");
|
||||
state.json_write_string("");
|
||||
|
||||
state.json_write_string("minzoom");
|
||||
state.json_write_signed(fk->second.minzoom);
|
||||
|
||||
state.json_write_string("maxzoom");
|
||||
state.json_write_signed(fk->second.maxzoom);
|
||||
|
||||
state.json_write_string("fields");
|
||||
state.json_write_hash();
|
||||
state.nospace = true;
|
||||
|
||||
bool first = true;
|
||||
for (auto j = fk->second.file_keys.begin(); j != fk->second.file_keys.end(); ++j) {
|
||||
if (first) {
|
||||
first = false;
|
||||
} else {
|
||||
aprintf(&buf, ", ");
|
||||
}
|
||||
|
||||
aprintf(&buf, "\"");
|
||||
quote(buf, j->first.c_str());
|
||||
state.json_write_string(j->first);
|
||||
|
||||
int type = 0;
|
||||
for (auto s : j->second.sample_values) {
|
||||
@ -434,26 +418,31 @@ void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fnam
|
||||
}
|
||||
|
||||
if (type == (1 << mvt_double)) {
|
||||
aprintf(&buf, "\": \"Number\"");
|
||||
state.json_write_string("Number");
|
||||
} else if (type == (1 << mvt_bool)) {
|
||||
aprintf(&buf, "\": \"Boolean\"");
|
||||
state.json_write_string("Boolean");
|
||||
} else if (type == (1 << mvt_string)) {
|
||||
aprintf(&buf, "\": \"String\"");
|
||||
state.json_write_string("String");
|
||||
} else {
|
||||
aprintf(&buf, "\": \"Mixed\"");
|
||||
state.json_write_string("Mixed");
|
||||
}
|
||||
}
|
||||
|
||||
aprintf(&buf, "} }");
|
||||
state.nospace = true;
|
||||
state.json_end_hash();
|
||||
state.json_end_hash();
|
||||
}
|
||||
|
||||
aprintf(&buf, " ]");
|
||||
state.json_end_array();
|
||||
|
||||
if (do_tilestats && elements > 0) {
|
||||
aprintf(&buf, ",\"tilestats\": %s", tilestats(layermap, elements).c_str());
|
||||
state.nospace = true;
|
||||
state.json_write_string("tilestats");
|
||||
tilestats(layermap, elements, state);
|
||||
}
|
||||
|
||||
aprintf(&buf, "}");
|
||||
state.nospace = true;
|
||||
state.json_end_hash();
|
||||
}
|
||||
|
||||
sql = sqlite3_mprintf("INSERT INTO metadata (name, value) VALUES ('json', %Q);", buf.c_str());
|
||||
@ -479,7 +468,10 @@ void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fnam
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
fprintf(fp, "{\n");
|
||||
json_writer state(fp);
|
||||
|
||||
state.json_write_hash();
|
||||
state.json_write_newline();
|
||||
|
||||
sqlite3_stmt *stmt;
|
||||
bool first = true;
|
||||
@ -494,19 +486,17 @@ void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fnam
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
quote(key, k);
|
||||
quote(value, v);
|
||||
|
||||
if (!first) {
|
||||
fprintf(fp, ",\n");
|
||||
}
|
||||
fprintf(fp, " \"%s\": \"%s\"", key.c_str(), value.c_str());
|
||||
state.json_comma_newline();
|
||||
state.json_write_string(k);
|
||||
state.json_write_string(v);
|
||||
first = false;
|
||||
}
|
||||
sqlite3_finalize(stmt);
|
||||
}
|
||||
|
||||
fprintf(fp, "\n}\n");
|
||||
state.json_write_newline();
|
||||
state.json_end_hash();
|
||||
state.json_write_newline();
|
||||
fclose(fp);
|
||||
}
|
||||
}
|
||||
|
@ -51,8 +51,9 @@ void *run_writer(void *a) {
 		exit(EXIT_FAILURE);
 	}
 
+	json_writer state(fp);
 	for (size_t i = 0; i < wa->layers->size(); i++) {
-		layer_to_geojson(fp, (*(wa->layers))[i], wa->z, wa->x, wa->y, false, true, false, true, 0, 0, 0, true);
+		layer_to_geojson((*(wa->layers))[i], wa->z, wa->x, wa->y, false, true, false, true, 0, 0, 0, true, state);
 	}
 
 	if (fclose(fp) != 0) {
File diff suppressed because one or more lines are too long

tests/muni/decode/multi.mbtiles.onetile.json (8613 lines, new file)
File diff suppressed because it is too large
@ -1,12 +1,12 @@
 {
-"name": "tests/raw-tiles/raw-tiles",
-"description": "tests/raw-tiles/raw-tiles",
-"version": "2",
-"minzoom": "0",
-"maxzoom": "14",
-"center": "-122.662354,45.514045,14",
-"bounds": "-122.682427,45.512331,-122.654961,45.569975",
-"type": "overlay",
-"format": "pbf",
-"json": "{\"vector_layers\": [ { \"id\": \"hackspots\", \"description\": \"\", \"minzoom\": 0, \"maxzoom\": 14, \"fields\": {\"Address\": \"String\", \"Name\": \"String\", \"Notes\": \"String\"} } ],\"tilestats\": {\"layerCount\": 1,\"layers\": [{\"layer\": \"hackspots\",\"count\": 4,\"geometry\": \"Point\",\"attributeCount\": 3,\"attributes\": [{\"attribute\": \"Address\",\"count\": 4,\"type\": \"string\",\"values\": [\"1507 N Rosa Parks Way Portland, OR 97217\",\"201 SE 12th Ave, Portland, OR 97214\",\"4637 N Albina Ave Portland, OR 97217\",\"915 SE Hawthorne Blvd. Portland, OR 97214\"]},{\"attribute\": \"Name\",\"count\": 4,\"type\": \"string\",\"values\": [\"Albina Press\",\"Arbor Lodge\",\"Lucky Labrador Brew Pub\",\"Three Friends Coffeehouse\"]},{\"attribute\": \"Notes\",\"count\": 3,\"type\": \"string\",\"values\": [\"\",\"Dog friendly\",\"usually busy, outlets on side wall only\"]}]}]}}"
+"name": "tests/raw-tiles/raw-tiles",
+"description": "tests/raw-tiles/raw-tiles",
+"version": "2",
+"minzoom": "0",
+"maxzoom": "14",
+"center": "-122.662354,45.514045,14",
+"bounds": "-122.682427,45.512331,-122.654961,45.569975",
+"type": "overlay",
+"format": "pbf",
+"json": "{\"vector_layers\": [ { \"id\": \"hackspots\", \"description\": \"\", \"minzoom\": 0, \"maxzoom\": 14, \"fields\": {\"Address\": \"String\", \"Name\": \"String\", \"Notes\": \"String\"} } ],\"tilestats\": {\"layerCount\": 1,\"layers\": [{\"layer\": \"hackspots\",\"count\": 4,\"geometry\": \"Point\",\"attributeCount\": 3,\"attributes\": [{\"attribute\": \"Address\",\"count\": 4,\"type\": \"string\",\"values\": [\"1507 N Rosa Parks Way Portland, OR 97217\",\"201 SE 12th Ave, Portland, OR 97214\",\"4637 N Albina Ave Portland, OR 97217\",\"915 SE Hawthorne Blvd. Portland, OR 97214\"]},{\"attribute\": \"Name\",\"count\": 4,\"type\": \"string\",\"values\": [\"Albina Press\",\"Arbor Lodge\",\"Lucky Labrador Brew Pub\",\"Three Friends Coffeehouse\"]},{\"attribute\": \"Notes\",\"count\": 3,\"type\": \"string\",\"values\": [\"\",\"Dog friendly\",\"usually busy, outlets on side wall only\"]}]}]}}"
 }
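The long "json" metadata value above is what mbtiles.cpp now assembles with a string-backed json_writer instead of hand-quoted aprintf calls. A minimal sketch of that pattern (illustrative and heavily simplified; the real mbtiles_write_metadata also writes the description, zoom range, fields, and tilestats, and "hackspots" is simply the layer id taken from the fixture above):

#include <stdio.h>
#include <stdlib.h>
#include <string>
#include "write_json.hpp"

int main() {
	std::string buf;
	{
		json_writer state(&buf);	// accumulate into buf instead of a FILE *

		state.json_write_hash();
		state.nospace = true;	// suppress the padding space so the output stays in the old compact format

		state.json_write_string("vector_layers");
		state.json_write_array();

		state.json_write_hash();
		state.json_write_string("id");
		state.json_write_string("hackspots");
		state.json_end_hash();

		state.json_end_array();

		state.nospace = true;
		state.json_end_hash();
	}	// destructor checks that every hash and array was closed
	printf("%s\n", buf.c_str());	// {"vector_layers": [ { "id": "hackspots" } ]}
	return 0;
}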
tile.cpp (3 changed lines)
@ -1443,6 +1443,7 @@ struct run_prefilter_args {
 
 void *run_prefilter(void *v) {
 	run_prefilter_args *rpa = (run_prefilter_args *) v;
+	json_writer state(rpa->prefilter_fp);
 
 	while (1) {
 		serial_feature sf = next_feature(rpa->geoms, rpa->geompos_in, rpa->metabase, rpa->meta_off, rpa->z, rpa->tx, rpa->ty, rpa->initial_x, rpa->initial_y, rpa->original_features, rpa->unclipped_features, rpa->nextzoom, rpa->maxzoom, rpa->minzoom, rpa->max_zoom_increment, rpa->pass, rpa->passes, rpa->along, rpa->alongminus, rpa->buffer, rpa->within, rpa->first_time, rpa->geomfile, rpa->geompos, rpa->oprogress, rpa->todo, rpa->fname, rpa->child_shards);
@ -1479,7 +1480,7 @@ void *run_prefilter(void *v) {
 		decode_meta(sf.m, sf.keys, sf.values, rpa->stringpool + rpa->pool_off[sf.segment], tmp_layer, tmp_feature);
 		tmp_layer.features.push_back(tmp_feature);
 
-		layer_to_geojson(rpa->prefilter_fp, tmp_layer, 0, 0, 0, false, true, false, true, sf.index, sf.seq, sf.extent, true);
+		layer_to_geojson(tmp_layer, 0, 0, 0, false, true, false, true, sf.index, sf.seq, sf.extent, true, state);
 	}
 
 	if (fclose(rpa->prefilter_fp) != 0) {
@ -1,6 +1,6 @@
 #ifndef VERSION_HPP
 #define VERSION_HPP
 
-#define VERSION "tippecanoe v1.27.10\n"
+#define VERSION "tippecanoe v1.27.11\n"
 
 #endif
write_json.cpp (462 changed lines)
@ -1,5 +1,6 @@
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <vector>
|
||||
#include <map>
|
||||
#include <string>
|
||||
@ -10,6 +11,222 @@
|
||||
#include "text.hpp"
|
||||
#include "milo/dtoa_milo.h"
|
||||
|
||||
void json_writer::json_adjust() {
|
||||
if (state.size() == 0) {
|
||||
state.push_back(JSON_WRITE_TOP);
|
||||
} else if (state[state.size() - 1] == JSON_WRITE_TOP) {
|
||||
addc('\n');
|
||||
state[state.size() - 1] = JSON_WRITE_TOP;
|
||||
} else if (state[state.size() - 1] == JSON_WRITE_HASH) {
|
||||
if (!nospace) {
|
||||
addc(' ');
|
||||
}
|
||||
nospace = false;
|
||||
state[state.size() - 1] = JSON_WRITE_HASH_KEY;
|
||||
} else if (state[state.size() - 1] == JSON_WRITE_HASH_KEY) {
|
||||
adds(": ");
|
||||
state[state.size() - 1] = JSON_WRITE_HASH_VALUE;
|
||||
} else if (state[state.size() - 1] == JSON_WRITE_HASH_VALUE) {
|
||||
if (wantnl) {
|
||||
adds(",\n");
|
||||
nospace = false;
|
||||
} else if (nospace) {
|
||||
addc(',');
|
||||
nospace = false;
|
||||
} else {
|
||||
adds(", ");
|
||||
}
|
||||
wantnl = false;
|
||||
state[state.size() - 1] = JSON_WRITE_HASH_KEY;
|
||||
} else if (state[state.size() - 1] == JSON_WRITE_ARRAY) {
|
||||
if (!nospace) {
|
||||
addc(' ');
|
||||
}
|
||||
nospace = false;
|
||||
state[state.size() - 1] = JSON_WRITE_ARRAY_ELEMENT;
|
||||
} else if (state[state.size() - 1] == JSON_WRITE_ARRAY_ELEMENT) {
|
||||
if (wantnl) {
|
||||
adds(",\n");
|
||||
nospace = false;
|
||||
} else if (nospace) {
|
||||
addc(',');
|
||||
nospace = false;
|
||||
} else {
|
||||
adds(", ");
|
||||
}
|
||||
wantnl = false;
|
||||
state[state.size() - 1] = JSON_WRITE_ARRAY_ELEMENT;
|
||||
} else {
|
||||
fprintf(stderr, "Impossible JSON state\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
|
||||
void json_writer::json_write_array() {
|
||||
json_adjust();
|
||||
addc('[');
|
||||
|
||||
state.push_back(JSON_WRITE_ARRAY);
|
||||
}
|
||||
|
||||
void json_writer::json_end_array() {
|
||||
if (state.size() == 0) {
|
||||
fprintf(stderr, "End JSON array at top level\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
json_write_tok tok = state[state.size() - 1];
|
||||
state.pop_back();
|
||||
|
||||
if (tok == JSON_WRITE_ARRAY || tok == JSON_WRITE_ARRAY_ELEMENT) {
|
||||
if (!nospace) {
|
||||
addc(' ');
|
||||
}
|
||||
nospace = false;
|
||||
addc(']');
|
||||
} else {
|
||||
fprintf(stderr, "End JSON array with unexpected state\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
|
||||
void json_writer::json_write_hash() {
|
||||
json_adjust();
|
||||
addc('{');
|
||||
|
||||
state.push_back(JSON_WRITE_HASH);
|
||||
}
|
||||
|
||||
void json_writer::json_end_hash() {
|
||||
if (state.size() == 0) {
|
||||
fprintf(stderr, "End JSON hash at top level\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
json_write_tok tok = state[state.size() - 1];
|
||||
state.pop_back();
|
||||
|
||||
if (tok == JSON_WRITE_HASH) {
|
||||
if (!nospace) {
|
||||
adds(" "); // Preserve accidental extra space from before
|
||||
}
|
||||
nospace = false;
|
||||
addc('}');
|
||||
} else if (tok == JSON_WRITE_HASH_VALUE) {
|
||||
if (!nospace) {
|
||||
addc(' ');
|
||||
}
|
||||
nospace = false;
|
||||
addc('}');
|
||||
} else {
|
||||
fprintf(stderr, "End JSON hash with unexpected state\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
|
||||
void json_writer::json_write_string(std::string const &str) {
|
||||
json_adjust();
|
||||
|
||||
addc('"');
|
||||
for (size_t i = 0; i < str.size(); i++) {
|
||||
if (str[i] == '\\' || str[i] == '"') {
|
||||
aprintf("\\%c", str[i]);
|
||||
} else if ((unsigned char) str[i] < ' ') {
|
||||
aprintf("\\u%04x", str[i]);
|
||||
} else {
|
||||
addc(str[i]);
|
||||
}
|
||||
}
|
||||
addc('"');
|
||||
}
|
||||
|
||||
void json_writer::json_write_number(double d) {
|
||||
json_adjust();
|
||||
|
||||
adds(milo::dtoa_milo(d).c_str());
|
||||
}
|
||||
|
||||
// Just to avoid json_writer:: changing expected output format
|
||||
void json_writer::json_write_float(double d) {
|
||||
json_adjust();
|
||||
|
||||
aprintf("%f", d);
|
||||
}
|
||||
|
||||
void json_writer::json_write_unsigned(unsigned long long v) {
|
||||
json_adjust();
|
||||
|
||||
aprintf("%llu", v);
|
||||
}
|
||||
|
||||
void json_writer::json_write_signed(long long v) {
|
||||
json_adjust();
|
||||
|
||||
aprintf("%lld", v);
|
||||
}
|
||||
|
||||
void json_writer::json_write_stringified(std::string const &str) {
|
||||
json_adjust();
|
||||
|
||||
adds(str);
|
||||
}
|
||||
|
||||
void json_writer::json_write_bool(bool b) {
|
||||
json_adjust();
|
||||
|
||||
if (b) {
|
||||
adds("true");
|
||||
} else {
|
||||
adds("false");
|
||||
}
|
||||
}
|
||||
|
||||
void json_writer::json_write_null() {
|
||||
json_adjust();
|
||||
|
||||
adds("null");
|
||||
}
|
||||
|
||||
void json_writer::json_write_newline() {
|
||||
addc('\n');
|
||||
nospace = true;
|
||||
}
|
||||
|
||||
void json_writer::json_comma_newline() {
|
||||
wantnl = true;
|
||||
}
|
||||
|
||||
void json_writer::aprintf(const char *format, ...) {
|
||||
va_list ap;
|
||||
char *tmp;
|
||||
|
||||
va_start(ap, format);
|
||||
if (vasprintf(&tmp, format, ap) < 0) {
|
||||
fprintf(stderr, "memory allocation failure\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
va_end(ap);
|
||||
|
||||
adds(std::string(tmp, strlen(tmp)));
|
||||
free(tmp);
|
||||
}
|
||||
|
||||
void json_writer::addc(char c) {
|
||||
if (f != NULL) {
|
||||
putc(c, f);
|
||||
} else if (s != NULL) {
|
||||
s->push_back(c);
|
||||
}
|
||||
}
|
||||
|
||||
void json_writer::adds(std::string const &str) {
|
||||
if (f != NULL) {
|
||||
fputs(str.c_str(), f);
|
||||
} else if (s != NULL) {
|
||||
s->append(str);
|
||||
}
|
||||
}
|
||||
|
||||
struct lonlat {
|
||||
int op;
|
||||
double lon;
|
||||
@ -26,10 +243,10 @@ struct lonlat {
|
||||
}
|
||||
};
|
||||
|
||||
void print_val(FILE *fp, mvt_feature const &feature, mvt_layer const &layer, mvt_value const &val, size_t vo) {
|
||||
void print_val(mvt_feature const &feature, mvt_layer const &layer, mvt_value const &val, size_t vo, json_writer &state) {
|
||||
std::string s;
|
||||
stringify_val(s, feature, layer, val, vo);
|
||||
fprintf(fp, "%s", s.c_str());
|
||||
state.json_write_stringified(s);
|
||||
}
|
||||
|
||||
static void quote(std::string &buf, std::string const &s) {
|
||||
@ -107,85 +324,63 @@ void stringify_val(std::string &out, mvt_feature const &feature, mvt_layer const
|
||||
}
|
||||
}
|
||||
|
||||
void layer_to_geojson(FILE *fp, mvt_layer const &layer, unsigned z, unsigned x, unsigned y, bool comma, bool name, bool zoom, bool dropped, unsigned long long index, long long sequence, long long extent, bool complain) {
|
||||
void layer_to_geojson(mvt_layer const &layer, unsigned z, unsigned x, unsigned y, bool comma, bool name, bool zoom, bool dropped, unsigned long long index, long long sequence, long long extent, bool complain, json_writer &state) {
|
||||
for (size_t f = 0; f < layer.features.size(); f++) {
|
||||
mvt_feature const &feat = layer.features[f];
|
||||
|
||||
if (comma && f != 0) {
|
||||
fprintf(fp, ",\n");
|
||||
}
|
||||
|
||||
fprintf(fp, "{ \"type\": \"Feature\"");
|
||||
state.json_write_hash();
|
||||
state.json_write_string("type");
|
||||
state.json_write_string("Feature");
|
||||
|
||||
if (feat.has_id) {
|
||||
fprintf(fp, ", \"id\": %llu", feat.id);
|
||||
state.json_write_string("id");
|
||||
state.json_write_unsigned(feat.id);
|
||||
}
|
||||
|
||||
if (name || zoom || index != 0 || sequence != 0 || extent != 0) {
|
||||
bool need_comma = false;
|
||||
|
||||
fprintf(fp, ", \"tippecanoe\": { ");
|
||||
state.json_write_string("tippecanoe");
|
||||
state.json_write_hash();
|
||||
|
||||
if (name) {
|
||||
if (need_comma) {
|
||||
fprintf(fp, ", ");
|
||||
}
|
||||
fprintf(fp, "\"layer\": ");
|
||||
fprintq(fp, layer.name.c_str());
|
||||
need_comma = true;
|
||||
state.json_write_string("layer");
|
||||
state.json_write_string(layer.name);
|
||||
}
|
||||
|
||||
if (zoom) {
|
||||
if (need_comma) {
|
||||
fprintf(fp, ", ");
|
||||
}
|
||||
fprintf(fp, "\"minzoom\": %u, ", z);
|
||||
fprintf(fp, "\"maxzoom\": %u", z);
|
||||
need_comma = true;
|
||||
state.json_write_string("minzoom");
|
||||
state.json_write_unsigned(z);
|
||||
|
||||
state.json_write_string("maxzoom");
|
||||
state.json_write_unsigned(z);
|
||||
}
|
||||
|
||||
if (dropped) {
|
||||
if (need_comma) {
|
||||
fprintf(fp, ", ");
|
||||
}
|
||||
fprintf(fp, "\"dropped\": %s", feat.dropped ? "true" : "false");
|
||||
need_comma = true;
|
||||
state.json_write_string("dropped");
|
||||
state.json_write_bool(feat.dropped);
|
||||
}
|
||||
|
||||
if (index != 0) {
|
||||
if (need_comma) {
|
||||
fprintf(fp, ", ");
|
||||
}
|
||||
fprintf(fp, "\"index\": %llu", index);
|
||||
need_comma = true;
|
||||
state.json_write_string("index");
|
||||
state.json_write_unsigned(index);
|
||||
}
|
||||
|
||||
if (sequence != 0) {
|
||||
if (need_comma) {
|
||||
fprintf(fp, ", ");
|
||||
}
|
||||
fprintf(fp, "\"sequence\": %lld", sequence);
|
||||
need_comma = true;
|
||||
state.json_write_string("sequence");
|
||||
state.json_write_signed(sequence);
|
||||
}
|
||||
|
||||
if (extent != 0) {
|
||||
if (need_comma) {
|
||||
fprintf(fp, ", ");
|
||||
}
|
||||
fprintf(fp, "\"extent\": %lld", extent);
|
||||
need_comma = true;
|
||||
state.json_write_string("extent");
|
||||
state.json_write_signed(extent);
|
||||
}
|
||||
|
||||
fprintf(fp, " }");
|
||||
state.json_end_hash();
|
||||
}
|
||||
|
||||
fprintf(fp, ", \"properties\": { ");
|
||||
state.json_write_string("properties");
|
||||
state.json_write_hash();
|
||||
|
||||
for (size_t t = 0; t + 1 < feat.tags.size(); t += 2) {
|
||||
if (t != 0) {
|
||||
fprintf(fp, ", ");
|
||||
}
|
||||
|
||||
if (feat.tags[t] >= layer.keys.size()) {
|
||||
fprintf(stderr, "Error: out of bounds feature key (%u in %zu)\n", feat.tags[t], layer.keys.size());
|
||||
exit(EXIT_FAILURE);
|
||||
@ -198,13 +393,14 @@ void layer_to_geojson(FILE *fp, mvt_layer const &layer, unsigned z, unsigned x,
|
||||
const char *key = layer.keys[feat.tags[t]].c_str();
|
||||
mvt_value const &val = layer.values[feat.tags[t + 1]];
|
||||
|
||||
fprintq(fp, key);
|
||||
fprintf(fp, ": ");
|
||||
|
||||
print_val(fp, feat, layer, val, feat.tags[t + 1]);
|
||||
state.json_write_string(key);
|
||||
print_val(feat, layer, val, feat.tags[t + 1], state);
|
||||
}
|
||||
|
||||
fprintf(fp, " }, \"geometry\": { ");
|
||||
state.json_end_hash();
|
||||
|
||||
state.json_write_string("geometry");
|
||||
state.json_write_hash();
|
||||
|
||||
std::vector<lonlat> ops;
|
||||
|
||||
@ -229,16 +425,30 @@ void layer_to_geojson(FILE *fp, mvt_layer const &layer, unsigned z, unsigned x,
|
||||
|
||||
if (feat.type == VT_POINT) {
|
||||
if (ops.size() == 1) {
|
||||
fprintf(fp, "\"type\": \"Point\", \"coordinates\": [ %f, %f ]", ops[0].lon, ops[0].lat);
|
||||
state.json_write_string("type");
|
||||
state.json_write_string("Point");
|
||||
|
||||
state.json_write_string("coordinates");
|
||||
|
||||
state.json_write_array();
|
||||
state.json_write_float(ops[0].lon);
|
||||
state.json_write_float(ops[0].lat);
|
||||
state.json_end_array();
|
||||
} else {
|
||||
fprintf(fp, "\"type\": \"MultiPoint\", \"coordinates\": [ ");
|
||||
state.json_write_string("type");
|
||||
state.json_write_string("MultiPoint");
|
||||
|
||||
state.json_write_string("coordinates");
|
||||
state.json_write_array();
|
||||
|
||||
for (size_t i = 0; i < ops.size(); i++) {
|
||||
if (i != 0) {
|
||||
fprintf(fp, ", ");
|
||||
}
|
||||
fprintf(fp, "[ %f, %f ]", ops[i].lon, ops[i].lat);
|
||||
state.json_write_array();
|
||||
state.json_write_float(ops[i].lon);
|
||||
state.json_write_float(ops[i].lat);
|
||||
state.json_end_array();
|
||||
}
|
||||
fprintf(fp, " ]");
|
||||
|
||||
state.json_end_array();
|
||||
}
|
||||
} else if (feat.type == VT_LINE) {
|
||||
int movetos = 0;
|
||||
@ -249,32 +459,59 @@ void layer_to_geojson(FILE *fp, mvt_layer const &layer, unsigned z, unsigned x,
|
||||
}
|
||||
|
||||
if (movetos < 2) {
|
||||
fprintf(fp, "\"type\": \"LineString\", \"coordinates\": [ ");
|
||||
state.json_write_string("type");
|
||||
state.json_write_string("LineString");
|
||||
|
||||
state.json_write_string("coordinates");
|
||||
state.json_write_array();
|
||||
|
||||
for (size_t i = 0; i < ops.size(); i++) {
|
||||
if (i != 0) {
|
||||
fprintf(fp, ", ");
|
||||
}
|
||||
fprintf(fp, "[ %f, %f ]", ops[i].lon, ops[i].lat);
|
||||
state.json_write_array();
|
||||
state.json_write_float(ops[i].lon);
|
||||
state.json_write_float(ops[i].lat);
|
||||
state.json_end_array();
|
||||
}
|
||||
fprintf(fp, " ]");
|
||||
|
||||
state.json_end_array();
|
||||
} else {
|
||||
fprintf(fp, "\"type\": \"MultiLineString\", \"coordinates\": [ [ ");
|
||||
int state = 0;
|
||||
state.json_write_string("type");
|
||||
state.json_write_string("MultiLineString");
|
||||
|
||||
state.json_write_string("coordinates");
|
||||
state.json_write_array();
|
||||
state.json_write_array();
|
||||
|
||||
int sstate = 0;
|
||||
for (size_t i = 0; i < ops.size(); i++) {
|
||||
if (ops[i].op == VT_MOVETO) {
|
||||
if (state == 0) {
|
||||
fprintf(fp, "[ %f, %f ]", ops[i].lon, ops[i].lat);
|
||||
state = 1;
|
||||
if (sstate == 0) {
|
||||
state.json_write_array();
|
||||
state.json_write_float(ops[i].lon);
|
||||
state.json_write_float(ops[i].lat);
|
||||
state.json_end_array();
|
||||
|
||||
sstate = 1;
|
||||
} else {
|
||||
fprintf(fp, " ], [ ");
|
||||
fprintf(fp, "[ %f, %f ]", ops[i].lon, ops[i].lat);
|
||||
state = 1;
|
||||
state.json_end_array();
|
||||
state.json_write_array();
|
||||
|
||||
state.json_write_array();
|
||||
state.json_write_float(ops[i].lon);
|
||||
state.json_write_float(ops[i].lat);
|
||||
state.json_end_array();
|
||||
|
||||
sstate = 1;
|
||||
}
|
||||
} else {
|
||||
fprintf(fp, ", [ %f, %f ]", ops[i].lon, ops[i].lat);
|
||||
state.json_write_array();
|
||||
state.json_write_float(ops[i].lon);
|
||||
state.json_write_float(ops[i].lat);
|
||||
state.json_end_array();
|
||||
}
|
||||
}
|
||||
fprintf(fp, " ] ]");
|
||||
|
||||
state.json_end_array();
|
||||
state.json_end_array();
|
||||
}
|
||||
} else if (feat.type == VT_POLYGON) {
|
||||
std::vector<std::vector<lonlat> > rings;
|
||||
@ -328,16 +565,27 @@ void layer_to_geojson(FILE *fp, mvt_layer const &layer, unsigned z, unsigned x,
|
||||
outer++;
|
||||
}
|
||||
|
||||
// fprintf(fp, "\"area\": %Lf,", area);
|
||||
// fprintf("\"area\": %Lf,", area);
|
||||
}
|
||||
|
||||
if (outer > 1) {
|
||||
fprintf(fp, "\"type\": \"MultiPolygon\", \"coordinates\": [ [ [ ");
|
||||
state.json_write_string("type");
|
||||
state.json_write_string("MultiPolygon");
|
||||
|
||||
state.json_write_string("coordinates");
|
||||
state.json_write_array();
|
||||
state.json_write_array();
|
||||
state.json_write_array();
|
||||
} else {
|
||||
fprintf(fp, "\"type\": \"Polygon\", \"coordinates\": [ [ ");
|
||||
state.json_write_string("type");
|
||||
state.json_write_string("Polygon");
|
||||
|
||||
state.json_write_string("coordinates");
|
||||
state.json_write_array();
|
||||
state.json_write_array();
|
||||
}
|
||||
|
||||
int state = 0;
|
||||
int sstate = 0;
|
||||
for (size_t i = 0; i < rings.size(); i++) {
|
||||
if (i == 0 && areas[i] < 0) {
|
||||
static bool warned = false;
|
||||
@ -353,45 +601,57 @@ void layer_to_geojson(FILE *fp, mvt_layer const &layer, unsigned z, unsigned x,
|
||||
}
|
||||
|
||||
if (areas[i] >= 0) {
|
||||
if (state != 0) {
|
||||
if (sstate != 0) {
|
||||
// new multipolygon
|
||||
fprintf(fp, " ] ], [ [ ");
|
||||
state.json_end_array();
|
||||
state.json_end_array();
|
||||
|
||||
state.json_write_array();
|
||||
state.json_write_array();
|
||||
}
|
||||
state = 1;
|
||||
sstate = 1;
|
||||
}
|
||||
|
||||
if (state == 2) {
|
||||
if (sstate == 2) {
|
||||
// new ring in the same polygon
|
||||
fprintf(fp, " ], [ ");
|
||||
state.json_end_array();
|
||||
state.json_write_array();
|
||||
}
|
||||
|
||||
for (size_t j = 0; j < rings[i].size(); j++) {
|
||||
if (rings[i][j].op != VT_CLOSEPATH) {
|
||||
if (j != 0) {
|
||||
fprintf(fp, ", ");
|
||||
}
|
||||
|
||||
fprintf(fp, "[ %f, %f ]", rings[i][j].lon, rings[i][j].lat);
|
||||
state.json_write_array();
|
||||
state.json_write_float(rings[i][j].lon);
|
||||
state.json_write_float(rings[i][j].lat);
|
||||
state.json_end_array();
|
||||
} else {
|
||||
if (j != 0) {
|
||||
fprintf(fp, ", ");
|
||||
}
|
||||
|
||||
fprintf(fp, "[ %f, %f ]", rings[i][0].lon, rings[i][0].lat);
|
||||
state.json_write_array();
|
||||
state.json_write_float(rings[i][0].lon);
|
||||
state.json_write_float(rings[i][0].lat);
|
||||
state.json_end_array();
|
||||
}
|
||||
}
|
||||
|
||||
state = 2;
|
||||
sstate = 2;
|
||||
}
|
||||
|
||||
if (outer > 1) {
|
||||
fprintf(fp, " ] ] ]");
|
||||
state.json_end_array();
|
||||
state.json_end_array();
|
||||
state.json_end_array();
|
||||
} else {
|
||||
fprintf(fp, " ] ]");
|
||||
state.json_end_array();
|
||||
state.json_end_array();
|
||||
}
|
||||
}
|
||||
|
||||
fprintf(fp, " } }\n");
|
||||
state.json_end_hash();
|
||||
state.json_end_hash();
|
||||
|
||||
if (comma) {
|
||||
state.json_write_newline();
|
||||
state.json_comma_newline();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1,3 +1,67 @@
-void layer_to_geojson(FILE *fp, mvt_layer const &layer, unsigned z, unsigned x, unsigned y, bool comma, bool name, bool zoom, bool dropped, unsigned long long index, long long sequence, long long extent, bool complain);
+#ifndef WRITE_JSON_HPP
+#define WRITE_JSON_HPP
+
+#include <string>
+#include <vector>
+#include <stdio.h>
+
+enum json_write_tok {
+	JSON_WRITE_HASH,
+	JSON_WRITE_HASH_KEY,
+	JSON_WRITE_HASH_VALUE,
+	JSON_WRITE_ARRAY,
+	JSON_WRITE_ARRAY_ELEMENT,
+	JSON_WRITE_TOP,
+};
+
+struct json_writer {
+	std::vector<json_write_tok> state;
+	bool nospace = false;
+	bool wantnl = false;
+	FILE *f = NULL;
+	std::string *s = NULL;
+
+	~json_writer() {
+		if (state.size() > 0) {
+			if (state.size() != 1 || state[0] != JSON_WRITE_TOP) {
+				fprintf(stderr, "JSON not closed at end\n");
+				exit(EXIT_FAILURE);
+			}
+		}
+	}
+
+	json_writer(FILE *fp) {
+		f = fp;
+	}
+
+	json_writer(std::string *out) {
+		s = out;
+	}
+
+	void json_write_array();
+	void json_end_array();
+	void json_write_hash();
+	void json_end_hash();
+	void json_write_string(std::string const &s);
+	void json_write_number(double d);
+	void json_write_float(double d);
+	void json_write_unsigned(unsigned long long v);
+	void json_write_signed(long long v);
+	void json_write_stringified(std::string const &s);
+	void json_write_bool(bool b);
+	void json_write_null();
+	void json_write_newline();
+	void json_comma_newline();
+
+	private:
+	void json_adjust();
+	void aprintf(const char *format, ...);
+	void addc(char c);
+	void adds(std::string const &s);
+};
+
+void layer_to_geojson(mvt_layer const &layer, unsigned z, unsigned x, unsigned y, bool comma, bool name, bool zoom, bool dropped, unsigned long long index, long long sequence, long long extent, bool complain, json_writer &state);
 void fprintq(FILE *f, const char *s);
 void stringify_val(std::string &out, mvt_feature const &feature, mvt_layer const &layer, mvt_value const &val, size_t vo);
+
+#endif
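One subtlety of this API, used heavily in decode.cpp and mbtiles.cpp above: json_comma_newline() prints nothing by itself. It only sets the wantnl flag, and the ",\n" separator is emitted lazily by json_adjust() the next time a value is written, so no trailing comma is produced when nothing follows. A small usage sketch (illustrative only; assumes this header plus write_json.cpp are compiled in, and the loop contents are made up):

#include <stdio.h>
#include <stdlib.h>
#include "write_json.hpp"

int main() {
	json_writer state(stdout);

	state.json_write_array();
	state.json_write_newline();

	for (int i = 0; i < 3; i++) {
		state.json_write_hash();
		state.json_write_string("id");
		state.json_write_signed(i);
		state.json_end_hash();
		state.json_comma_newline();	// defers the ",\n" until the next record actually starts
	}

	state.json_end_array();	// no trailing comma: the deferred separator was never flushed
	state.json_write_newline();
	return 0;
}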