Mirror of https://github.com/mapbox/tippecanoe.git (synced 2025-04-10 04:19:54 +00:00)

commit af1a7ed7ae (parent ed31b9ac84)

Once features can't possibly fit in a tile, stop trying (#9)

* Stop adding features to a tile if it can't possibly work
* Add --integer and --fraction options to tippecanoe-decode
* Carry the strategies field from tileset metadata through tile-join
* Update changelog
* Assign different codes to different kinds of error exits

CHANGELOG.md
@@ -1,6 +1,12 @@
+## 2.6.2
+
+* Stop adding features to a tile if it can't possibly work, to limit memory use
+* Add --integer and --fraction options to tippecanoe-decode
+* Carry `strategies` field from tileset metadata through tile-join
+
 ## 2.6.1
 
-Upgrade protozero to version 1.7.1
+* Upgrade protozero to version 1.7.1
 
 ## 2.6.0
 

Makefile (4 changed lines)
@@ -160,10 +160,14 @@ decode-test:
 	mkdir -p tests/muni/decode
 	./tippecanoe -q -z11 -Z11 -f -o tests/muni/decode/multi.mbtiles tests/muni/*.json
 	./tippecanoe-decode -x generator -l subway tests/muni/decode/multi.mbtiles > tests/muni/decode/multi.mbtiles.json.check
+	./tippecanoe-decode -x generator -l subway --integer tests/muni/decode/multi.mbtiles > tests/muni/decode/multi.mbtiles.integer.json.check
+	./tippecanoe-decode -x generator -l subway --fraction tests/muni/decode/multi.mbtiles > tests/muni/decode/multi.mbtiles.fraction.json.check
 	./tippecanoe-decode -x generator -c tests/muni/decode/multi.mbtiles > tests/muni/decode/multi.mbtiles.pipeline.json.check
 	./tippecanoe-decode -x generator tests/muni/decode/multi.mbtiles 11 327 791 > tests/muni/decode/multi.mbtiles.onetile.json.check
 	./tippecanoe-decode -x generator --stats tests/muni/decode/multi.mbtiles > tests/muni/decode/multi.mbtiles.stats.json.check
 	cmp tests/muni/decode/multi.mbtiles.json.check tests/muni/decode/multi.mbtiles.json
+	cmp tests/muni/decode/multi.mbtiles.integer.json.check tests/muni/decode/multi.mbtiles.integer.json
+	cmp tests/muni/decode/multi.mbtiles.fraction.json.check tests/muni/decode/multi.mbtiles.fraction.json
 	cmp tests/muni/decode/multi.mbtiles.pipeline.json.check tests/muni/decode/multi.mbtiles.pipeline.json
 	cmp tests/muni/decode/multi.mbtiles.onetile.json.check tests/muni/decode/multi.mbtiles.onetile.json
 	cmp tests/muni/decode/multi.mbtiles.stats.json.check tests/muni/decode/multi.mbtiles.stats.json

README.md
@@ -880,6 +880,8 @@ resolutions.
 * `-c` or `--tag-layer-and-zoom`: Include each feature's layer and zoom level as part of its `tippecanoe` object rather than as a FeatureCollection wrapper
 * `-S` or `--stats`: Just report statistics about each tile's size and the number of features in it, as a JSON structure.
 * `-f` or `--force`: Decode tiles even if polygon ring order or closure problems are detected
+* `-I` or `--integer`: Report coordinates in integer tile coordinates
+* `-F` or `--fraction`: Report coordinates as a fraction of the tile extent
 
 tippecanoe-json-tool
 ====================

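For a quick look at what the two new decode options change, the same tile can be decoded three ways; `tiles.mbtiles` is a placeholder name here, and the tile address 11/327/791 is the one used in the Makefile test above:

    # default: coordinates are reported as longitude/latitude
    tippecanoe-decode tiles.mbtiles 11 327 791

    # -I / --integer: coordinates in integer tile coordinates
    tippecanoe-decode --integer tiles.mbtiles 11 327 791

    # -F / --fraction: coordinates as a fraction of the layer extent
    tippecanoe-decode --fraction tiles.mbtiles 11 327 791
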
csv.cpp (9 changed lines)
@@ -1,5 +1,6 @@
 #include "csv.hpp"
 #include "text.hpp"
+#include "errors.hpp"
 
 std::vector<std::string> csv_split(const char *s) {
 std::vector<std::string> ret;
@@ -68,7 +69,7 @@ void readcsv(const char *fn, std::vector<std::string> &header, std::map<std::str
 FILE *f = fopen(fn, "r");
 if (f == NULL) {
 perror(fn);
-exit(EXIT_FAILURE);
+exit(EXIT_OPEN);
 }
 
 std::string s;
@@ -76,7 +77,7 @@ void readcsv(const char *fn, std::vector<std::string> &header, std::map<std::str
 std::string err = check_utf8(s);
 if (err != "") {
 fprintf(stderr, "%s: %s\n", fn, err.c_str());
-exit(EXIT_FAILURE);
+exit(EXIT_UTF8);
 }
 
 header = csv_split(s.c_str());
@@ -89,7 +90,7 @@ void readcsv(const char *fn, std::vector<std::string> &header, std::map<std::str
 std::string err = check_utf8(s);
 if (err != "") {
 fprintf(stderr, "%s: %s\n", fn, err.c_str());
-exit(EXIT_FAILURE);
+exit(EXIT_UTF8);
 }
 
 std::vector<std::string> line = csv_split(s.c_str());
@@ -105,7 +106,7 @@ void readcsv(const char *fn, std::vector<std::string> &header, std::map<std::str
 
 if (fclose(f) != 0) {
 perror("fclose");
-exit(EXIT_FAILURE);
+exit(EXIT_CLOSE);
 }
 }
 

decode.cpp (72 changed lines)
@@ -22,6 +22,7 @@
 #include "write_json.hpp"
 #include "jsonpull/jsonpull.h"
 #include "dirtiles.hpp"
+#include "errors.hpp"
 
 int minzoom = 0;
 int maxzoom = 32;
@@ -85,18 +86,18 @@ void do_stats(mvt_tile &tile, size_t size, bool compressed, int z, unsigned x, u
 state.json_write_newline();
 }
 
-void handle(std::string message, int z, unsigned x, unsigned y, std::set<std::string> const &to_decode, bool pipeline, bool stats, json_writer &state) {
+void handle(std::string message, int z, unsigned x, unsigned y, std::set<std::string> const &to_decode, bool pipeline, bool stats, json_writer &state, int coordinate_mode) {
 mvt_tile tile;
 bool was_compressed;
 
 try {
 if (!tile.decode(message, was_compressed)) {
 fprintf(stderr, "Couldn't parse tile %d/%u/%u\n", z, x, y);
-exit(EXIT_FAILURE);
+exit(EXIT_MVT);
 }
 } catch (std::exception const &e) {
 fprintf(stderr, "PBF decoding error in tile %d/%u/%u\n", z, x, y);
-exit(EXIT_FAILURE);
+exit(EXIT_PROTOBUF);
 }
 
 if (stats) {
@@ -159,7 +160,7 @@ void handle(std::string message, int z, unsigned x, unsigned y, std::set<std::st
 
 if (layer.extent <= 0) {
 fprintf(stderr, "Impossible layer extent %lld in mbtiles\n", layer.extent);
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 
 if (to_decode.size() != 0 && !to_decode.count(layer.name)) {
@@ -202,10 +203,16 @@ void handle(std::string message, int z, unsigned x, unsigned y, std::set<std::st
 // X and Y are unsigned, so no need to check <0
 if (x > (1ULL << z) || y > (1ULL << z)) {
 fprintf(stderr, "Impossible tile %d/%u/%u\n", z, x, y);
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 
-layer_to_geojson(layer, z, x, y, !pipeline, pipeline, pipeline, false, 0, 0, 0, !force, state);
+double scale = 0;
+if (coordinate_mode == 1) { // fraction
+scale = layer.extent;
+} else if (coordinate_mode == 2) { // integer
+scale = 1;
+}
+layer_to_geojson(layer, z, x, y, !pipeline, pipeline, pipeline, false, 0, 0, 0, !force, state, scale);
 
 if (!pipeline) {
 if (true) {
@@ -223,7 +230,7 @@ void handle(std::string message, int z, unsigned x, unsigned y, std::set<std::st
 }
 }
 
-void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> const &to_decode, bool pipeline, bool stats, std::set<std::string> const &exclude_meta) {
+void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> const &to_decode, bool pipeline, bool stats, std::set<std::string> const &exclude_meta, int coordinate_mode) {
 sqlite3 *db = NULL;
 bool isdir = false;
 int oz = z;
@@ -240,12 +247,12 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
 if (strcmp(map, "SQLite format 3") != 0) {
 if (z >= 0) {
 std::string s = std::string(map, st.st_size);
-handle(s, z, x, y, to_decode, pipeline, stats, state);
+handle(s, z, x, y, to_decode, pipeline, stats, state, coordinate_mode);
 munmap(map, st.st_size);
 return;
 } else {
 fprintf(stderr, "Must specify zoom/x/y to decode a single pbf file\n");
-exit(EXIT_FAILURE);
+exit(EXIT_ARGS);
 }
 }
 }
@@ -256,7 +263,7 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
 }
 if (close(fd) != 0) {
 perror("close");
-exit(EXIT_FAILURE);
+exit(EXIT_CLOSE);
 }
 } else {
 perror(fname);
@@ -272,13 +279,13 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
 } else {
 if (sqlite3_open(fname, &db) != SQLITE_OK) {
 fprintf(stderr, "%s: %s\n", fname, sqlite3_errmsg(db));
-exit(EXIT_FAILURE);
+exit(EXIT_OPEN);
 }
 
 char *err = NULL;
 if (sqlite3_exec(db, "PRAGMA integrity_check;", NULL, NULL, &err) != SQLITE_OK) {
 fprintf(stderr, "%s: integrity_check: %s\n", fname, err);
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 }
 
@@ -299,7 +306,7 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
 sqlite3_stmt *stmt2;
 if (sqlite3_prepare_v2(db, sql2, -1, &stmt2, NULL) != SQLITE_OK) {
 fprintf(stderr, "%s: select failed: %s\n", fname, sqlite3_errmsg(db));
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 
 while (sqlite3_step(stmt2) == SQLITE_ROW) {
@@ -308,7 +315,7 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
 
 if (name == NULL || value == NULL) {
 fprintf(stderr, "Corrupt mbtiles file: null metadata\n");
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 
 if (exclude_meta.count((char *) name) == 0) {
@@ -361,7 +368,7 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
 FILE *f = fopen(fn.c_str(), "rb");
 if (f == NULL) {
 perror(fn.c_str());
-exit(EXIT_FAILURE);
+exit(EXIT_OPEN);
 }
 
 std::string s;
@@ -372,14 +379,14 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
 }
 fclose(f);
 
-handle(s, tiles[i].z, tiles[i].x, tiles[i].y, to_decode, pipeline, stats, state);
+handle(s, tiles[i].z, tiles[i].x, tiles[i].y, to_decode, pipeline, stats, state, coordinate_mode);
 }
 } else {
 const char *sql = "SELECT tile_data, zoom_level, tile_column, tile_row from tiles where zoom_level between ? and ? order by zoom_level, tile_column, tile_row;";
 sqlite3_stmt *stmt;
 if (sqlite3_prepare_v2(db, sql, -1, &stmt, NULL) != SQLITE_OK) {
 fprintf(stderr, "%s: select failed: %s\n", fname, sqlite3_errmsg(db));
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 
 sqlite3_bind_int(stmt, 1, minzoom);
@@ -407,7 +414,7 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
 
 if (tz < 0 || tz >= 32) {
 fprintf(stderr, "Impossible zoom level %d in mbtiles\n", tz);
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 
 ty = (1LL << tz) - 1 - ty;
@@ -415,10 +422,10 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
 
 if (s == NULL) {
 fprintf(stderr, "Corrupt mbtiles file: null entry in tiles table\n");
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 
-handle(std::string(s, len), tz, tx, ty, to_decode, pipeline, stats, state);
+handle(std::string(s, len), tz, tx, ty, to_decode, pipeline, stats, state, coordinate_mode);
 }
 
 sqlite3_finalize(stmt);
@@ -443,7 +450,7 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
 sqlite3_stmt *stmt;
 if (sqlite3_prepare_v2(db, sql, -1, &stmt, NULL) != SQLITE_OK) {
 fprintf(stderr, "%s: select failed: %s\n", fname, sqlite3_errmsg(db));
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 
 sqlite3_bind_int(stmt, 1, z);
@@ -456,14 +463,14 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
 
 if (s == NULL) {
 fprintf(stderr, "Corrupt mbtiles file: null entry in tiles table\n");
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 
 if (z != oz) {
 fprintf(stderr, "%s: Warning: using tile %d/%u/%u instead of %d/%u/%u\n", fname, z, x, y, oz, ox, oy);
 }
 
-handle(std::string(s, len), z, x, y, to_decode, pipeline, stats, state);
+handle(std::string(s, len), z, x, y, to_decode, pipeline, stats, state, coordinate_mode);
 handled = 1;
 }
 
@@ -477,13 +484,13 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> co
 
 if (sqlite3_close(db) != SQLITE_OK) {
 fprintf(stderr, "%s: could not close database: %s\n", fname, sqlite3_errmsg(db));
-exit(EXIT_FAILURE);
+exit(EXIT_CLOSE);
 }
 }
 
 void usage(char **argv) {
 fprintf(stderr, "Usage: %s [-s projection] [-Z minzoom] [-z maxzoom] [-l layer ...] file.mbtiles [zoom x y]\n", argv[0]);
-exit(EXIT_FAILURE);
+exit(EXIT_ARGS);
 }
 
 int main(int argc, char **argv) {
@@ -494,9 +501,12 @@ int main(int argc, char **argv) {
 bool pipeline = false;
 bool stats = false;
 std::set<std::string> exclude_meta;
+int coordinate_mode = 0;
 
 struct option long_options[] = {
 {"projection", required_argument, 0, 's'},
+{"fractional-coordinates", no_argument, 0, 'F'},
+{"integer-coordinates", no_argument, 0, 'I'},
 {"maximum-zoom", required_argument, 0, 'z'},
 {"minimum-zoom", required_argument, 0, 'Z'},
 {"layer", required_argument, 0, 'l'},
@@ -527,6 +537,14 @@ int main(int argc, char **argv) {
 set_projection_or_exit(optarg);
 break;
 
+case 'F':
+coordinate_mode = 1;
+break;
+
+case 'I':
+coordinate_mode = 2;
+break;
+
 case 'z':
 maxzoom = atoi(optarg);
 break;
@@ -561,9 +579,9 @@ int main(int argc, char **argv) {
 }
 
 if (argc == optind + 4) {
-decode(argv[optind], atoi(argv[optind + 1]), atoi(argv[optind + 2]), atoi(argv[optind + 3]), to_decode, pipeline, stats, exclude_meta);
+decode(argv[optind], atoi(argv[optind + 1]), atoi(argv[optind + 2]), atoi(argv[optind + 3]), to_decode, pipeline, stats, exclude_meta, coordinate_mode);
 } else if (argc == optind + 1) {
-decode(argv[optind], -1, -1, -1, to_decode, pipeline, stats, exclude_meta);
+decode(argv[optind], -1, -1, -1, to_decode, pipeline, stats, exclude_meta, coordinate_mode);
 } else {
 usage(argv);
 }

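The decode path above threads `coordinate_mode` from the option parser down into `handle()` and `layer_to_geojson()`, and swaps the blanket `EXIT_FAILURE` for specific codes so a caller can tell an argument error from a corrupt tile. A small sketch of the raw-tile case covered by the `EXIT_ARGS` branch; `one_tile.pbf` is a placeholder file name:

    # a bare .pbf tile needs an explicit zoom/x/y, otherwise tippecanoe-decode exits with EXIT_ARGS (101)
    tippecanoe-decode --integer one_tile.pbf; echo "exit status: $?"

    # with the tile address supplied, the tile is decoded in integer tile coordinates
    tippecanoe-decode --integer one_tile.pbf 11 327 791
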
dirtiles.cpp (21 changed lines)
@@ -12,6 +12,7 @@
 #include <sqlite3.h>
 #include "jsonpull/jsonpull.h"
 #include "dirtiles.hpp"
+#include "errors.hpp"
 
 std::string dir_read_tile(std::string base, struct zxy tile) {
 std::ifstream pbfFile(base + "/" + tile.path(), std::ios::in | std::ios::binary);
@@ -35,7 +36,7 @@ void dir_write_tile(const char *outdir, int z, int tx, int ty, std::string const
 struct stat st;
 if (stat(newdir.c_str(), &st) == 0) {
 fprintf(stderr, "Can't write tile to already existing %s\n", newdir.c_str());
-exit(EXIT_FAILURE);
+exit(EXIT_EXISTS);
 }
 
 std::ofstream pbfFile(newdir, std::ios::out | std::ios::binary);
@@ -75,7 +76,7 @@ void check_dir(const char *dir, char **argv, bool force, bool forcetable) {
 fprintf(stderr, "%s: Tileset \"%s\" already exists. You can use --force if you want to delete the old tileset.\n", argv[0], dir);
 fprintf(stderr, "%s: %s: file exists\n", argv[0], meta.c_str());
 if (!forcetable) {
-exit(EXIT_FAILURE);
+exit(EXIT_EXISTS);
 }
 }
 }
@@ -93,11 +94,11 @@ void check_dir(const char *dir, char **argv, bool force, bool forcetable) {
 if (force) {
 if (unlink(fn.c_str()) != 0) {
 perror(fn.c_str());
-exit(EXIT_FAILURE);
+exit(EXIT_UNLINK);
 }
 } else {
 fprintf(stderr, "%s: file exists\n", fn.c_str());
-exit(EXIT_FAILURE);
+exit(EXIT_EXISTS);
 }
 }
 }
@@ -116,7 +117,7 @@ std::vector<zxy> enumerate_dirtiles(const char *fname, int minzoom, int maxzoom)
 DIR *d2 = opendir(z.c_str());
 if (d2 == NULL) {
 perror(z.c_str());
-exit(EXIT_FAILURE);
+exit(EXIT_OPEN);
 }
 
 struct dirent *dp2;
@@ -128,7 +129,7 @@ std::vector<zxy> enumerate_dirtiles(const char *fname, int minzoom, int maxzoom)
 DIR *d3 = opendir(x.c_str());
 if (d3 == NULL) {
 perror(x.c_str());
-exit(EXIT_FAILURE);
+exit(EXIT_OPEN);
 }
 
 struct dirent *dp3;
@@ -165,11 +166,11 @@ sqlite3 *dirmeta2tmp(const char *fname) {
 
 if (sqlite3_open("", &db) != SQLITE_OK) {
 fprintf(stderr, "Temporary db: %s\n", sqlite3_errmsg(db));
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 if (sqlite3_exec(db, "CREATE TABLE metadata (name text, value text);", NULL, NULL, &err) != SQLITE_OK) {
 fprintf(stderr, "Create metadata table: %s\n", err);
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 
 std::string name = fname;
@@ -183,12 +184,12 @@ sqlite3 *dirmeta2tmp(const char *fname) {
 json_object *o = json_read_tree(jp);
 if (o == NULL) {
 fprintf(stderr, "%s: metadata parsing error: %s\n", name.c_str(), jp->error);
-exit(EXIT_FAILURE);
+exit(EXIT_JSON);
 }
 
 if (o->type != JSON_HASH) {
 fprintf(stderr, "%s: bad metadata format\n", name.c_str());
-exit(EXIT_FAILURE);
+exit(EXIT_JSON);
 }
 
 for (size_t i = 0; i < o->value.object.length; i++) {

enumerate.cpp
@@ -2,19 +2,20 @@
 #include <stdlib.h>
 #include <unistd.h>
 #include <sqlite3.h>
+#include "errors.hpp"
 
 void enumerate(char *fname) {
 sqlite3 *db;
 
 if (sqlite3_open(fname, &db) != SQLITE_OK) {
 fprintf(stderr, "%s: %s\n", fname, sqlite3_errmsg(db));
-exit(EXIT_FAILURE);
+exit(EXIT_OPEN);
 }
 
 char *err = NULL;
 if (sqlite3_exec(db, "PRAGMA integrity_check;", NULL, NULL, &err) != SQLITE_OK) {
 fprintf(stderr, "%s: integrity_check: %s\n", fname, err);
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 
 const char *sql = "SELECT zoom_level, tile_column, tile_row from tiles order by zoom_level, tile_column, tile_row;";
@@ -22,7 +23,7 @@ void enumerate(char *fname) {
 sqlite3_stmt *stmt;
 if (sqlite3_prepare_v2(db, sql, -1, &stmt, NULL) != SQLITE_OK) {
 fprintf(stderr, "%s: select failed: %s\n", fname, sqlite3_errmsg(db));
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 
 while (sqlite3_step(stmt) == SQLITE_ROW) {
@@ -32,7 +33,7 @@ void enumerate(char *fname) {
 
 if (zoom < 0 || zoom > 31) {
 fprintf(stderr, "Corrupt mbtiles file: impossible zoom level %lld\n", zoom);
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 
 y = (1LL << zoom) - 1 - y;
@@ -43,13 +44,13 @@ void enumerate(char *fname) {
 
 if (sqlite3_close(db) != SQLITE_OK) {
 fprintf(stderr, "%s: could not close database: %s\n", fname, sqlite3_errmsg(db));
-exit(EXIT_FAILURE);
+exit(EXIT_CLOSE);
 }
 }
 
 void usage(char **argv) {
 fprintf(stderr, "Usage: %s file.mbtiles ...\n", argv[0]);
-exit(EXIT_FAILURE);
+exit(EXIT_ARGS);
 }
 
 int main(int argc, char **argv) {

errors.hpp (new file, 21 lines)
@@ -0,0 +1,21 @@
+#define EXIT_INCOMPLETE 100
+#define EXIT_ARGS 101
+#define EXIT_CLOSE 102
+#define EXIT_CSV 103
+#define EXIT_EXISTS 104
+#define EXIT_FILTER 105
+#define EXIT_IMPOSSIBLE 106
+#define EXIT_JSON 107
+#define EXIT_MEMORY 108
+#define EXIT_MVT 109
+#define EXIT_NODATA 110
+#define EXIT_OPEN 111
+#define EXIT_PROTOBUF 112
+#define EXIT_PTHREAD 113
+#define EXIT_READ 114
+#define EXIT_SEEK 115
+#define EXIT_SQLITE 116
+#define EXIT_STAT 117
+#define EXIT_UNLINK 118
+#define EXIT_UTF8 119
+#define EXIT_WRITE 120

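Because every failure class now has its own status in the 100-120 range, a wrapper script can react differently to different failures instead of seeing a generic exit 1. A hedged sketch (the input and output names are placeholders; the numeric codes are the ones defined above):

    tippecanoe -o out.mbtiles -zg input.geojson
    status=$?
    case $status in
        0)   echo "tileset written" ;;
        101) echo "bad command-line arguments (EXIT_ARGS)" ;;
        111) echo "could not open a file (EXIT_OPEN)" ;;
        116) echo "SQLite error (EXIT_SQLITE)" ;;
        *)   echo "failed with exit status $status" ;;
    esac
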
evaluator.cpp
@@ -4,6 +4,7 @@
 #include <map>
 #include "mvt.hpp"
 #include "evaluator.hpp"
+#include "errors.hpp"
 
 int compare(mvt_value one, json_object *two, bool &fail) {
 if (one.type == mvt_string) {
@@ -34,7 +35,7 @@ int compare(mvt_value one, json_object *two, bool &fail) {
 v = one.numeric_value.sint_value;
 } else {
 fprintf(stderr, "Internal error: bad mvt type %d\n", one.type);
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 
 if (v < two->value.number.number) {
@@ -66,36 +67,36 @@ int compare(mvt_value one, json_object *two, bool &fail) {
 }
 
 fprintf(stderr, "Internal error: bad mvt type %d\n", one.type);
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 
 bool eval(std::map<std::string, mvt_value> const &feature, json_object *f, std::set<std::string> &exclude_attributes) {
 if (f == NULL || f->type != JSON_ARRAY) {
 fprintf(stderr, "Filter is not an array: %s\n", json_stringify(f));
-exit(EXIT_FAILURE);
+exit(EXIT_FILTER);
 }
 
 if (f->value.array.length < 1) {
 fprintf(stderr, "Array too small in filter: %s\n", json_stringify(f));
-exit(EXIT_FAILURE);
+exit(EXIT_FILTER);
 }
 
 if (f->value.array.array[0]->type != JSON_STRING) {
 fprintf(stderr, "Filter operation is not a string: %s\n", json_stringify(f));
-exit(EXIT_FAILURE);
+exit(EXIT_FILTER);
 }
 
 if (strcmp(f->value.array.array[0]->value.string.string, "has") == 0 ||
 strcmp(f->value.array.array[0]->value.string.string, "!has") == 0) {
 if (f->value.array.length != 2) {
 fprintf(stderr, "Wrong number of array elements in filter: %s\n", json_stringify(f));
-exit(EXIT_FAILURE);
+exit(EXIT_FILTER);
 }
 
 if (strcmp(f->value.array.array[0]->value.string.string, "has") == 0) {
 if (f->value.array.array[1]->type != JSON_STRING) {
 fprintf(stderr, "\"has\" key is not a string: %s\n", json_stringify(f));
-exit(EXIT_FAILURE);
+exit(EXIT_FILTER);
 }
 return feature.count(std::string(f->value.array.array[1]->value.string.string)) != 0;
 }
@@ -103,7 +104,7 @@ bool eval(std::map<std::string, mvt_value> const &feature, json_object *f, std::
 if (strcmp(f->value.array.array[0]->value.string.string, "!has") == 0) {
 if (f->value.array.array[1]->type != JSON_STRING) {
 fprintf(stderr, "\"!has\" key is not a string: %s\n", json_stringify(f));
-exit(EXIT_FAILURE);
+exit(EXIT_FILTER);
 }
 return feature.count(std::string(f->value.array.array[1]->value.string.string)) == 0;
 }
@@ -117,11 +118,11 @@ bool eval(std::map<std::string, mvt_value> const &feature, json_object *f, std::
 strcmp(f->value.array.array[0]->value.string.string, "<=") == 0) {
 if (f->value.array.length != 3) {
 fprintf(stderr, "Wrong number of array elements in filter: %s\n", json_stringify(f));
-exit(EXIT_FAILURE);
+exit(EXIT_FILTER);
 }
 if (f->value.array.array[1]->type != JSON_STRING) {
-fprintf(stderr, "\"!has\" key is not a string: %s\n", json_stringify(f));
-exit(EXIT_FAILURE);
+fprintf(stderr, "comparison key is not a string: %s\n", json_stringify(f));
+exit(EXIT_FILTER);
 }
 
 auto ff = feature.find(std::string(f->value.array.array[1]->value.string.string));
@@ -176,7 +177,7 @@ bool eval(std::map<std::string, mvt_value> const &feature, json_object *f, std::
 }
 
 fprintf(stderr, "Internal error: can't happen: %s\n", json_stringify(f));
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 
 if (strcmp(f->value.array.array[0]->value.string.string, "all") == 0 ||
@@ -217,12 +218,12 @@ bool eval(std::map<std::string, mvt_value> const &feature, json_object *f, std::
 strcmp(f->value.array.array[0]->value.string.string, "!in") == 0) {
 if (f->value.array.length < 2) {
 fprintf(stderr, "Array too small in filter: %s\n", json_stringify(f));
-exit(EXIT_FAILURE);
+exit(EXIT_FILTER);
 }
 
 if (f->value.array.array[1]->type != JSON_STRING) {
-fprintf(stderr, "\"!has\" key is not a string: %s\n", json_stringify(f));
-exit(EXIT_FAILURE);
+fprintf(stderr, "\"!in\" key is not a string: %s\n", json_stringify(f));
+exit(EXIT_FILTER);
 }
 
 auto ff = feature.find(std::string(f->value.array.array[1]->value.string.string));
@@ -272,12 +273,12 @@ bool eval(std::map<std::string, mvt_value> const &feature, json_object *f, std::
 if (strcmp(f->value.array.array[0]->value.string.string, "attribute-filter") == 0) {
 if (f->value.array.length != 3) {
 fprintf(stderr, "Wrong number of array elements in filter: %s\n", json_stringify(f));
-exit(EXIT_FAILURE);
+exit(EXIT_FILTER);
 }
 
 if (f->value.array.array[1]->type != JSON_STRING) {
 fprintf(stderr, "\"attribute-filter\" key is not a string: %s\n", json_stringify(f));
-exit(EXIT_FAILURE);
+exit(EXIT_FILTER);
 }
 
 bool ok = eval(feature, f->value.array.array[2], exclude_attributes);
@@ -289,13 +290,13 @@ bool eval(std::map<std::string, mvt_value> const &feature, json_object *f, std::
 }
 
 fprintf(stderr, "Unknown filter %s\n", json_stringify(f));
-exit(EXIT_FAILURE);
+exit(EXIT_FILTER);
 }
 
 bool evaluate(std::map<std::string, mvt_value> const &feature, std::string const &layer, json_object *filter, std::set<std::string> &exclude_attributes) {
 if (filter == NULL || filter->type != JSON_HASH) {
 fprintf(stderr, "Error: filter is not a hash: %s\n", json_stringify(filter));
-exit(EXIT_FAILURE);
+exit(EXIT_JSON);
 }
 
 bool ok = true;
@@ -318,14 +319,14 @@ json_object *read_filter(const char *fname) {
 FILE *fp = fopen(fname, "r");
 if (fp == NULL) {
 perror(fname);
-exit(EXIT_FAILURE);
+exit(EXIT_OPEN);
 }
 
 json_pull *jp = json_begin_file(fp);
 json_object *filter = json_read_tree(jp);
 if (filter == NULL) {
 fprintf(stderr, "%s: %s\n", fname, jp->error);
-exit(EXIT_FAILURE);
+exit(EXIT_JSON);
 }
 json_disconnect(filter);
 json_end(jp);
@@ -339,7 +340,7 @@ json_object *parse_filter(const char *s) {
 if (filter == NULL) {
 fprintf(stderr, "Could not parse filter %s\n", s);
 fprintf(stderr, "%s\n", jp->error);
-exit(EXIT_FAILURE);
+exit(EXIT_JSON);
 }
 json_disconnect(filter);
 json_end(jp);

flatgeobuf.cpp
@@ -6,6 +6,7 @@
 #include "flatgeobuf/header_generated.h"
 #include "milo/dtoa_milo.h"
 #include "main.hpp"
+#include "errors.hpp"
 
 static constexpr uint8_t magicbytes[8] = { 0x66, 0x67, 0x62, 0x03, 0x66, 0x67, 0x62, 0x01 };
 
@@ -95,7 +96,7 @@ drawvec readGeometry(const FlatGeobuf::Geometry *geometry, FlatGeobuf::GeometryT
 return dv;
 } else {
 fprintf(stderr, "flatgeobuf has unsupported geometry type %u\n", (unsigned int)h_geometry_type);
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 }
 
@@ -124,7 +125,7 @@ void readFeature(const FlatGeobuf::Feature *feature, long long feature_sequence_
 case FlatGeobuf::GeometryType::GeometryCollection :
 default:
 fprintf(stderr, "flatgeobuf has unsupported geometry type %u\n", (unsigned int)h_geometry_type);
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 
 serial_feature sf;
@@ -236,7 +237,7 @@ void readFeature(const FlatGeobuf::Feature *feature, long long feature_sequence_
 } else {
 // Binary is not representable in MVT
 fprintf(stderr, "flatgeobuf has unsupported column type %u\n", (unsigned int)col_type);
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 full_keys.push_back(h_column_names[col_idx]);
 full_values.push_back(sv);
@@ -304,7 +305,7 @@ void fgbRunQueue() {
 for (size_t i = 0; i < CPUS; i++) {
 if (pthread_create(&pthreads[i], NULL, fgb_run_parse_feature, &qra[i]) != 0) {
 perror("pthread_create");
-exit(EXIT_FAILURE);
+exit(EXIT_PTHREAD);
 }
 }
 
@@ -347,7 +348,7 @@ void parse_flatgeobuf(std::vector<struct serialization_state> *sst, const char *
 const auto ok = FlatGeobuf::VerifySizePrefixedHeaderBuffer(v);
 if (!ok) {
 fprintf(stderr, "flatgeobuf header verification failed\n");
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 
 auto header = FlatGeobuf::GetSizePrefixedHeader(src + sizeof(magicbytes));
@@ -384,7 +385,7 @@ void parse_flatgeobuf(std::vector<struct serialization_state> *sst, const char *
 const auto ok2 = FlatGeobuf::VerifySizePrefixedFeatureBuffer(v2);
 if (!ok2) {
 fprintf(stderr, "flatgeobuf feature buffer verification failed\n");
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 
 auto feature = FlatGeobuf::GetSizePrefixedFeature(start);
@@ -396,4 +397,4 @@ void parse_flatgeobuf(std::vector<struct serialization_state> *sst, const char *
 }
 
 fgbRunQueue();
 }
 }

geobuf.cpp (15 changed lines)
@@ -14,6 +14,7 @@
 #include "milo/dtoa_milo.h"
 #include "jsonpull/jsonpull.h"
 #include "text.hpp"
+#include "errors.hpp"
 
 #define POINT 0
 #define MULTIPOINT 1
@@ -37,7 +38,7 @@ static std::vector<queued_feature> feature_queue;
 void ensureDim(size_t dim) {
 if (dim < 2) {
 fprintf(stderr, "Geometry has fewer than 2 dimensions: %zu\n", dim);
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 }
 
@@ -117,7 +118,7 @@ drawvec readLinePart(std::vector<long long> &coords, size_t dim, double e, size_
 for (size_t i = start; i + dim - 1 < end; i += dim) {
 if (i + dim - 1 >= coords.size()) {
 fprintf(stderr, "Internal error: line segment %zu vs %zu\n", i + dim - 1, coords.size());
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 
 for (size_t d = 0; d < dim; d++) {
@@ -328,12 +329,12 @@ void readFeature(protozero::pbf_reader &pbf, size_t dim, double e, std::vector<s
 for (size_t i = 0; i + 1 < properties.size(); i += 2) {
 if (properties[i] >= keys.size()) {
 fprintf(stderr, "Out of bounds key: %zu in %zu\n", properties[i], keys.size());
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 
 if (properties[i + 1] >= values.size()) {
 fprintf(stderr, "Out of bounds value: %zu in %zu\n", properties[i + 1], values.size());
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 
 full_keys.push_back(keys[properties[i]]);
@@ -354,12 +355,12 @@ void readFeature(protozero::pbf_reader &pbf, size_t dim, double e, std::vector<s
 for (size_t i = 0; i + 1 < misc.size(); i += 2) {
 if (misc[i] >= keys.size()) {
 fprintf(stderr, "Out of bounds key: %zu in %zu\n", misc[i], keys.size());
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 
 if (misc[i + 1] >= values.size()) {
 fprintf(stderr, "Out of bounds value: %zu in %zu\n", misc[i + 1], values.size());
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 
 other.insert(std::pair<std::string, serial_val>(keys[misc[i]], values[misc[i + 1]]));
@@ -466,7 +467,7 @@ void runQueue() {
 for (size_t i = 0; i < CPUS; i++) {
 if (pthread_create(&pthreads[i], NULL, run_parse_feature, &qra[i]) != 0) {
 perror("pthread_create");
-exit(EXIT_FAILURE);
+exit(EXIT_PTHREAD);
 }
 }
 

geocsv.cpp (13 changed lines)
@@ -9,6 +9,7 @@
 #include "csv.hpp"
 #include "milo/dtoa_milo.h"
 #include "options.hpp"
+#include "errors.hpp"
 
 void parse_geocsv(std::vector<struct serialization_state> &sst, std::string fname, int layer, std::string layername) {
 FILE *f;
@@ -19,7 +20,7 @@ void parse_geocsv(std::vector<struct serialization_state> &sst, std::string fnam
 f = fopen(fname.c_str(), "r");
 if (f == NULL) {
 perror(fname.c_str());
-exit(EXIT_FAILURE);
+exit(EXIT_OPEN);
 }
 }
 
@@ -31,7 +32,7 @@ void parse_geocsv(std::vector<struct serialization_state> &sst, std::string fnam
 std::string err = check_utf8(s);
 if (err != "") {
 fprintf(stderr, "%s: %s\n", fname.c_str(), err.c_str());
-exit(EXIT_FAILURE);
+exit(EXIT_UTF8);
 }
 
 header = csv_split(s.c_str());
@@ -53,7 +54,7 @@ void parse_geocsv(std::vector<struct serialization_state> &sst, std::string fnam
 
 if (latcol < 0 || loncol < 0) {
 fprintf(stderr, "%s: Can't find \"lat\" and \"lon\" columns\n", fname.c_str());
-exit(EXIT_FAILURE);
+exit(EXIT_CSV);
 }
 
 size_t seq = 0;
@@ -61,7 +62,7 @@ void parse_geocsv(std::vector<struct serialization_state> &sst, std::string fnam
 std::string err = check_utf8(s);
 if (err != "") {
 fprintf(stderr, "%s: %s\n", fname.c_str(), err.c_str());
-exit(EXIT_FAILURE);
+exit(EXIT_UTF8);
 }
 
 seq++;
@@ -69,7 +70,7 @@ void parse_geocsv(std::vector<struct serialization_state> &sst, std::string fnam
 
 if (line.size() != header.size()) {
 fprintf(stderr, "%s:%zu: Mismatched column count: %zu in line, %zu in header\n", fname.c_str(), seq + 1, line.size(), header.size());
-exit(EXIT_FAILURE);
+exit(EXIT_CSV);
 }
 
 if (line[loncol].empty() || line[latcol].empty()) {
@@ -133,7 +134,7 @@ void parse_geocsv(std::vector<struct serialization_state> &sst, std::string fnam
 if (fname.size() != 0) {
 if (fclose(f) != 0) {
 perror("fclose");
-exit(EXIT_FAILURE);
+exit(EXIT_CLOSE);
 }
 }
 }

geojson.cpp
@@ -39,6 +39,7 @@
 #include "mvt.hpp"
 #include "geojson-loop.hpp"
 #include "milo/dtoa_milo.h"
+#include "errors.hpp"
 
 int serialize_geojson_feature(struct serialization_state *sst, json_object *geometry, json_object *properties, json_object *id, int layer, json_object *tippecanoe, json_object *feature, std::string layername) {
 json_object *geometry_type = json_hash_get(geometry, "type");
@@ -332,7 +333,7 @@ struct json_pull *json_begin_map(char *map, long long len) {
 struct jsonmap *jm = new jsonmap;
 if (jm == NULL) {
 perror("Out of memory");
-exit(EXIT_FAILURE);
+exit(EXIT_MEMORY);
 }
 
 jm->map = map;

geometry.cpp (13 changed lines)
@@ -20,6 +20,7 @@
 #include "serial.hpp"
 #include "main.hpp"
 #include "options.hpp"
+#include "errors.hpp"
 
 static int pnpoly(drawvec &vert, size_t start, size_t nvert, long long testx, long long testy);
 static int clip(double *x0, double *y0, double *x1, double *y1, double xmin, double ymin, double xmax, double ymax);
@@ -39,7 +40,7 @@ drawvec decode_geometry(FILE *meta, std::atomic<long long> *geompos, int z, unsi
 
 if (!deserialize_byte_io(meta, &d.op, geompos)) {
 fprintf(stderr, "Internal error: Unexpected end of file in geometry\n");
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 if (d.op == VT_END) {
 break;
@@ -210,7 +211,7 @@ static void decode_clipped(mapbox::geometry::multi_polygon<long long> &t, drawve
 
 if ((j == 0 && area < 0) || (j != 0 && area > 0)) {
 fprintf(stderr, "Ring area has wrong sign: %f for %zu\n", area, j);
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 
 for (size_t k = 0; k < ring.size(); k++) {
@@ -316,7 +317,7 @@ drawvec clean_or_clip_poly(drawvec &geom, int z, int buffer, bool clip) {
 
 fclose(f);
 fprintf(stderr, "Internal error: Polygon cleaning failed. Log in /tmp/wagyu.log\n");
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 
 drawvec ret;
@@ -501,7 +502,7 @@ drawvec simple_clip_poly(drawvec &geom, long long minx, long long miny, long lon
 i = j - 1;
 } else {
 fprintf(stderr, "Unexpected operation in polygon %d\n", (int) geom[i].op);
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 }
 
@@ -1023,7 +1024,7 @@ drawvec fix_polygon(drawvec &geom) {
 outer = 0;
 } else {
 fprintf(stderr, "Internal error: polygon ring begins with %d, not moveto\n", geom[i].op);
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 }
 
@@ -1266,7 +1267,7 @@ drawvec stairstep(drawvec &geom, int z, int detail) {
 // out.push_back(draw(VT_LINETO, xx, yy));
 } else {
 fprintf(stderr, "Can't happen: stairstepping lineto with no moveto\n");
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 }
 

jsontool.cpp (35 changed lines)
@@ -12,6 +12,7 @@
 #include "text.hpp"
 #include "geojson-loop.hpp"
 #include "milo/dtoa_milo.h"
+#include "errors.hpp"
 
 int fail = EXIT_SUCCESS;
 bool wrap = false;
@@ -198,7 +199,7 @@ void out(std::string const &s, int type, json_object *properties) {
 
 if (type != buffered_type) {
 fprintf(stderr, "Error: mix of bare geometries and features\n");
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 }
 
@@ -209,13 +210,13 @@ void join_csv(json_object *j) {
 std::string s = csv_getline(csvfile);
 if (s.size() == 0) {
 fprintf(stderr, "Couldn't get column header from CSV file\n");
-exit(EXIT_FAILURE);
+exit(EXIT_CSV);
 }
 
 std::string err = check_utf8(s);
 if (err != "") {
 fprintf(stderr, "%s\n", err.c_str());
-exit(EXIT_FAILURE);
+exit(EXIT_UTF8);
 }
 
 header = csv_split(s.c_str());
@@ -226,7 +227,7 @@ void join_csv(json_object *j) {
 
 if (header.size() == 0) {
 fprintf(stderr, "No columns in CSV header \"%s\"\n", s.c_str());
-exit(EXIT_FAILURE);
+exit(EXIT_CSV);
 }
 }
 
@@ -259,7 +260,7 @@ void join_csv(json_object *j) {
 
 if (joinkey < prev_joinkey) {
 fprintf(stderr, "GeoJSON file is out of sort: \"%s\" follows \"%s\"\n", joinkey.c_str(), prev_joinkey.c_str());
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 prev_joinkey = joinkey;
 
@@ -279,7 +280,7 @@ void join_csv(json_object *j) {
 std::string err = check_utf8(s);
 if (err != "") {
 fprintf(stderr, "%s\n", err.c_str());
-exit(EXIT_FAILURE);
+exit(EXIT_UTF8);
 }
 
 fields = csv_split(s.c_str());
@@ -290,7 +291,7 @@ void join_csv(json_object *j) {
 
 if (fields.size() > 0 && fields[0] < prevkey) {
 fprintf(stderr, "CSV file is out of sort: \"%s\" follows \"%s\"\n", fields[0].c_str(), prevkey.c_str());
-exit(EXIT_FAILURE);
+exit(EXIT_CSV);
 }
 
 if (fields.size() > 0 && fields[0] >= joinkey) {
@@ -310,7 +311,7 @@ void join_csv(json_object *j) {
 properties->value.object.values = (json_object **) realloc((void *) properties->value.object.values, (properties->value.object.length + 8 + fields.size()) * sizeof(json_object *));
 if (properties->value.object.keys == NULL || properties->value.object.values == NULL) {
 perror("realloc");
-exit(EXIT_FAILURE);
+exit(EXIT_MEMORY);
 }
 
 for (size_t i = 1; i < fields.size(); i++) {
@@ -335,7 +336,7 @@ void join_csv(json_object *j) {
 json_object *vo = (json_object *) malloc(sizeof(json_object));
 if (ko == NULL || vo == NULL) {
 perror("malloc");
-exit(EXIT_FAILURE);
+exit(EXIT_MEMORY);
 }
 
 ko->type = JSON_STRING;
@@ -345,7 +346,7 @@ void join_csv(json_object *j) {
 ko->value.string.string = strdup(k.c_str());
 if (ko->value.string.string == NULL) {
 perror("strdup");
-exit(EXIT_FAILURE);
+exit(EXIT_MEMORY);
 }
 
 vo->type = attr_type;
@@ -356,7 +357,7 @@ void join_csv(json_object *j) {
 vo->value.string.string = strdup(v.c_str());
 if (vo->value.string.string == NULL) {
 perror("strdup");
-exit(EXIT_FAILURE);
+exit(EXIT_MEMORY);
 }
 } else if (attr_type == JSON_NUMBER) {
 vo->value.number.number = atof(v.c_str());
@@ -455,26 +456,26 @@ int main(int argc, char **argv) {
 pe = true;
 } else {
 fprintf(stderr, "%s: Unknown option for -p%s\n", argv[0], optarg);
-exit(EXIT_FAILURE);
+exit(EXIT_ARGS);
 }
 break;
 
 default:
 fprintf(stderr, "Unexpected option -%c\n", i);
-exit(EXIT_FAILURE);
+exit(EXIT_ARGS);
 }
 }
 
 if (extract != NULL && wrap) {
 fprintf(stderr, "%s: --wrap and --extract not supported together\n", argv[0]);
-exit(EXIT_FAILURE);
+exit(EXIT_ARGS);
 }
 
 if (csv != NULL) {
 csvfile = fopen(csv, "r");
 if (csvfile == NULL) {
 perror(csv);
-exit(EXIT_FAILURE);
+exit(EXIT_OPEN);
 }
 }
 
@@ -485,7 +486,7 @@ int main(int argc, char **argv) {
 FILE *f = fopen(argv[i], "r");
 if (f == NULL) {
 perror(argv[i]);
-exit(EXIT_FAILURE);
+exit(EXIT_OPEN);
 }
 
 process(f, argv[i]);
@@ -502,7 +503,7 @@ int main(int argc, char **argv) {
 if (csvfile != NULL) {
 if (fclose(csvfile) != 0) {
 perror("close");
-exit(EXIT_FAILURE);
+exit(EXIT_CLOSE);
 }
 }
 

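The `join_csv()` changes above keep the existing requirement that both the GeoJSON features and the CSV rows arrive sorted by the join key; the error paths now exit with `EXIT_CSV`, `EXIT_UTF8`, or `EXIT_IMPOSSIBLE` instead of a generic failure. A hedged usage sketch, with placeholder attribute and file names:

    # sort the features by the join key, then join a CSV whose first column is that key
    tippecanoe-json-tool -e GEOID features.geojson | sort > sorted.geojson
    tippecanoe-json-tool -c population.csv sorted.geojson > joined.geojson
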
man/tippecanoe.1
@@ -1110,6 +1110,10 @@ resolutions.
 \fB\fC\-S\fR or \fB\fC\-\-stats\fR: Just report statistics about each tile's size and the number of features in it, as a JSON structure.
 .IP \(bu 2
 \fB\fC\-f\fR or \fB\fC\-\-force\fR: Decode tiles even if polygon ring order or closure problems are detected
+.IP \(bu 2
+\fB\fC\-I\fR or \fB\fC\-\-integer\fR: Report coordinates in integer tile coordinates
+.IP \(bu 2
+\fB\fC\-F\fR or \fB\fC\-\-fraction\fR: Report coordinates as a fraction of the tile extent
 .RE
 .SH tippecanoe\-json\-tool
 .PP

mbtiles.cpp (65 changed lines)
@@ -18,6 +18,7 @@
 #include "milo/dtoa_milo.h"
 #include "write_json.hpp"
 #include "version.hpp"
+#include "errors.hpp"
 
 size_t max_tilestats_attributes = 1000;
 size_t max_tilestats_sample_values = 1000;
@@ -28,45 +29,45 @@ sqlite3 *mbtiles_open(char *dbname, char **argv, int forcetable) {
 
 if (sqlite3_open(dbname, &outdb) != SQLITE_OK) {
 fprintf(stderr, "%s: %s: %s\n", argv[0], dbname, sqlite3_errmsg(outdb));
-exit(EXIT_FAILURE);
+exit(EXIT_OPEN);
 }
 
 char *err = NULL;
 if (sqlite3_exec(outdb, "PRAGMA synchronous=0", NULL, NULL, &err) != SQLITE_OK) {
 fprintf(stderr, "%s: async: %s\n", argv[0], err);
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 if (sqlite3_exec(outdb, "PRAGMA locking_mode=EXCLUSIVE", NULL, NULL, &err) != SQLITE_OK) {
 fprintf(stderr, "%s: async: %s\n", argv[0], err);
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 if (sqlite3_exec(outdb, "PRAGMA journal_mode=DELETE", NULL, NULL, &err) != SQLITE_OK) {
 fprintf(stderr, "%s: async: %s\n", argv[0], err);
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 if (sqlite3_exec(outdb, "CREATE TABLE metadata (name text, value text);", NULL, NULL, &err) != SQLITE_OK) {
 fprintf(stderr, "%s: Tileset \"%s\" already exists. You can use --force if you want to delete the old tileset.\n", argv[0], dbname);
 fprintf(stderr, "%s: %s\n", argv[0], err);
 if (!forcetable) {
-exit(EXIT_FAILURE);
+exit(EXIT_EXISTS);
 }
 }
 if (sqlite3_exec(outdb, "CREATE TABLE tiles (zoom_level integer, tile_column integer, tile_row integer, tile_data blob);", NULL, NULL, &err) != SQLITE_OK) {
 fprintf(stderr, "%s: create tiles table: %s\n", argv[0], err);
 if (!forcetable) {
-exit(EXIT_FAILURE);
+exit(EXIT_EXISTS);
 }
 }
 if (sqlite3_exec(outdb, "create unique index name on metadata (name);", NULL, NULL, &err) != SQLITE_OK) {
 fprintf(stderr, "%s: index metadata: %s\n", argv[0], err);
 if (!forcetable) {
-exit(EXIT_FAILURE);
+exit(EXIT_EXISTS);
 }
 }
 if (sqlite3_exec(outdb, "create unique index tile_index on tiles (zoom_level, tile_column, tile_row);", NULL, NULL, &err) != SQLITE_OK) {
 fprintf(stderr, "%s: index tiles: %s\n", argv[0], err);
 if (!forcetable) {
-exit(EXIT_FAILURE);
+exit(EXIT_EXISTS);
 }
 }
 
@@ -78,7 +79,7 @@ void mbtiles_write_tile(sqlite3 *outdb, int z, int tx, int ty, const char *data,
 const char *query = "insert into tiles (zoom_level, tile_column, tile_row, tile_data) values (?, ?, ?, ?)";
 if (sqlite3_prepare_v2(outdb, query, -1, &stmt, NULL) != SQLITE_OK) {
 fprintf(stderr, "sqlite3 insert prep failed\n");
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 
 sqlite3_bind_int(stmt, 1, z);
@@ -336,11 +337,11 @@ void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fnam
 if (outdb == NULL) {
 if (sqlite3_open("", &db) != SQLITE_OK) {
 fprintf(stderr, "Temporary db: %s\n", sqlite3_errmsg(db));
-exit(EXIT_FAILURE);
+exit(EXIT_OPEN);
 }
 if (sqlite3_exec(db, "CREATE TABLE metadata (name text, value text);", NULL, NULL, &err) != SQLITE_OK) {
 fprintf(stderr, "Create metadata table: %s\n", err);
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 }
 
@@ -348,7 +349,7 @@ void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fnam
 if (sqlite3_exec(db, sql, NULL, NULL, &err) != SQLITE_OK) {
 fprintf(stderr, "set name in metadata: %s\n", err);
 if (!forcetable) {
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 }
 sqlite3_free(sql);
@@ -357,7 +358,7 @@ void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fnam
 if (sqlite3_exec(db, sql, NULL, NULL, &err) != SQLITE_OK) {
 fprintf(stderr, "set description in metadata: %s\n", err);
 if (!forcetable) {
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 }
 sqlite3_free(sql);
@@ -366,7 +367,7 @@ void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fnam
 if (sqlite3_exec(db, sql, NULL, NULL, &err) != SQLITE_OK) {
 fprintf(stderr, "set version : %s\n", err);
 if (!forcetable) {
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 }
 sqlite3_free(sql);
@@ -375,7 +376,7 @@ void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fnam
 if (sqlite3_exec(db, sql, NULL, NULL, &err) != SQLITE_OK) {
 fprintf(stderr, "set minzoom: %s\n", err);
 if (!forcetable) {
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 }
 sqlite3_free(sql);
@@ -384,7 +385,7 @@ void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fnam
 if (sqlite3_exec(db, sql, NULL, NULL, &err) != SQLITE_OK) {
 fprintf(stderr, "set maxzoom: %s\n", err);
 if (!forcetable) {
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 }
 sqlite3_free(sql);
@@ -393,7 +394,7 @@ void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fnam
 if (sqlite3_exec(db, sql, NULL, NULL, &err) != SQLITE_OK) {
 fprintf(stderr, "set center: %s\n", err);
 if (!forcetable) {
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 }
 sqlite3_free(sql);
@@ -402,7 +403,7 @@ void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fnam
 if (sqlite3_exec(db, sql, NULL, NULL, &err) != SQLITE_OK) {
 fprintf(stderr, "set bounds: %s\n", err);
 if (!forcetable) {
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 }
 sqlite3_free(sql);
@@ -411,7 +412,7 @@ void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fnam
 if (sqlite3_exec(db, sql, NULL, NULL, &err) != SQLITE_OK) {
 fprintf(stderr, "set type: %s\n", err);
 if (!forcetable) {
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 }
 sqlite3_free(sql);
@@ -421,7 +422,7 @@ void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fnam
 if (sqlite3_exec(db, sql, NULL, NULL, &err) != SQLITE_OK) {
 fprintf(stderr, "set type: %s\n", err);
 if (!forcetable) {
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 }
 sqlite3_free(sql);
@@ -431,7 +432,7 @@ void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fnam
 if (sqlite3_exec(db, sql, NULL, NULL, &err) != SQLITE_OK) {
 fprintf(stderr, "set format: %s\n", err);
 if (!forcetable) {
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 }
 sqlite3_free(sql);
@@ -441,7 +442,7 @@ void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fnam
 if (sqlite3_exec(db, sql, NULL, NULL, &err) != SQLITE_OK) {
 fprintf(stderr, "set version: %s\n", err);
 if (!forcetable) {
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 }
 sqlite3_free(sql);
@@ -450,7 +451,7 @@ void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fnam
 if (sqlite3_exec(db, sql, NULL, NULL, &err) != SQLITE_OK) {
 fprintf(stderr, "set commandline: %s\n", err);
 if (!forcetable) {
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 }
 sqlite3_free(sql);
@@ -461,7 +462,7 @@ void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fnam
 if (sqlite3_exec(db, sql, NULL, NULL, &err) != SQLITE_OK) {
 fprintf(stderr, "set strategies: %s\n", err);
 if (!forcetable) {
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 }
 sqlite3_free(sql);
@@ -561,7 +562,7 @@ void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fnam
 if (sqlite3_exec(db, sql, NULL, NULL, &err) != SQLITE_OK) {
 fprintf(stderr, "set json: %s\n", err);
 if (!forcetable) {
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 }
 sqlite3_free(sql);
@@ -577,7 +578,7 @@ void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fnam
 FILE *fp = fopen(metadata.c_str(), "w");
 if (fp == NULL) {
 perror(metadata.c_str());
-exit(EXIT_FAILURE);
+exit(EXIT_OPEN);
 }
 
 json_writer state(fp);
@@ -594,7 +595,7 @@ void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fnam
 const char *v = (const char *) sqlite3_column_text(stmt, 1);
 if (k == NULL || v == NULL) {
 fprintf(stderr, "Corrupt mbtiles file: null metadata\n");
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 
 state.json_comma_newline();
@@ -614,7 +615,7 @@ void mbtiles_write_metadata(sqlite3 *outdb, const char *outdir, const char *fnam
 if (outdb == NULL) {
 if (sqlite3_close(db) != SQLITE_OK) {
 fprintf(stderr, "Could not close temp database: %s\n", sqlite3_errmsg(db));
-exit(EXIT_FAILURE);
+exit(EXIT_CLOSE);
 }
 }
 }
@@ -624,11 +625,11 @@ void mbtiles_close(sqlite3 *outdb, const char *pgm) {
 
 if (sqlite3_exec(outdb, "ANALYZE;", NULL, NULL, &err) != SQLITE_OK) {
 fprintf(stderr, "%s: ANALYZE failed: %s\n", pgm, err);
-exit(EXIT_FAILURE);
+exit(EXIT_SQLITE);
 }
 if (sqlite3_close(outdb) != SQLITE_OK) {
 fprintf(stderr, "%s: could not close database: %s\n", pgm, sqlite3_errmsg(outdb));
-exit(EXIT_FAILURE);
+exit(EXIT_CLOSE);
 }
 }
 
@@ -661,7 +662,7 @@ std::map<std::string, layermap_entry> merge_layermaps(std::vector<std::map<std::
 auto out_entry = out.find(layername);
 if (out_entry == out.end()) {
 fprintf(stderr, "Internal error merging layers\n");
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 
 for (auto fk = map->second.file_keys.begin(); fk != map->second.file_keys.end(); ++fk) {
@@ -726,7 +727,7 @@ void add_to_file_keys(std::map<std::string, type_and_string_stats> &file_keys, s
 
 if (fka == file_keys.end()) {
 fprintf(stderr, "Can't happen (tilestats)\n");
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 
 if (val.type == mvt_double) {

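mbtiles_write_metadata() above now also writes the `strategies` entry that the changelog mentions (and tile-join carries it through), so a tileset keeps a record of which size-reduction strategies were applied. Assuming the stock sqlite3 command-line tool and a placeholder tileset name, it can be inspected with:

    sqlite3 out.mbtiles "SELECT value FROM metadata WHERE name = 'strategies';"
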
mvt.cpp (11 changed lines)
@@ -13,6 +13,7 @@
 #include "protozero/pbf_reader.hpp"
 #include "protozero/pbf_writer.hpp"
 #include "milo/dtoa_milo.h"
+#include "errors.hpp"
 
 mvt_geometry::mvt_geometry(int nop, long long nx, long long ny) {
 this->op = nop;
@@ -115,7 +116,7 @@ bool mvt_tile::decode(std::string &message, bool &was_compressed) {
 if (is_compressed(message)) {
 std::string uncompressed;
 if (decompress(message, uncompressed) == 0) {
-exit(EXIT_FAILURE);
+exit(EXIT_MVT);
 }
 src = uncompressed;
 was_compressed = true;
@@ -349,10 +350,10 @@ std::string mvt_tile::encode() {
 value_writer.add_bool(7, pbv.numeric_value.bool_value);
 } else if (pbv.type == mvt_null) {
 fprintf(stderr, "Internal error: trying to write null attribute to tile\n");
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 } else {
 fprintf(stderr, "Internal error: trying to write undefined attribute type to tile\n");
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 
 sorted_value sv;
@@ -418,7 +419,7 @@ std::string mvt_tile::encode() {
 
 if (dx < INT_MIN || dx > INT_MAX || dy < INT_MIN || dy > INT_MAX) {
 fprintf(stderr, "Internal error: Geometry delta is too big: %lld,%lld\n", dx, dy);
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 
 geometry.push_back(protozero::encode_zigzag32(dx));
@@ -431,7 +432,7 @@ std::string mvt_tile::encode() {
 length++;
 } else {
 fprintf(stderr, "\nInternal error: corrupted geometry\n");
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 }
 

plugin.cpp (81 changed lines)
@@ -24,6 +24,7 @@
 #include "projection.hpp"
 #include "geometry.hpp"
 #include "serial.hpp"
+#include "errors.hpp"
 
 extern "C" {
 #include "jsonpull/jsonpull.h"
@@ -48,12 +49,12 @@ void *run_writer(void *a) {
 FILE *fp = fdopen(wa->write_to, "w");
 if (fp == NULL) {
 perror("fdopen (pipe writer)");
-exit(EXIT_FAILURE);
+exit(EXIT_OPEN);
 }
 
 json_writer state(fp);
 for (size_t i = 0; i < wa->layers->size(); i++) {
-layer_to_geojson((*(wa->layers))[i], wa->z, wa->x, wa->y, false, true, false, true, 0, 0, 0, true, state);
+layer_to_geojson((*(wa->layers))[i], wa->z, wa->x, wa->y, false, true, false, true, 0, 0, 0, true, state, 0);
 }
 
 if (fclose(fp) != 0) {
@@ -65,7 +66,7 @@ void *run_writer(void *a) {
 }
 } else {
 perror("fclose output to filter");
-exit(EXIT_FAILURE);
+exit(EXIT_CLOSE);
 }
 }
 
@@ -90,7 +91,7 @@ std::vector<mvt_layer> parse_layers(int fd, int z, unsigned x, unsigned y, std::
 FILE *f = fdopen(fd, "r");
 if (f == NULL) {
 perror("fdopen filter output");
-exit(EXIT_FAILURE);
+exit(EXIT_OPEN);
 }
 json_pull *jp = json_begin_file(f);
 
@@ -102,7 +103,7 @@ std::vector<mvt_layer> parse_layers(int fd, int z, unsigned x, unsigned y, std::
 if (jp->root != NULL) {
 json_context(jp->root);
 }
-exit(EXIT_FAILURE);
+exit(EXIT_JSON);
 }
 
 json_free(jp->root);
@@ -122,7 +123,7 @@ std::vector<mvt_layer> parse_layers(int fd, int z, unsigned x, unsigned y, std::
 fprintf(stderr, "Filter output:%d: filtered feature with no geometry\n", jp->line);
 json_context(j);
 json_free(j);
-exit(EXIT_FAILURE);
+exit(EXIT_JSON);
 }
 
 json_object *properties = json_hash_get(j, "properties");
@@ -130,27 +131,27 @@ std::vector<mvt_layer> parse_layers(int fd, int z, unsigned x, unsigned y, std::
 fprintf(stderr, "Filter output:%d: feature without properties hash\n", jp->line);
 json_context(j);
 json_free(j);
-exit(EXIT_FAILURE);
+exit(EXIT_JSON);
 }
 
 json_object *geometry_type = json_hash_get(geometry, "type");
 if (geometry_type == NULL) {
 fprintf(stderr, "Filter output:%d: null geometry (additional not reported)\n", jp->line);
 json_context(j);
-exit(EXIT_FAILURE);
+exit(EXIT_JSON);
 }
 
 if (geometry_type->type != JSON_STRING) {
 fprintf(stderr, "Filter output:%d: geometry type is not a string\n", jp->line);
 json_context(j);
-exit(EXIT_FAILURE);
+exit(EXIT_JSON);
 }
 
 json_object *coordinates = json_hash_get(geometry, "coordinates");
 if (coordinates == NULL || coordinates->type != JSON_ARRAY) {
 fprintf(stderr, "Filter output:%d: feature without coordinates array\n", jp->line);
 json_context(j);
-exit(EXIT_FAILURE);
+exit(EXIT_JSON);
 }
 
 int t;
@@ -162,7 +163,7 @@ std::vector<mvt_layer> parse_layers(int fd, int z, unsigned x, unsigned y, std::
 if (t >= GEOM_TYPES) {
 fprintf(stderr, "Filter output:%d: Can't handle geometry type %s\n", jp->line, geometry_type->value.string.string);
 json_context(j);
-exit(EXIT_FAILURE);
+exit(EXIT_JSON);
 }
 
 std::string layername = "unknown";
@@ -240,7 +241,7 @@ std::vector<mvt_layer> parse_layers(int fd, int z, unsigned x, unsigned y, std::
 auto fk = layermap.find(layername);
 if (fk == layermap.end()) {
 fprintf(stderr, "Internal error: layer %s not found\n", layername.c_str());
-exit(EXIT_FAILURE);
+exit(EXIT_IMPOSSIBLE);
 }
 if (z < fk->second.minzoom) {
 fk->second.minzoom = z;
@@ -287,7 +288,7 @@ std::vector<mvt_layer> parse_layers(int fd, int z, unsigned x, unsigned y, std::
 json_end(jp);
 if (fclose(f) != 0) {
 perror("fclose postfilter output");
-exit(EXIT_FAILURE);
+exit(EXIT_CLOSE);
 }
 
 std::vector<mvt_layer> final;
@@ -309,7 +310,7 @@ serial_feature parse_feature(json_pull *jp, int z, unsigned x, unsigned y, std::
 if (jp->root != NULL) {
 json_context(jp->root);
 }
-exit(EXIT_FAILURE);
+exit(EXIT_JSON);
 }
 
 json_free(jp->root);
|
||||
@ -330,7 +331,7 @@ serial_feature parse_feature(json_pull *jp, int z, unsigned x, unsigned y, std::
|
||||
fprintf(stderr, "Filter output:%d: filtered feature with no geometry\n", jp->line);
|
||||
json_context(j);
|
||||
json_free(j);
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_JSON);
|
||||
}
|
||||
|
||||
json_object *properties = json_hash_get(j, "properties");
|
||||
@ -338,27 +339,27 @@ serial_feature parse_feature(json_pull *jp, int z, unsigned x, unsigned y, std::
|
||||
fprintf(stderr, "Filter output:%d: feature without properties hash\n", jp->line);
|
||||
json_context(j);
|
||||
json_free(j);
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_JSON);
|
||||
}
|
||||
|
||||
json_object *geometry_type = json_hash_get(geometry, "type");
|
||||
if (geometry_type == NULL) {
|
||||
fprintf(stderr, "Filter output:%d: null geometry (additional not reported)\n", jp->line);
|
||||
json_context(j);
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_JSON);
|
||||
}
|
||||
|
||||
if (geometry_type->type != JSON_STRING) {
|
||||
fprintf(stderr, "Filter output:%d: geometry type is not a string\n", jp->line);
|
||||
json_context(j);
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_JSON);
|
||||
}
|
||||
|
||||
json_object *coordinates = json_hash_get(geometry, "coordinates");
|
||||
if (coordinates == NULL || coordinates->type != JSON_ARRAY) {
|
||||
fprintf(stderr, "Filter output:%d: feature without coordinates array\n", jp->line);
|
||||
json_context(j);
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_JSON);
|
||||
}
|
||||
|
||||
int t;
|
||||
@ -370,7 +371,7 @@ serial_feature parse_feature(json_pull *jp, int z, unsigned x, unsigned y, std::
|
||||
if (t >= GEOM_TYPES) {
|
||||
fprintf(stderr, "Filter output:%d: Can't handle geometry type %s\n", jp->line, geometry_type->value.string.string);
|
||||
json_context(j);
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_JSON);
|
||||
}
|
||||
|
||||
drawvec dv;
|
||||
@ -476,7 +477,7 @@ serial_feature parse_feature(json_pull *jp, int z, unsigned x, unsigned y, std::
|
||||
auto fk = layermap.find(layername);
|
||||
if (fk == layermap.end()) {
|
||||
fprintf(stderr, "Internal error: layer %s not found\n", layername.c_str());
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_IMPOSSIBLE);
|
||||
}
|
||||
sf.layer = fk->second.id;
|
||||
|
||||
@ -539,17 +540,17 @@ void setup_filter(const char *filter, int *write_to, int *read_from, pid_t *pid,
|
||||
|
||||
if (pthread_mutex_lock(&pipe_lock) != 0) {
|
||||
perror("pthread_mutex_lock (pipe)");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_PTHREAD);
|
||||
}
|
||||
|
||||
int pipe_orig[2], pipe_filtered[2];
|
||||
if (pipe(pipe_orig) < 0) {
|
||||
perror("pipe (original features)");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_OPEN);
|
||||
}
|
||||
if (pipe(pipe_filtered) < 0) {
|
||||
perror("pipe (filtered features)");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_OPEN);
|
||||
}
|
||||
|
||||
std::string z_str = std::to_string(z);
|
||||
@ -559,64 +560,64 @@ void setup_filter(const char *filter, int *write_to, int *read_from, pid_t *pid,
|
||||
*pid = fork();
|
||||
if (*pid < 0) {
|
||||
perror("fork");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_PTHREAD);
|
||||
} else if (*pid == 0) {
|
||||
// child
|
||||
|
||||
if (dup2(pipe_orig[0], 0) < 0) {
|
||||
perror("dup child stdin");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_OPEN);
|
||||
}
|
||||
if (dup2(pipe_filtered[1], 1) < 0) {
|
||||
perror("dup child stdout");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_OPEN);
|
||||
}
|
||||
if (close(pipe_orig[1]) != 0) {
|
||||
perror("close output to filter");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_CLOSE);
|
||||
}
|
||||
if (close(pipe_filtered[0]) != 0) {
|
||||
perror("close input from filter");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_CLOSE);
|
||||
}
|
||||
if (close(pipe_orig[0]) != 0) {
|
||||
perror("close dup input of filter");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_CLOSE);
|
||||
}
|
||||
if (close(pipe_filtered[1]) != 0) {
|
||||
perror("close dup output of filter");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_CLOSE);
|
||||
}
|
||||
|
||||
// XXX close other fds?
|
||||
|
||||
if (execlp("sh", "sh", "-c", filter, "sh", z_str.c_str(), x_str.c_str(), y_str.c_str(), NULL) != 0) {
|
||||
perror("exec");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_PTHREAD);
|
||||
}
|
||||
} else {
|
||||
// parent
|
||||
|
||||
if (close(pipe_orig[0]) != 0) {
|
||||
perror("close filter-side reader");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_CLOSE);
|
||||
}
|
||||
if (close(pipe_filtered[1]) != 0) {
|
||||
perror("close filter-side writer");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_CLOSE);
|
||||
}
|
||||
if (fcntl(pipe_orig[1], F_SETFD, FD_CLOEXEC) != 0) {
|
||||
perror("cloxec output to filter");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_CLOSE);
|
||||
}
|
||||
if (fcntl(pipe_filtered[0], F_SETFD, FD_CLOEXEC) != 0) {
|
||||
perror("cloxec input from filter");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_CLOSE);
|
||||
}
|
||||
|
||||
if (pthread_mutex_unlock(&pipe_lock) != 0) {
|
||||
perror("pthread_mutex_unlock (pipe_lock)");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_PTHREAD);
|
||||
}
|
||||
|
||||
*write_to = pipe_orig[1];
|
||||
@ -640,7 +641,7 @@ std::vector<mvt_layer> filter_layers(const char *filter, std::vector<mvt_layer>
|
||||
pthread_t writer;
|
||||
if (pthread_create(&writer, NULL, run_writer, &wa) != 0) {
|
||||
perror("pthread_create (filter writer)");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_PTHREAD);
|
||||
}
|
||||
|
||||
std::vector<mvt_layer> nlayers = parse_layers(read_from, z, x, y, layermaps, tiling_seg, layer_unmaps, extent);
|
||||
@ -649,7 +650,7 @@ std::vector<mvt_layer> filter_layers(const char *filter, std::vector<mvt_layer>
|
||||
int stat_loc;
|
||||
if (waitpid(pid, &stat_loc, 0) < 0) {
|
||||
perror("waitpid for filter\n");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_PTHREAD);
|
||||
}
|
||||
if (WIFEXITED(stat_loc) || WIFSIGNALED(stat_loc)) {
|
||||
break;
|
||||
@ -659,7 +660,7 @@ std::vector<mvt_layer> filter_layers(const char *filter, std::vector<mvt_layer>
|
||||
void *ret;
|
||||
if (pthread_join(writer, &ret) != 0) {
|
||||
perror("pthread_join filter writer");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_PTHREAD);
|
||||
}
|
||||
|
||||
return nlayers;
|
||||
|
11
pool.cpp
@ -5,6 +5,7 @@
#include <math.h>
#include "memfile.hpp"
#include "pool.hpp"
#include "errors.hpp"

int swizzlecmp(const char *a, const char *b) {
ssize_t alen = strlen(a);
@ -63,11 +64,11 @@ long long addpool(struct memfile *poolfile, struct memfile *treefile, const char
long long off = poolfile->off;
if (memfile_write(poolfile, &type, 1) < 0) {
perror("memfile write");
exit(EXIT_FAILURE);
exit(EXIT_WRITE);
}
if (memfile_write(poolfile, (void *) s, strlen(s) + 1) < 0) {
perror("memfile write");
exit(EXIT_FAILURE);
exit(EXIT_WRITE);
}
return off;
}
@ -84,11 +85,11 @@ long long addpool(struct memfile *poolfile, struct memfile *treefile, const char
long long off = poolfile->off;
if (memfile_write(poolfile, &type, 1) < 0) {
perror("memfile write");
exit(EXIT_FAILURE);
exit(EXIT_WRITE);
}
if (memfile_write(poolfile, (void *) s, strlen(s) + 1) < 0) {
perror("memfile write");
exit(EXIT_FAILURE);
exit(EXIT_WRITE);
}

if (off >= LONG_MAX || treefile->off >= LONG_MAX) {
@ -109,7 +110,7 @@ long long addpool(struct memfile *poolfile, struct memfile *treefile, const char
long long p = treefile->off;
if (memfile_write(treefile, &tsp, sizeof(struct stringpool)) < 0) {
perror("memfile write");
exit(EXIT_FAILURE);
exit(EXIT_WRITE);
}

if (ssp == -1) {
@ -4,6 +4,7 @@
#include <math.h>
#include <atomic>
#include "projection.hpp"
#include "errors.hpp"

unsigned long long (*encode_index)(unsigned int wx, unsigned int wy) = NULL;
void (*decode_index)(unsigned long long index, unsigned *wx, unsigned *wy) = NULL;
@ -212,6 +213,6 @@ void set_projection_or_exit(const char *optarg) {
}
if (p->name == NULL) {
fprintf(stderr, "Unknown projection (-s): %s\n", optarg);
exit(EXIT_FAILURE);
exit(EXIT_ARGS);
}
}
@ -12,6 +12,7 @@
#include "text.hpp"
#include "mvt.hpp"
#include "milo/dtoa_milo.h"
#include "errors.hpp"

const char *geometry_names[GEOM_TYPES] = {
"Point",
@ -95,7 +96,7 @@ void parse_geometry(int t, json_object *j, drawvec &out, int op, const char *fna
fprintf(stderr, "%s:%d: malformed point\n", fname, line);
json_context(j);
json_context(feature);
exit(EXIT_FAILURE);
exit(EXIT_JSON);
}
}

@ -147,7 +148,7 @@ void stringify_value(json_object *value, int &type, std::string &stringified, co
if (err != "") {
fprintf(stderr, "%s:%d: %s\n", reading, line, err.c_str());
json_context(feature);
exit(EXIT_FAILURE);
exit(EXIT_UTF8);
}
} else if (vt == JSON_NUMBER) {
type = mvt_double;
13
serial.cpp
@ -20,6 +20,7 @@
#include "projection.hpp"
#include "evaluator.hpp"
#include "milo/dtoa_milo.h"
#include "errors.hpp"

// Offset coordinates to keep them positive
#define COORD_OFFSET (4LL << 32)
@ -30,7 +31,7 @@ size_t fwrite_check(const void *ptr, size_t size, size_t nitems, FILE *stream, c
size_t w = fwrite(ptr, size, nitems, stream);
if (w != nitems) {
fprintf(stderr, "%s: Write to temporary file failed: %s\n", fname, strerror(errno));
exit(EXIT_FAILURE);
exit(EXIT_WRITE);
}
return w;
}
@ -52,14 +53,14 @@ void serialize_ulong_long(FILE *out, unsigned long long zigzag, std::atomic<long
b |= 0x80;
if (putc(b, out) == EOF) {
fprintf(stderr, "%s: Write to temporary file failed: %s\n", fname, strerror(errno));
exit(EXIT_FAILURE);
exit(EXIT_WRITE);
}
*fpos += 1;
zigzag >>= 7;
} else {
if (putc(b, out) == EOF) {
fprintf(stderr, "%s: Write to temporary file failed: %s\n", fname, strerror(errno));
exit(EXIT_FAILURE);
exit(EXIT_WRITE);
}
*fpos += 1;
break;
@ -509,7 +510,7 @@ int serialize_feature(struct serialization_state *sst, serial_feature &sf) {

if (extent > 10000) {
fprintf(stderr, "Exiting because this can't be right.\n");
exit(EXIT_FAILURE);
exit(EXIT_IMPOSSIBLE);
}
}
}
@ -613,7 +614,7 @@ int serialize_feature(struct serialization_state *sst, serial_feature &sf) {
}
} else {
fprintf(stderr, "Internal error: can't find layer name %s\n", sf.layername.c_str());
exit(EXIT_FAILURE);
exit(EXIT_IMPOSSIBLE);
}

for (ssize_t i = (ssize_t) sf.full_keys.size() - 1; i >= 0; i--) {
@ -772,7 +773,7 @@ void coerce_value(std::string const &key, int &vt, std::string &val, std::map<st
}
} else {
fprintf(stderr, "Can't happen: attribute type %d\n", a->second);
exit(EXIT_FAILURE);
exit(EXIT_IMPOSSIBLE);
}
}
}
@ -8,6 +8,7 @@
|
||||
"maxzoom": "12",
|
||||
"minzoom": "0",
|
||||
"name": "tests/join-population/tabblock_06001420.mbtiles",
|
||||
"strategies": "[ { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 258 }, { \"tiny_polygons\": 255 }, { \"tiny_polygons\": 248 }, { \"tiny_polygons\": 152 }, { \"tiny_polygons\": 76 }, { \"tiny_polygons\": 46 } ]",
|
||||
"type": "overlay",
|
||||
"version": "2"
|
||||
}, "features": [
|
||||
|
@ -8,6 +8,7 @@
|
||||
"maxzoom": "12",
|
||||
"minzoom": "0",
|
||||
"name": "tests/join-population/tabblock_06001420.mbtiles",
|
||||
"strategies": "[ { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 258 }, { \"tiny_polygons\": 255 }, { \"tiny_polygons\": 248 }, { \"tiny_polygons\": 152 }, { \"tiny_polygons\": 76 }, { \"tiny_polygons\": 46 } ]",
|
||||
"type": "overlay",
|
||||
"version": "2"
|
||||
}, "features": [
|
||||
|
@ -8,6 +8,7 @@
|
||||
"maxzoom": "12",
|
||||
"minzoom": "0",
|
||||
"name": "tests/join-population/tabblock_06001420.mbtiles",
|
||||
"strategies": "[ { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 258 }, { \"tiny_polygons\": 255 }, { \"tiny_polygons\": 248 }, { \"tiny_polygons\": 152 }, { \"tiny_polygons\": 76 }, { \"tiny_polygons\": 46 } ]",
|
||||
"type": "overlay",
|
||||
"version": "2"
|
||||
}, "features": [
|
||||
|
@ -8,6 +8,7 @@
|
||||
"maxzoom": "12",
|
||||
"minzoom": "0",
|
||||
"name": "tests/join-population/tabblock_06001420.mbtiles",
|
||||
"strategies": "[ { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 258 }, { \"tiny_polygons\": 255 }, { \"tiny_polygons\": 248 }, { \"tiny_polygons\": 152 }, { \"tiny_polygons\": 76 }, { \"tiny_polygons\": 46 } ]",
|
||||
"type": "overlay",
|
||||
"version": "2"
|
||||
}, "features": [
|
||||
|
@ -8,6 +8,7 @@
|
||||
"maxzoom": "12",
|
||||
"minzoom": "0",
|
||||
"name": "tests/join-population/tabblock_06001420.mbtiles",
|
||||
"strategies": "[ { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 258 }, { \"tiny_polygons\": 255 }, { \"tiny_polygons\": 248 }, { \"tiny_polygons\": 152 }, { \"tiny_polygons\": 76 }, { \"tiny_polygons\": 46 } ]",
|
||||
"type": "overlay",
|
||||
"version": "2"
|
||||
}, "features": [
|
||||
|
@ -8,6 +8,7 @@
|
||||
"maxzoom": "12",
|
||||
"minzoom": "0",
|
||||
"name": "tests/join-population/tabblock_06001420.mbtiles",
|
||||
"strategies": "[ { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 258 }, { \"tiny_polygons\": 255 }, { \"tiny_polygons\": 248 }, { \"tiny_polygons\": 152 }, { \"tiny_polygons\": 76 }, { \"tiny_polygons\": 46 } ]",
|
||||
"type": "overlay",
|
||||
"version": "2"
|
||||
}, "features": [
|
||||
|
@ -8,6 +8,7 @@
|
||||
"maxzoom": "12",
|
||||
"minzoom": "0",
|
||||
"name": "tests/join-population/tabblock_06001420.mbtiles",
|
||||
"strategies": "[ { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 258 }, { \"tiny_polygons\": 255 }, { \"tiny_polygons\": 248 }, { \"tiny_polygons\": 152 }, { \"tiny_polygons\": 76 }, { \"tiny_polygons\": 46 } ]",
|
||||
"type": "overlay",
|
||||
"version": "2"
|
||||
}, "features": [
|
||||
|
@ -9,6 +9,7 @@
|
||||
"maxzoom": "12",
|
||||
"minzoom": "0",
|
||||
"name": "macarthur name",
|
||||
"strategies": "[ { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 258 }, { \"tiny_polygons\": 255 }, { \"tiny_polygons\": 248 }, { \"tiny_polygons\": 152 }, { \"tiny_polygons\": 76 }, { \"tiny_polygons\": 46 } ]",
|
||||
"type": "overlay",
|
||||
"version": "2"
|
||||
}, "features": [
|
||||
|
@ -8,6 +8,7 @@
|
||||
"maxzoom": "12",
|
||||
"minzoom": "0",
|
||||
"name": "tests/join-population/macarthur-folder + tests/join-population/macarthur2-folder + tests/join-population/tabblock_06001420-folder",
|
||||
"strategies": "[ { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 258 }, { \"tiny_polygons\": 255 }, { \"tiny_polygons\": 248 }, { \"tiny_polygons\": 152 }, { \"tiny_polygons\": 76 }, { \"tiny_polygons\": 46 } ]",
|
||||
"type": "overlay",
|
||||
"version": "2"
|
||||
}, "features": [
|
||||
|
@ -8,6 +8,7 @@
|
||||
"maxzoom": "12",
|
||||
"minzoom": "0",
|
||||
"name": "tests/join-population/macarthur.mbtiles + tests/join-population/macarthur2.mbtiles + tests/join-population/tabblock_06001420.mbtiles",
|
||||
"strategies": "[ { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 258 }, { \"tiny_polygons\": 255 }, { \"tiny_polygons\": 248 }, { \"tiny_polygons\": 152 }, { \"tiny_polygons\": 76 }, { \"tiny_polygons\": 46 } ]",
|
||||
"type": "overlay",
|
||||
"version": "2"
|
||||
}, "features": [
|
||||
|
@ -8,6 +8,7 @@
|
||||
"maxzoom": "12",
|
||||
"minzoom": "0",
|
||||
"name": "tests/join-population/macarthur.mbtiles + tests/join-population/macarthur2.mbtiles + tests/join-population/tabblock_06001420.mbtiles",
|
||||
"strategies": "[ { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 258 }, { \"tiny_polygons\": 255 }, { \"tiny_polygons\": 248 }, { \"tiny_polygons\": 152 }, { \"tiny_polygons\": 76 }, { \"tiny_polygons\": 46 } ]",
|
||||
"type": "overlay",
|
||||
"version": "2"
|
||||
}, "features": [
|
||||
|
@ -8,6 +8,7 @@
|
||||
"maxzoom": "12",
|
||||
"minzoom": "0",
|
||||
"name": "tests/join-population/macarthur.mbtiles + tests/join-population/macarthur2.mbtiles + tests/join-population/tabblock_06001420.mbtiles",
|
||||
"strategies": "[ { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 260 }, { \"tiny_polygons\": 258 }, { \"tiny_polygons\": 255 }, { \"tiny_polygons\": 248 }, { \"tiny_polygons\": 152 }, { \"tiny_polygons\": 76 }, { \"tiny_polygons\": 46 } ]",
|
||||
"type": "overlay",
|
||||
"version": "2"
|
||||
}, "features": [
|
||||
|
64
tests/muni/decode/multi.mbtiles.fraction.json
Normal file
@ -0,0 +1,64 @@
|
||||
{ "type": "FeatureCollection", "properties": {
|
||||
"bounds": "-122.538670,37.705764,-12.240000,37.836443",
|
||||
"center": "-122.431641,37.788049,11",
|
||||
"description": "tests/muni/decode/multi.mbtiles",
|
||||
"format": "pbf",
|
||||
"generator_options": "./tippecanoe -q -z11 -Z11 -f -o tests/muni/decode/multi.mbtiles tests/muni/muni.json",
|
||||
"json": "{\"vector_layers\": [ { \"id\": \"muni\", \"description\": \"\", \"minzoom\": 11, \"maxzoom\": 11, \"fields\": {\"name\": \"String\"} }, { \"id\": \"subway\", \"description\": \"\", \"minzoom\": 11, \"maxzoom\": 11, \"fields\": {\"name\": \"String\"} } ],\"tilestats\": {\"layerCount\": 2,\"layers\": [{\"layer\": \"muni\",\"count\": 4592,\"geometry\": \"Point\",\"attributeCount\": 1,\"attributes\": [{\"attribute\": \"name\",\"count\": 1000,\"type\": \"string\",\"values\": [\" 4th St & Brannan St\",\" Conzelman Rd & Mccullough Rd\",\"100 O'Shaughnessy Blvd\",\"101 Dakota St\",\"1095 CONNECTICUT ST\",\"10th Ave & Ortega St\",\"10th Ave & Pacheco St\",\"10th Ave & Quintara St\",\"1100 Lake Merced Blvd\",\"115 TELEGRAPH Hill Blvd\",\"117 Warren Dr\",\"11th St & Bryant St\",\"11th St & Folsom St\",\"11th St & Harrison St\",\"11th St & Howard St\",\"11th St & Market St\",\"11th St & Mission St\",\"11th St/btw Market & Mission\",\"120 Portola Dr\",\"126 Miraloma Dr\",\"13th St & Gateview Ave\",\"14 Dakota St\",\"14th Avenue & Geary Boulevard\",\"14th Ave & Quintara St\",\"14th Ave & Santiago St\",\"14th Ave & Taraval St\",\"14th Ave & Ulloa St\",\"14th St & Alpine Ter\",\"14th St & Castro St\",\"14th St & Church St\",\"14th St & Mission St\",\"14th St & Noe St\",\"14th St & SANCHEZ ST\",\"14th St & Sanchez St\",\"150 Otis St\",\"15th Ave & Noriega St\",\"15th Ave & Ortega St\",\"15th Ave & Pacheco St\",\"15th Ave & Quintara St\",\"15th Ave & Taraval St\",\"15th Ave & Ulloa St\",\"15th Ave & West Portal Ave\",\"15th St & Mission St\",\"16 th St & South Van Ness\",\"164 Addison St\",\"1650 Geneva Ave\",\"1697 7th Ave\",\"16th Ave & Lawton St\",\"16th Ave & Lomita Ave\",\"16th Ave & Moraga St\",\"16th Ave & Noriega St\",\"16th Ave & Ortega St\",\"16th Ave & Pacheco St\",\"16th Avenue at Lawton Street\",\"16th St & 4th St\",\"16th St & Bryant St\",\"16th St & Church St\",\"16th St & Dolores St\",\"16th St & Folsom St\",\"16th St & Guerrero St\",\"16th St & Harrison St\",\"16th St & Kansas St\",\"16th St & Mission St\",\"16th St & Missouri St\",\"16th St & Potrero Ave\",\"16th St & San Bruno Ave\",\"16th St & Shotwell St\",\"16th St & South Van Ness\",\"16th St & Valencia St\",\"16th St & Vermont St\",\"16th St & Wisconsin St\",\"16th St& Rhode Island St\",\"16th Street & 4th Street\",\"16th Street & Missouri St\",\"16th Street & Rhode Islandi St\",\"16th Street & Wisconsin St\",\"170 Buckingham Way\",\"1701 Geneva Ave\",\"1721 Geneva Ave\",\"1725 Sunnydale Ave\",\"1730 3rd St\",\"1731 3RD ST\",\"1750 Geneva Ave\",\"176 Rhode Island St\",\"1798 Laguna Honda Blvd\",\"17TH ST & KANSAS ST\",\"17th Ave & Quintara St\",\"17th Ave & Rivera St\",\"17th Ave & Santiago St\",\"17th St & Belvedere St\",\"17th St & Castro St\",\"17th St & Clayton St\",\"17th St & Cole St\",\"17th St & De Haro St\",\"17th St & Diamond St\",\"17th St & Kansas St\",\"17th St & Noe St\",\"17th St & Wisconsin St\",\"1800 Sunnydale Ave\",\"18th St & 3rd St\"]}]},{\"layer\": \"subway\",\"count\": 19,\"geometry\": \"Point\",\"attributeCount\": 1,\"attributes\": [{\"attribute\": \"name\",\"count\": 18,\"type\": \"string\",\"values\": [\"Metro Castro Station/Downtown\",\"Metro Castro Station/Outbound\",\"Metro Church Station/Downtown\",\"Metro Church Station/Outbound\",\"Metro Civic Center Station/Downtn\",\"Metro Civic Center Station/Downtown\",\"Metro Civic Center Station/Outbd\",\"Metro Embarcadero Station\",\"Metro Embarcadero Station/Downtown\",\"Metro Forest Hill Station/Downtown\",\"Metro Montgomery 
Station/Downtown\",\"Metro Montgomery Station/Outbound\",\"Metro Powell Station/Downtown\",\"Metro Powell Station/Outbound\",\"Metro Van Ness Station\",\"Metro Van Ness Station/Downtown\",\"Metro Van Ness Station/Outbound\",\"Van Ness Station Outbound\"]}]}]}}",
|
||||
"maxzoom": "11",
|
||||
"minzoom": "11",
|
||||
"name": "tests/muni/decode/multi.mbtiles",
|
||||
"type": "overlay",
|
||||
"version": "2"
|
||||
}, "features": [
|
||||
{ "type": "FeatureCollection", "properties": { "zoom": 11, "x": 326, "y": 791 }, "features": [
|
||||
] }
|
||||
,
|
||||
{ "type": "FeatureCollection", "properties": { "zoom": 11, "x": 327, "y": 792 }, "features": [
|
||||
] }
|
||||
,
|
||||
{ "type": "FeatureCollection", "properties": { "zoom": 11, "x": 327, "y": 791 }, "features": [
|
||||
{ "type": "FeatureCollection", "properties": { "layer": "subway", "version": 2, "extent": 4096 }, "features": [
|
||||
{ "type": "Feature", "properties": { "name": "Metro Castro Station/Outbound" }, "geometry": { "type": "Point", "coordinates": [ 0.47900390625, 0.6826171875 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Castro Station/Downtown" }, "geometry": { "type": "Point", "coordinates": [ 0.4794921875, 0.68310546875 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Forest Hill Station/Downtown" }, "geometry": { "type": "Point", "coordinates": [ 0.346435546875, 0.785888671875 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Montgomery Station/Outbound" }, "geometry": { "type": "Point", "coordinates": [ 0.667724609375, 0.494873046875 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Montgomery Station/Downtown" }, "geometry": { "type": "Point", "coordinates": [ 0.6689453125, 0.495361328125 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Embarcadero Station/Downtown" }, "geometry": { "type": "Point", "coordinates": [ 0.699462890625, 0.46435546875 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Embarcadero Station" }, "geometry": { "type": "Point", "coordinates": [ 0.7001953125, 0.46337890625 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Embarcadero Station" }, "geometry": { "type": "Point", "coordinates": [ 0.700439453125, 0.46337890625 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Civic Center Station/Downtown" }, "geometry": { "type": "Point", "coordinates": [ 0.608642578125, 0.5556640625 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Van Ness Station Outbound" }, "geometry": { "type": "Point", "coordinates": [ 0.56982421875, 0.59228515625 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Van Ness Station/Outbound" }, "geometry": { "type": "Point", "coordinates": [ 0.5703125, 0.593017578125 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Van Ness Station" }, "geometry": { "type": "Point", "coordinates": [ 0.57080078125, 0.593505859375 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Van Ness Station/Downtown" }, "geometry": { "type": "Point", "coordinates": [ 0.5703125, 0.59375 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Civic Center Station/Outbd" }, "geometry": { "type": "Point", "coordinates": [ 0.594482421875, 0.567626953125 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Civic Center Station/Downtn" }, "geometry": { "type": "Point", "coordinates": [ 0.595703125, 0.568603515625 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Church Station/Outbound" }, "geometry": { "type": "Point", "coordinates": [ 0.51318359375, 0.649169921875 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Church Station/Downtown" }, "geometry": { "type": "Point", "coordinates": [ 0.513916015625, 0.650146484375 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Powell Station/Outbound" }, "geometry": { "type": "Point", "coordinates": [ 0.635498046875, 0.527099609375 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Powell Station/Downtown" }, "geometry": { "type": "Point", "coordinates": [ 0.635986328125, 0.52783203125 ] } }
|
||||
] }
|
||||
] }
|
||||
,
|
||||
{ "type": "FeatureCollection", "properties": { "zoom": 11, "x": 954, "y": 791 }, "features": [
|
||||
] }
|
||||
] }
|
64
tests/muni/decode/multi.mbtiles.integer.json
Normal file
@ -0,0 +1,64 @@
|
||||
{ "type": "FeatureCollection", "properties": {
|
||||
"bounds": "-122.538670,37.705764,-12.240000,37.836443",
|
||||
"center": "-122.431641,37.788049,11",
|
||||
"description": "tests/muni/decode/multi.mbtiles",
|
||||
"format": "pbf",
|
||||
"generator_options": "./tippecanoe -q -z11 -Z11 -f -o tests/muni/decode/multi.mbtiles tests/muni/muni.json",
|
||||
"json": "{\"vector_layers\": [ { \"id\": \"muni\", \"description\": \"\", \"minzoom\": 11, \"maxzoom\": 11, \"fields\": {\"name\": \"String\"} }, { \"id\": \"subway\", \"description\": \"\", \"minzoom\": 11, \"maxzoom\": 11, \"fields\": {\"name\": \"String\"} } ],\"tilestats\": {\"layerCount\": 2,\"layers\": [{\"layer\": \"muni\",\"count\": 4592,\"geometry\": \"Point\",\"attributeCount\": 1,\"attributes\": [{\"attribute\": \"name\",\"count\": 1000,\"type\": \"string\",\"values\": [\" 4th St & Brannan St\",\" Conzelman Rd & Mccullough Rd\",\"100 O'Shaughnessy Blvd\",\"101 Dakota St\",\"1095 CONNECTICUT ST\",\"10th Ave & Ortega St\",\"10th Ave & Pacheco St\",\"10th Ave & Quintara St\",\"1100 Lake Merced Blvd\",\"115 TELEGRAPH Hill Blvd\",\"117 Warren Dr\",\"11th St & Bryant St\",\"11th St & Folsom St\",\"11th St & Harrison St\",\"11th St & Howard St\",\"11th St & Market St\",\"11th St & Mission St\",\"11th St/btw Market & Mission\",\"120 Portola Dr\",\"126 Miraloma Dr\",\"13th St & Gateview Ave\",\"14 Dakota St\",\"14th Avenue & Geary Boulevard\",\"14th Ave & Quintara St\",\"14th Ave & Santiago St\",\"14th Ave & Taraval St\",\"14th Ave & Ulloa St\",\"14th St & Alpine Ter\",\"14th St & Castro St\",\"14th St & Church St\",\"14th St & Mission St\",\"14th St & Noe St\",\"14th St & SANCHEZ ST\",\"14th St & Sanchez St\",\"150 Otis St\",\"15th Ave & Noriega St\",\"15th Ave & Ortega St\",\"15th Ave & Pacheco St\",\"15th Ave & Quintara St\",\"15th Ave & Taraval St\",\"15th Ave & Ulloa St\",\"15th Ave & West Portal Ave\",\"15th St & Mission St\",\"16 th St & South Van Ness\",\"164 Addison St\",\"1650 Geneva Ave\",\"1697 7th Ave\",\"16th Ave & Lawton St\",\"16th Ave & Lomita Ave\",\"16th Ave & Moraga St\",\"16th Ave & Noriega St\",\"16th Ave & Ortega St\",\"16th Ave & Pacheco St\",\"16th Avenue at Lawton Street\",\"16th St & 4th St\",\"16th St & Bryant St\",\"16th St & Church St\",\"16th St & Dolores St\",\"16th St & Folsom St\",\"16th St & Guerrero St\",\"16th St & Harrison St\",\"16th St & Kansas St\",\"16th St & Mission St\",\"16th St & Missouri St\",\"16th St & Potrero Ave\",\"16th St & San Bruno Ave\",\"16th St & Shotwell St\",\"16th St & South Van Ness\",\"16th St & Valencia St\",\"16th St & Vermont St\",\"16th St & Wisconsin St\",\"16th St& Rhode Island St\",\"16th Street & 4th Street\",\"16th Street & Missouri St\",\"16th Street & Rhode Islandi St\",\"16th Street & Wisconsin St\",\"170 Buckingham Way\",\"1701 Geneva Ave\",\"1721 Geneva Ave\",\"1725 Sunnydale Ave\",\"1730 3rd St\",\"1731 3RD ST\",\"1750 Geneva Ave\",\"176 Rhode Island St\",\"1798 Laguna Honda Blvd\",\"17TH ST & KANSAS ST\",\"17th Ave & Quintara St\",\"17th Ave & Rivera St\",\"17th Ave & Santiago St\",\"17th St & Belvedere St\",\"17th St & Castro St\",\"17th St & Clayton St\",\"17th St & Cole St\",\"17th St & De Haro St\",\"17th St & Diamond St\",\"17th St & Kansas St\",\"17th St & Noe St\",\"17th St & Wisconsin St\",\"1800 Sunnydale Ave\",\"18th St & 3rd St\"]}]},{\"layer\": \"subway\",\"count\": 19,\"geometry\": \"Point\",\"attributeCount\": 1,\"attributes\": [{\"attribute\": \"name\",\"count\": 18,\"type\": \"string\",\"values\": [\"Metro Castro Station/Downtown\",\"Metro Castro Station/Outbound\",\"Metro Church Station/Downtown\",\"Metro Church Station/Outbound\",\"Metro Civic Center Station/Downtn\",\"Metro Civic Center Station/Downtown\",\"Metro Civic Center Station/Outbd\",\"Metro Embarcadero Station\",\"Metro Embarcadero Station/Downtown\",\"Metro Forest Hill Station/Downtown\",\"Metro Montgomery 
Station/Downtown\",\"Metro Montgomery Station/Outbound\",\"Metro Powell Station/Downtown\",\"Metro Powell Station/Outbound\",\"Metro Van Ness Station\",\"Metro Van Ness Station/Downtown\",\"Metro Van Ness Station/Outbound\",\"Van Ness Station Outbound\"]}]}]}}",
|
||||
"maxzoom": "11",
|
||||
"minzoom": "11",
|
||||
"name": "tests/muni/decode/multi.mbtiles",
|
||||
"type": "overlay",
|
||||
"version": "2"
|
||||
}, "features": [
|
||||
{ "type": "FeatureCollection", "properties": { "zoom": 11, "x": 326, "y": 791 }, "features": [
|
||||
] }
|
||||
,
|
||||
{ "type": "FeatureCollection", "properties": { "zoom": 11, "x": 327, "y": 792 }, "features": [
|
||||
] }
|
||||
,
|
||||
{ "type": "FeatureCollection", "properties": { "zoom": 11, "x": 327, "y": 791 }, "features": [
|
||||
{ "type": "FeatureCollection", "properties": { "layer": "subway", "version": 2, "extent": 4096 }, "features": [
|
||||
{ "type": "Feature", "properties": { "name": "Metro Castro Station/Outbound" }, "geometry": { "type": "Point", "coordinates": [ 1962, 2796 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Castro Station/Downtown" }, "geometry": { "type": "Point", "coordinates": [ 1964, 2798 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Forest Hill Station/Downtown" }, "geometry": { "type": "Point", "coordinates": [ 1419, 3219 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Montgomery Station/Outbound" }, "geometry": { "type": "Point", "coordinates": [ 2735, 2027 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Montgomery Station/Downtown" }, "geometry": { "type": "Point", "coordinates": [ 2740, 2029 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Embarcadero Station/Downtown" }, "geometry": { "type": "Point", "coordinates": [ 2865, 1902 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Embarcadero Station" }, "geometry": { "type": "Point", "coordinates": [ 2868, 1898 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Embarcadero Station" }, "geometry": { "type": "Point", "coordinates": [ 2869, 1898 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Civic Center Station/Downtown" }, "geometry": { "type": "Point", "coordinates": [ 2493, 2276 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Van Ness Station Outbound" }, "geometry": { "type": "Point", "coordinates": [ 2334, 2426 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Van Ness Station/Outbound" }, "geometry": { "type": "Point", "coordinates": [ 2336, 2429 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Van Ness Station" }, "geometry": { "type": "Point", "coordinates": [ 2338, 2431 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Van Ness Station/Downtown" }, "geometry": { "type": "Point", "coordinates": [ 2336, 2432 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Civic Center Station/Outbd" }, "geometry": { "type": "Point", "coordinates": [ 2435, 2325 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Civic Center Station/Downtn" }, "geometry": { "type": "Point", "coordinates": [ 2440, 2329 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Church Station/Outbound" }, "geometry": { "type": "Point", "coordinates": [ 2102, 2659 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Church Station/Downtown" }, "geometry": { "type": "Point", "coordinates": [ 2105, 2663 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Powell Station/Outbound" }, "geometry": { "type": "Point", "coordinates": [ 2603, 2159 ] } }
|
||||
,
|
||||
{ "type": "Feature", "properties": { "name": "Metro Powell Station/Downtown" }, "geometry": { "type": "Point", "coordinates": [ 2605, 2162 ] } }
|
||||
] }
|
||||
] }
|
||||
,
|
||||
{ "type": "FeatureCollection", "properties": { "zoom": 11, "x": 954, "y": 791 }, "features": [
|
||||
] }
|
||||
] }
|
5
text.cpp
@ -5,6 +5,7 @@
#include <string.h>
#include "milo/dtoa_milo.h"
#include "milo/milo.h"
#include "errors.hpp"

/**
* Returns an empty string if `s` is valid utf8;
@ -132,7 +133,7 @@ int integer_zoom(std::string where, std::string text) {
double d = atof(text.c_str());
if (!isfinite(d) || d != floor(d) || d < 0 || d > 32) {
fprintf(stderr, "%s: Expected integer zoom level in \"tippecanoe\" GeoJSON extension, not %s\n", where.c_str(), text.c_str());
exit(EXIT_FAILURE);
exit(EXIT_JSON);
}
return d;
}
@ -179,7 +180,7 @@ char *dtoa_milo(double val) {
char *dup = strdup(s.c_str());
if (dup == NULL) {
perror("strdup");
exit(EXIT_FAILURE);
exit(EXIT_MEMORY);
}
return dup;
}
@ -37,6 +37,7 @@
|
||||
#include <functional>
|
||||
#include "jsonpull/jsonpull.h"
|
||||
#include "milo/dtoa_milo.h"
|
||||
#include "errors.hpp"
|
||||
|
||||
int pk = false;
|
||||
int pC = false;
|
||||
@ -54,6 +55,7 @@ struct stats {
|
||||
int maxzoom;
|
||||
double midlat, midlon;
|
||||
double minlat, minlon, maxlat, maxlon;
|
||||
std::vector<struct strategy> strategies;
|
||||
};
|
||||
|
||||
void aprintf(std::string *buf, const char *format, ...) {
|
||||
@ -63,7 +65,7 @@ void aprintf(std::string *buf, const char *format, ...) {
|
||||
va_start(ap, format);
|
||||
if (vasprintf(&tmp, format, ap) < 0) {
|
||||
fprintf(stderr, "memory allocation failure\n");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_MEMORY);
|
||||
}
|
||||
va_end(ap);
|
||||
|
||||
@ -78,7 +80,7 @@ void handle(std::string message, int z, unsigned x, unsigned y, std::map<std::st
|
||||
|
||||
if (!tile.decode(message, was_compressed)) {
|
||||
fprintf(stderr, "Couldn't decompress tile %d/%u/%u\n", z, x, y);
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_MVT);
|
||||
}
|
||||
|
||||
for (size_t l = 0; l < tile.layers.size(); l++) {
|
||||
@ -430,13 +432,13 @@ struct reader *begin_reading(char *fname) {
|
||||
|
||||
if (sqlite3_open(fname, &db) != SQLITE_OK) {
|
||||
fprintf(stderr, "%s: %s\n", fname, sqlite3_errmsg(db));
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_SQLITE);
|
||||
}
|
||||
|
||||
char *err = NULL;
|
||||
if (sqlite3_exec(db, "PRAGMA integrity_check;", NULL, NULL, &err) != SQLITE_OK) {
|
||||
fprintf(stderr, "%s: integrity_check: %s\n", fname, err);
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_SQLITE);
|
||||
}
|
||||
|
||||
const char *sql = "SELECT zoom_level, tile_column, tile_row, tile_data from tiles order by zoom_level, tile_column, tile_row;";
|
||||
@ -444,7 +446,7 @@ struct reader *begin_reading(char *fname) {
|
||||
|
||||
if (sqlite3_prepare_v2(db, sql, -1, &stmt, NULL) != SQLITE_OK) {
|
||||
fprintf(stderr, "%s: select failed: %s\n", fname, sqlite3_errmsg(db));
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_SQLITE);
|
||||
}
|
||||
|
||||
r->db = db;
|
||||
@ -561,7 +563,7 @@ void handle_tasks(std::map<zxy, std::vector<std::string>> &tasks, std::vector<st
|
||||
for (size_t i = 0; i < CPUS; i++) {
|
||||
if (pthread_create(&pthreads[i], NULL, join_worker, &args[i]) != 0) {
|
||||
perror("pthread_create");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_PTHREAD);
|
||||
}
|
||||
}
|
||||
|
||||
@ -582,6 +584,54 @@ void handle_tasks(std::map<zxy, std::vector<std::string>> &tasks, std::vector<st
|
||||
}
|
||||
}
|
||||
|
||||
void handle_strategies(const unsigned char *s, std::vector<strategy> *st) {
|
||||
json_pull *jp = json_begin_string((const char *) s);
|
||||
json_object *o = json_read_tree(jp);
|
||||
|
||||
if (o != NULL && o->type == JSON_ARRAY) {
|
||||
for (size_t i = 0; i < o->value.array.length; i++) {
|
||||
json_object *h = o->value.array.array[i];
|
||||
if (h->type == JSON_HASH) {
|
||||
for (size_t j = 0; j < h->value.object.length; j++) {
|
||||
json_object *k = h->value.object.keys[j];
|
||||
json_object *v = h->value.object.values[j];
|
||||
|
||||
if (k->type != JSON_STRING) {
|
||||
fprintf(stderr, "Key %zu of %zu is not a string: %s\n", j, i, s);
|
||||
} else if (v->type != JSON_NUMBER) {
|
||||
fprintf(stderr, "Value %zu of %zu is not a number: %s\n", j, i, s);
|
||||
} else {
|
||||
if (i <= st->size()) {
|
||||
st->resize(i + 1);
|
||||
}
|
||||
|
||||
if (strcmp(k->value.string.string, "dropped_by_rate") == 0) {
|
||||
(*st)[i].dropped_by_rate += v->value.number.number;
|
||||
} else if (strcmp(k->value.string.string, "dropped_by_gamma") == 0) {
|
||||
(*st)[i].dropped_by_gamma += v->value.number.number;
|
||||
} else if (strcmp(k->value.string.string, "dropped_as_needed") == 0) {
|
||||
(*st)[i].dropped_as_needed += v->value.number.number;
|
||||
} else if (strcmp(k->value.string.string, "coalesced_as_needed") == 0) {
|
||||
(*st)[i].coalesced_as_needed += v->value.number.number;
|
||||
} else if (strcmp(k->value.string.string, "detail_reduced") == 0) {
|
||||
(*st)[i].detail_reduced += v->value.number.number;
|
||||
} else if (strcmp(k->value.string.string, "tiny_polygons") == 0) {
|
||||
(*st)[i].tiny_polygons += v->value.number.number;
|
||||
} else if (strcmp(k->value.string.string, "tile_size_desired") == 0) {
|
||||
(*st)[i].tile_size += v->value.number.number;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
fprintf(stderr, "Element %zu is not a hash: %s\n", i, s);
|
||||
}
|
||||
}
|
||||
json_free(o);
|
||||
}
|
||||
|
||||
json_end(jp);
|
||||
}
|
||||
|
||||
void handle_vector_layers(json_object *vector_layers, std::map<std::string, layermap_entry> &layermap, std::map<std::string, std::string> &attribute_descriptions) {
|
||||
if (vector_layers != NULL && vector_layers->type == JSON_ARRAY) {
|
||||
for (size_t i = 0; i < vector_layers->value.array.length; i++) {
|
||||
@ -621,7 +671,7 @@ void handle_vector_layers(json_object *vector_layers, std::map<std::string, laye
|
||||
}
|
||||
}
|
||||
|
||||
void decode(struct reader *readers, std::map<std::string, layermap_entry> &layermap, sqlite3 *outdb, const char *outdir, struct stats *st, std::vector<std::string> &header, std::map<std::string, std::vector<std::string>> &mapping, std::set<std::string> &exclude, int ifmatched, std::string &attribution, std::string &description, std::set<std::string> &keep_layers, std::set<std::string> &remove_layers, std::string &name, json_object *filter, std::map<std::string, std::string> &attribute_descriptions, std::string &generator_options) {
|
||||
void decode(struct reader *readers, std::map<std::string, layermap_entry> &layermap, sqlite3 *outdb, const char *outdir, struct stats *st, std::vector<std::string> &header, std::map<std::string, std::vector<std::string>> &mapping, std::set<std::string> &exclude, int ifmatched, std::string &attribution, std::string &description, std::set<std::string> &keep_layers, std::set<std::string> &remove_layers, std::string &name, json_object *filter, std::map<std::string, std::string> &attribute_descriptions, std::string &generator_options, std::vector<strategy> *strategies) {
|
||||
std::vector<std::map<std::string, layermap_entry>> layermaps;
|
||||
for (size_t i = 0; i < CPUS; i++) {
|
||||
layermaps.push_back(std::map<std::string, layermap_entry>());
|
||||
@ -840,11 +890,18 @@ void decode(struct reader *readers, std::map<std::string, layermap_entry> &layer
|
||||
}
|
||||
sqlite3_finalize(r->stmt);
|
||||
}
|
||||
if (sqlite3_prepare_v2(db, "SELECT value from metadata where name = 'strategies'", -1, &r->stmt, NULL) == SQLITE_OK) {
|
||||
if (sqlite3_step(r->stmt) == SQLITE_ROW) {
|
||||
const unsigned char *s = sqlite3_column_text(r->stmt, 0);
|
||||
handle_strategies(s, strategies);
|
||||
}
|
||||
sqlite3_finalize(r->stmt);
|
||||
}
|
||||
|
||||
// Closes either real db or temp mirror of metadata.json
|
||||
if (sqlite3_close(db) != SQLITE_OK) {
|
||||
fprintf(stderr, "Could not close database: %s\n", sqlite3_errmsg(db));
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_CLOSE);
|
||||
}
|
||||
|
||||
delete r;
|
||||
@ -853,7 +910,7 @@ void decode(struct reader *readers, std::map<std::string, layermap_entry> &layer
|
||||
|
||||
void usage(char **argv) {
|
||||
fprintf(stderr, "Usage: %s [-f] [-i] [-pk] [-pC] [-c joins.csv] [-X] [-x exclude ...] -o new.mbtiles source.mbtiles ...\n", argv[0]);
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_ARGS);
|
||||
}
|
||||
|
||||
int main(int argc, char **argv) {
|
||||
@ -994,14 +1051,14 @@ int main(int argc, char **argv) {
|
||||
pe = true;
|
||||
} else {
|
||||
fprintf(stderr, "%s: Unknown option for -p%s\n", argv[0], optarg);
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_ARGS);
|
||||
}
|
||||
break;
|
||||
|
||||
case 'c':
|
||||
if (csv != NULL) {
|
||||
fprintf(stderr, "Only one -c for now\n");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_ARGS);
|
||||
}
|
||||
|
||||
csv = optarg;
|
||||
@ -1028,7 +1085,7 @@ int main(int argc, char **argv) {
|
||||
char *cp = strchr(optarg, ':');
|
||||
if (cp == NULL || cp == optarg) {
|
||||
fprintf(stderr, "%s: -R requires old:new\n", argv[0]);
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_ARGS);
|
||||
}
|
||||
std::string before = std::string(optarg).substr(0, cp - optarg);
|
||||
std::string after = std::string(cp + 1);
|
||||
@ -1050,7 +1107,7 @@ int main(int argc, char **argv) {
|
||||
max_tilestats_values = atoi(optarg);
|
||||
} else {
|
||||
fprintf(stderr, "%s: Unrecognized option --%s\n", argv[0], opt);
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_ARGS);
|
||||
}
|
||||
break;
|
||||
}
|
||||
@ -1076,7 +1133,7 @@ int main(int argc, char **argv) {
|
||||
|
||||
if (minzoom > maxzoom) {
|
||||
fprintf(stderr, "%s: Minimum zoom -Z%d cannot be greater than maxzoom -z%d\n", argv[0], minzoom, maxzoom);
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_ARGS);
|
||||
}
|
||||
|
||||
if (out_mbtiles != NULL) {
|
||||
@ -1117,8 +1174,9 @@ int main(int argc, char **argv) {
|
||||
|
||||
std::map<std::string, std::string> attribute_descriptions;
|
||||
std::string generator_options;
|
||||
std::vector<strategy> strategies;
|
||||
|
||||
decode(readers, layermap, outdb, out_dir, &st, header, mapping, exclude, ifmatched, attribution, description, keep_layers, remove_layers, name, filter, attribute_descriptions, generator_options);
|
||||
decode(readers, layermap, outdb, out_dir, &st, header, mapping, exclude, ifmatched, attribution, description, keep_layers, remove_layers, name, filter, attribute_descriptions, generator_options, &strategies);
|
||||
|
||||
if (set_attribution.size() != 0) {
|
||||
attribution = set_attribution;
|
||||
@ -1144,8 +1202,6 @@ int main(int argc, char **argv) {
|
||||
}
|
||||
}
|
||||
|
||||
std::vector<strategy> strategies;
|
||||
|
||||
mbtiles_write_metadata(outdb, out_dir, name.c_str(), st.minzoom, st.maxzoom, st.minlat, st.minlon, st.maxlat, st.maxlon, st.midlat, st.midlon, 0, attribution.size() != 0 ? attribution.c_str() : NULL, layermap, true, description.c_str(), !pg, attribute_descriptions, "tile-join", generator_options, strategies);
|
||||
|
||||
if (outdb != NULL) {
|
||||
|
173
tile.cpp
@ -39,6 +39,7 @@
|
||||
#include "write_json.hpp"
|
||||
#include "milo/dtoa_milo.h"
|
||||
#include "evaluator.hpp"
|
||||
#include "errors.hpp"
|
||||
|
||||
extern "C" {
|
||||
#include "jsonpull/jsonpull.h"
|
||||
@ -819,7 +820,7 @@ bool find_common_edges(std::vector<partial> &partials, int z, int line_detail, d
|
||||
|
||||
if (e1.first == e1.second || e2.first == e2.second) {
|
||||
fprintf(stderr, "Internal error: polygon edge lookup failed for %lld,%lld to %lld,%lld or %lld,%lld to %lld,%lld\n", left[0].x, left[0].y, left[1].x, left[1].y, right[0].x, right[0].y, right[1].x, right[1].y);
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_IMPOSSIBLE);
|
||||
}
|
||||
|
||||
if (!edges_same(e1, e2)) {
|
||||
@ -896,7 +897,7 @@ bool find_common_edges(std::vector<partial> &partials, int z, int line_detail, d
|
||||
|
||||
if (tmp.size() != l - k) {
|
||||
fprintf(stderr, "internal error shifting ring\n");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_IMPOSSIBLE);
|
||||
}
|
||||
|
||||
for (size_t m = 0; m < tmp.size(); m++) {
|
||||
@ -916,7 +917,7 @@ bool find_common_edges(std::vector<partial> &partials, int z, int line_detail, d
|
||||
for (size_t m = k; m < l; m++) {
|
||||
if (!g[m].necessary) {
|
||||
fprintf(stderr, "internal error in arc building\n");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_IMPOSSIBLE);
|
||||
}
|
||||
|
||||
drawvec arc;
|
||||
@ -1596,7 +1597,7 @@ void *run_prefilter(void *v) {
|
||||
decode_meta(sf.keys, sf.values, rpa->stringpool + rpa->pool_off[sf.segment], tmp_layer, tmp_feature);
|
||||
tmp_layer.features.push_back(tmp_feature);
|
||||
|
||||
layer_to_geojson(tmp_layer, 0, 0, 0, false, true, false, true, sf.index, sf.seq, sf.extent, true, state);
|
||||
layer_to_geojson(tmp_layer, 0, 0, 0, false, true, false, true, sf.index, sf.seq, sf.extent, true, state, 0);
|
||||
}
|
||||
|
||||
if (fclose(rpa->prefilter_fp) != 0) {
|
||||
@ -1608,7 +1609,7 @@ void *run_prefilter(void *v) {
|
||||
}
|
||||
} else {
|
||||
perror("fclose output to prefilter");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_CLOSE);
|
||||
}
|
||||
}
|
||||
return NULL;
|
||||
@ -1632,7 +1633,7 @@ void add_tilestats(std::string const &layername, int z, std::vector<std::map<std
|
||||
auto fk = layermap.find(layername);
|
||||
if (fk == layermap.end()) {
|
||||
fprintf(stderr, "Internal error: layer %s not found\n", layername.c_str());
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_IMPOSSIBLE);
|
||||
}
|
||||
|
||||
type_and_string attrib;
|
||||
@ -1803,11 +1804,11 @@ long long write_tile(FILE *geoms, std::atomic<long long> *geompos_in, char *meta
|
||||
int max_zoom_increment = std::log(child_shards) / std::log(4);
|
||||
if (child_shards < 4 || max_zoom_increment < 1) {
|
||||
fprintf(stderr, "Internal error: %d shards, max zoom increment %d\n", child_shards, max_zoom_increment);
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_IMPOSSIBLE);
|
||||
}
|
||||
if ((((child_shards - 1) << 1) & child_shards) != child_shards) {
|
||||
fprintf(stderr, "Internal error: %d shards not a power of 2\n", child_shards);
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_IMPOSSIBLE);
|
||||
}
|
||||
|
||||
int nextzoom = z + 1;
|
||||
@ -1848,6 +1849,8 @@ long long write_tile(FILE *geoms, std::atomic<long long> *geompos_in, char *meta
|
||||
drawvec shared_nodes;
|
||||
|
||||
int tile_detail = line_detail;
|
||||
size_t skipped = 0;
|
||||
size_t kept = 0;
|
||||
|
||||
int within[child_shards];
|
||||
std::atomic<long long> geompos[child_shards];
|
||||
@ -1859,7 +1862,7 @@ long long write_tile(FILE *geoms, std::atomic<long long> *geompos_in, char *meta
|
||||
if (*geompos_in != og) {
|
||||
if (fseek(geoms, og, SEEK_SET) != 0) {
|
||||
perror("fseek geom");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_SEEK);
|
||||
}
|
||||
*geompos_in = og;
|
||||
}
|
||||
@ -1882,7 +1885,7 @@ long long write_tile(FILE *geoms, std::atomic<long long> *geompos_in, char *meta
|
||||
prefilter_fp = fdopen(prefilter_write, "w");
|
||||
if (prefilter_fp == NULL) {
|
||||
perror("freopen prefilter");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_OPEN);
|
||||
}
|
||||
|
||||
rpa.geoms = geoms;
|
||||
@ -1921,13 +1924,13 @@ long long write_tile(FILE *geoms, std::atomic<long long> *geompos_in, char *meta
|
||||
|
||||
if (pthread_create(&prefilter_writer, NULL, run_prefilter, &rpa) != 0) {
|
||||
perror("pthread_create (prefilter writer)");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_PTHREAD);
|
||||
}
|
||||
|
||||
prefilter_read_fp = fdopen(prefilter_read, "r");
|
||||
if (prefilter_read_fp == NULL) {
|
||||
perror("fdopen prefilter output");
|
||||
exit(EXIT_FAILURE);
|
||||
exit(EXIT_OPEN);
|
||||
}
|
||||
prefilter_jp = json_begin_file(prefilter_read_fp);
|
||||
}
|
||||
@ -2074,47 +2077,55 @@ long long write_tile(FILE *geoms, std::atomic<long long> *geompos_in, char *meta
|
||||
}
|
||||
|
||||
if (sf.geometry.size() > 0) {
|
||||
if (prevent[P_SIMPLIFY_SHARED_NODES]) {
|
||||
for (auto &g : sf.geometry) {
|
||||
shared_nodes.push_back(g);
|
||||
if (partials.size() > max_tile_size) {
|
||||
// Even being maximally conservative, each feature is still going to be
|
||||
// at least one byte in the output tile, so this can't possibly work.
|
||||
skipped++;
|
||||
} else {
|
||||
kept++;
|
||||
|
||||
if (prevent[P_SIMPLIFY_SHARED_NODES]) {
|
||||
for (auto &g : sf.geometry) {
|
||||
shared_nodes.push_back(g);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
partial p;
|
||||
p.geoms.push_back(sf.geometry);
|
||||
p.layer = sf.layer;
|
||||
p.t = sf.t;
|
||||
p.segment = sf.segment;
|
||||
p.original_seq = sf.seq;
|
||||
p.reduced = reduced;
|
||||
p.z = z;
|
||||
p.line_detail = line_detail;
|
||||
p.extra_detail = line_detail;
|
||||
p.maxzoom = maxzoom;
|
||||
p.keys = sf.keys;
|
||||
p.values = sf.values;
|
||||
p.full_keys = sf.full_keys;
|
||||
p.full_values = sf.full_values;
|
||||
p.spacing = spacing;
|
||||
p.simplification = simplification;
|
||||
p.id = sf.id;
|
||||
p.has_id = sf.has_id;
|
||||
p.index = sf.index;
|
||||
p.renamed = -1;
|
||||
p.extent = sf.extent;
|
||||
p.clustered = 0;
|
||||
partial p;
|
||||
p.geoms.push_back(sf.geometry);
|
||||
p.layer = sf.layer;
|
||||
p.t = sf.t;
|
||||
p.segment = sf.segment;
|
||||
p.original_seq = sf.seq;
|
||||
p.reduced = reduced;
|
||||
p.z = z;
|
||||
p.line_detail = line_detail;
|
||||
p.extra_detail = line_detail;
|
||||
p.maxzoom = maxzoom;
|
||||
p.keys = sf.keys;
|
||||
p.values = sf.values;
|
||||
p.full_keys = sf.full_keys;
|
||||
p.full_values = sf.full_values;
|
||||
p.spacing = spacing;
|
||||
p.simplification = simplification;
|
||||
p.id = sf.id;
|
||||
p.has_id = sf.has_id;
|
||||
p.index = sf.index;
|
||||
p.renamed = -1;
|
||||
p.extent = sf.extent;
|
||||
p.clustered = 0;
|
||||
|
||||
if (line_detail == detail && extra_detail >= 0 && z == maxzoom) {
|
||||
p.extra_detail = extra_detail;
|
||||
// maximum allowed coordinate delta in geometries is 2^31 - 1
|
||||
// so we need to stay under that, including the buffer
|
||||
if (p.extra_detail >= 30 - z) {
|
||||
p.extra_detail = 30 - z;
|
||||
if (line_detail == detail && extra_detail >= 0 && z == maxzoom) {
|
||||
p.extra_detail = extra_detail;
|
||||
// maximum allowed coordinate delta in geometries is 2^31 - 1
|
||||
// so we need to stay under that, including the buffer
|
||||
if (p.extra_detail >= 30 - z) {
|
||||
p.extra_detail = 30 - z;
|
||||
}
|
||||
tile_detail = p.extra_detail;
|
||||
}
|
||||
tile_detail = p.extra_detail;
|
||||
}
|
||||
|
||||
partials.push_back(p);
|
||||
partials.push_back(p);
|
||||
}
|
||||
}
|
||||
|
||||
merge_previndex = sf.index;
|
||||
@ -2184,13 +2195,13 @@ long long write_tile(FILE *geoms, std::atomic<long long> *geompos_in, char *meta
json_end(prefilter_jp);
if (fclose(prefilter_read_fp) != 0) {
perror("close output from prefilter");
exit(EXIT_FAILURE);
exit(EXIT_CLOSE);
}
while (1) {
int stat_loc;
if (waitpid(prefilter_pid, &stat_loc, 0) < 0) {
perror("waitpid for prefilter\n");
exit(EXIT_FAILURE);
exit(EXIT_PTHREAD);
}
if (WIFEXITED(stat_loc) || WIFSIGNALED(stat_loc)) {
break;
@ -2199,7 +2210,7 @@ long long write_tile(FILE *geoms, std::atomic<long long> *geompos_in, char *meta
void *ret;
if (pthread_join(prefilter_writer, &ret) != 0) {
perror("pthread_join prefilter writer");
exit(EXIT_FAILURE);
exit(EXIT_PTHREAD);
}
}

@ -2227,7 +2238,7 @@ long long write_tile(FILE *geoms, std::atomic<long long> *geompos_in, char *meta
if (tasks > 1) {
if (pthread_create(&pthreads[i], NULL, partial_feature_worker, &args[i]) != 0) {
perror("pthread_create");
exit(EXIT_FAILURE);
exit(EXIT_PTHREAD);
}
} else {
partial_feature_worker(&args[i]);
@ -2282,7 +2293,7 @@ long long write_tile(FILE *geoms, std::atomic<long long> *geompos_in, char *meta
fprintf(stderr, "Internal error: couldn't find layer %s\n", layername.c_str());
fprintf(stderr, "segment %d\n", partials[i].segment);
fprintf(stderr, "layer %lld\n", partials[i].layer);
exit(EXIT_FAILURE);
exit(EXIT_IMPOSSIBLE);
}
l->second.push_back(c);
}
@ -2504,7 +2515,7 @@ long long write_tile(FILE *geoms, std::atomic<long long> *geompos_in, char *meta
line_detail++;
continue;
} else if (additional[A_DROP_SMALLEST_AS_NEEDED] || additional[A_COALESCE_SMALLEST_AS_NEEDED]) {
minextent_fraction = minextent_fraction * max_tile_features / totalsize * 0.90;
minextent_fraction = minextent_fraction * max_tile_features / totalsize * 0.75;
long long m = choose_minextent(extents, minextent_fraction);
if (m != minextent) {
minextent = m;
@ -2550,16 +2561,26 @@ long long write_tile(FILE *geoms, std::atomic<long long> *geompos_in, char *meta
}

if (compressed.size() > max_tile_size && !prevent[P_KILOBYTE_LIMIT]) {
// Estimate how big it really should have been compressed
// from how many features were kept vs skipped for already being
// over the threshold

double kept_adjust = (skipped + kept) / (double) kept;

if (compressed.size() > arg->tile_size_out) {
arg->tile_size_out = compressed.size();
arg->tile_size_out = compressed.size() * kept_adjust;
}

if (!quiet) {
fprintf(stderr, "tile %d/%u/%u size is %lld with detail %d, >%zu \n", z, tx, ty, (long long) compressed.size(), line_detail, max_tile_size);
if (skipped > 0) {
fprintf(stderr, "tile %d/%u/%u size is %lld (probably really %lld) with detail %d, >%zu \n", z, tx, ty, (long long) compressed.size(), (long long) (compressed.size() * kept_adjust), line_detail, max_tile_size);
} else {
fprintf(stderr, "tile %d/%u/%u size is %lld with detail %d, >%zu \n", z, tx, ty, (long long) compressed.size(), line_detail, max_tile_size);
}
}

if (has_polygons && additional[A_MERGE_POLYGONS_AS_NEEDED] && merge_fraction > .05 && merge_successful) {
merge_fraction = merge_fraction * max_tile_size / compressed.size() * 0.95;
merge_fraction = merge_fraction * max_tile_size / (kept_adjust * compressed.size()) * 0.95;
if (!quiet) {
fprintf(stderr, "Going to try merging %0.2f%% of the polygons to make it fit\n", 100 - merge_fraction * 100);
}
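
(Aside, not part of the diff: when features were skipped, the tile's size is scaled up by kept_adjust = (skipped + kept) / kept before being recorded and reported, on the assumption that the skipped features would have compressed about as well as the kept ones, and the same adjusted size feeds the retry fractions below. With illustrative numbers, 8,000 kept and 2,000 skipped give kept_adjust = 1.25, so a 600,000-byte tile is reported as "probably really" 750,000 bytes:)

#include <cstdio>

int main() {
	// Illustrative numbers only, not from a real run.
	double kept = 8000, skipped = 2000;
	double compressed_size = 600000;               // bytes actually written
	double kept_adjust = (skipped + kept) / kept;  // = 1.25
	printf("estimated full size: %.0f bytes\n", compressed_size * kept_adjust);  // 750000
	return 0;
}
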
@ -2581,7 +2602,7 @@ long long write_tile(FILE *geoms, std::atomic<long long> *geompos_in, char *meta
}
line_detail++; // to keep it the same when the loop decrements it
} else if (mingap < ULONG_MAX && (additional[A_DROP_DENSEST_AS_NEEDED] || additional[A_COALESCE_DENSEST_AS_NEEDED] || additional[A_CLUSTER_DENSEST_AS_NEEDED])) {
mingap_fraction = mingap_fraction * max_tile_size / compressed.size() * 0.90;
mingap_fraction = mingap_fraction * max_tile_size / (kept_adjust * compressed.size()) * 0.90;
unsigned long long mg = choose_mingap(indices, mingap_fraction);
if (mg <= mingap) {
double nmg = (mingap + 1) * 1.5;
@ -2606,7 +2627,7 @@ long long write_tile(FILE *geoms, std::atomic<long long> *geompos_in, char *meta
}
line_detail++;
} else if (additional[A_DROP_SMALLEST_AS_NEEDED] || additional[A_COALESCE_SMALLEST_AS_NEEDED]) {
minextent_fraction = minextent_fraction * max_tile_size / compressed.size() * 0.90;
minextent_fraction = minextent_fraction * max_tile_size / (kept_adjust * compressed.size()) * 0.75;
long long m = choose_minextent(extents, minextent_fraction);
if (m != minextent) {
minextent = m;
@ -2624,7 +2645,7 @@ long long write_tile(FILE *geoms, std::atomic<long long> *geompos_in, char *meta
// The 95% is a guess to avoid too many retries
// and probably actually varies based on how much duplicated metadata there is

fraction = fraction * max_tile_size / compressed.size() * 0.95;
fraction = fraction * max_tile_size / (kept_adjust * compressed.size()) * 0.95;
if (!quiet) {
fprintf(stderr, "Going to try keeping %0.2f%% of the features to make it fit\n", fraction * 100);
}
@ -2642,7 +2663,7 @@ long long write_tile(FILE *geoms, std::atomic<long long> *geompos_in, char *meta
if (pass == 1) {
if (pthread_mutex_lock(&db_lock) != 0) {
perror("pthread_mutex_lock");
exit(EXIT_FAILURE);
exit(EXIT_PTHREAD);
}

if (outdb != NULL) {
@ -2653,7 +2674,7 @@ long long write_tile(FILE *geoms, std::atomic<long long> *geompos_in, char *meta

if (pthread_mutex_unlock(&db_lock) != 0) {
perror("pthread_mutex_unlock");
exit(EXIT_FAILURE);
exit(EXIT_PTHREAD);
}
}

@ -2692,8 +2713,8 @@ void *run_thread(void *vargs) {

FILE *geom = fdopen(arg->geomfd[j], "rb");
if (geom == NULL) {
perror("mmap geom");
exit(EXIT_FAILURE);
perror("open geom");
exit(EXIT_OPEN);
}

std::atomic<long long> geompos(0);
@ -2723,7 +2744,7 @@ void *run_thread(void *vargs) {

if (pthread_mutex_lock(&var_lock) != 0) {
perror("pthread_mutex_lock");
exit(EXIT_FAILURE);
exit(EXIT_PTHREAD);
}

if (z == arg->maxzoom) {
@ -2748,7 +2769,7 @@ void *run_thread(void *vargs) {

if (pthread_mutex_unlock(&var_lock) != 0) {
perror("pthread_mutex_unlock");
exit(EXIT_FAILURE);
exit(EXIT_PTHREAD);
}
}

@ -2759,18 +2780,18 @@ void *run_thread(void *vargs) {
int newfd = dup(arg->geomfd[j]);
if (newfd < 0) {
perror("dup geometry");
exit(EXIT_FAILURE);
exit(EXIT_OPEN);
}
if (lseek(newfd, 0, SEEK_SET) < 0) {
perror("lseek geometry");
exit(EXIT_FAILURE);
exit(EXIT_SEEK);
}
arg->geomfd[j] = newfd;
}

if (fclose(geom) != 0) {
perror("close geom");
exit(EXIT_FAILURE);
exit(EXIT_CLOSE);
}
}
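
(Aside, not part of the diff: the dup()/lseek() pair above exists because fclose() on a stream created with fdopen() also closes the underlying file descriptor, so the per-zoom geometry file is duplicated and rewound first to keep it readable on the next pass. A standalone illustration of that POSIX pattern, with a made-up file name:)

#include <cstdio>
#include <fcntl.h>
#include <unistd.h>

int main() {
	int fd = open("/tmp/geom-demo.bin", O_RDWR | O_CREAT | O_TRUNC, 0600);
	if (fd < 0) { perror("open"); return 1; }
	FILE *fp = fdopen(fd, "w+b");
	if (fp == NULL) { perror("fdopen"); return 1; }
	fputs("geometry bytes would go here\n", fp);
	fflush(fp);

	int newfd = dup(fd);                         // survives the fclose() below
	if (newfd < 0) { perror("dup"); return 1; }
	if (lseek(newfd, 0, SEEK_SET) < 0) { perror("lseek"); return 1; }
	fclose(fp);                                  // closes fd, but newfd stays open

	char buf[64];
	ssize_t n = read(newfd, buf, sizeof(buf) - 1);
	if (n >= 0) { buf[n] = '\0'; printf("reread: %s", buf); }
	close(newfd);
	return 0;
}
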
@ -2815,12 +2836,12 @@ int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpo
// printf("%s\n", geomname);
if (subfd[j] < 0) {
perror(geomname);
exit(EXIT_FAILURE);
exit(EXIT_OPEN);
}
sub[j] = fopen_oflag(geomname, "wb", O_WRONLY | O_CLOEXEC);
if (sub[j] == NULL) {
perror(geomname);
exit(EXIT_FAILURE);
exit(EXIT_OPEN);
}
unlink(geomname);
}
@ -2982,7 +3003,7 @@ int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpo

if (pthread_create(&pthreads[thread], NULL, run_thread, &args[thread]) != 0) {
perror("pthread_create");
exit(EXIT_FAILURE);
exit(EXIT_PTHREAD);
}
}

@ -3035,18 +3056,18 @@ int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpo
if (geomfd[j] >= 0) {
if (close(geomfd[j]) != 0) {
perror("close geom");
exit(EXIT_FAILURE);
exit(EXIT_CLOSE);
}
}
if (fclose(sub[j]) != 0) {
perror("close subfile");
exit(EXIT_FAILURE);
exit(EXIT_CLOSE);
}

struct stat geomst;
if (fstat(subfd[j], &geomst) != 0) {
perror("stat geom\n");
exit(EXIT_FAILURE);
exit(EXIT_STAT);
}

geomfd[j] = subfd[j];
@ -3063,7 +3084,7 @@ int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpo
if (geomfd[j] >= 0) {
if (close(geomfd[j]) != 0) {
perror("close geom");
exit(EXIT_FAILURE);
exit(EXIT_CLOSE);
}
}
}

@ -1,6 +1,6 @@
#ifndef VERSION_HPP
#define VERSION_HPP

#define VERSION "v2.6.1"
#define VERSION "v2.6.2"

#endif
@ -14,6 +14,7 @@
#include "mvt.hpp"
#include "write_json.hpp"
#include "milo/dtoa_milo.h"
#include "errors.hpp"

void json_writer::json_adjust() {
if (state.size() == 0) {
@ -62,7 +63,7 @@ void json_writer::json_adjust() {
state[state.size() - 1] = JSON_WRITE_ARRAY_ELEMENT;
} else {
fprintf(stderr, "Impossible JSON state\n");
exit(EXIT_FAILURE);
exit(EXIT_JSON);
}
}

@ -76,7 +77,7 @@ void json_writer::json_write_array() {
void json_writer::json_end_array() {
if (state.size() == 0) {
fprintf(stderr, "End JSON array at top level\n");
exit(EXIT_FAILURE);
exit(EXIT_JSON);
}

json_write_tok tok = state[state.size() - 1];
@ -90,7 +91,7 @@ void json_writer::json_end_array() {
addc(']');
} else {
fprintf(stderr, "End JSON array with unexpected state\n");
exit(EXIT_FAILURE);
exit(EXIT_JSON);
}
}

@ -104,7 +105,7 @@ void json_writer::json_write_hash() {
void json_writer::json_end_hash() {
if (state.size() == 0) {
fprintf(stderr, "End JSON hash at top level\n");
exit(EXIT_FAILURE);
exit(EXIT_JSON);
}

json_write_tok tok = state[state.size() - 1];
@ -124,7 +125,7 @@ void json_writer::json_end_hash() {
addc('}');
} else {
fprintf(stderr, "End JSON hash with unexpected state\n");
exit(EXIT_FAILURE);
exit(EXIT_JSON);
}
}

@ -207,7 +208,7 @@ void json_writer::aprintf(const char *format, ...) {
va_start(ap, format);
if (vasprintf(&tmp, format, ap) < 0) {
fprintf(stderr, "memory allocation failure\n");
exit(EXIT_FAILURE);
exit(EXIT_MEMORY);
}
va_end(ap);

@ -247,7 +248,17 @@ struct lonlat {
}
};

void layer_to_geojson(mvt_layer const &layer, unsigned z, unsigned x, unsigned y, bool comma, bool name, bool zoom, bool dropped, unsigned long long index, long long sequence, long long extent, bool complain, json_writer &state) {
void write_coords(json_writer &state, lonlat const &ll, double scale) {
if (scale == 0) {
state.json_write_float(ll.lon);
state.json_write_float(ll.lat);
} else {
state.json_write_number(ll.x / scale);
state.json_write_number(ll.y / scale);
}
}

void layer_to_geojson(mvt_layer const &layer, unsigned z, unsigned x, unsigned y, bool comma, bool name, bool zoom, bool dropped, unsigned long long index, long long sequence, long long extent, bool complain, json_writer &state, double scale) {
for (size_t f = 0; f < layer.features.size(); f++) {
mvt_feature const &feat = layer.features[f];
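
(Aside, not part of the diff: the new write_coords() helper keys off the added scale parameter: 0 keeps the old behavior of emitting projected longitude/latitude, and anything else divides the raw tile coordinates by scale. That is presumably how the new tippecanoe-decode options plug in, with a scale of 1 giving --integer's integer tile coordinates and a scale equal to the layer extent giving --fraction's fraction-of-tile coordinates; the decoder side is not shown in this diff. A rough standalone sketch of the same dispatch:)

#include <cstdio>

// Stand-in for the lonlat struct: raw tile coordinates plus projected lon/lat.
struct pt { long long x, y; double lon, lat; };

// Same idea as write_coords(): scale == 0 -> lon/lat, otherwise x/scale and y/scale.
void print_coords(const pt &p, double scale) {
	if (scale == 0) {
		printf("[ %f, %f ]\n", p.lon, p.lat);
	} else {
		printf("[ %g, %g ]\n", p.x / scale, p.y / scale);
	}
}

int main() {
	pt p = {2048, 1024, -122.41, 37.77};  // made-up point in a tile of extent 4096
	print_coords(p, 0);     // default: longitude/latitude
	print_coords(p, 1);     // presumably --integer: raw tile coordinates
	print_coords(p, 4096);  // presumably --fraction: fraction of the tile extent
	return 0;
}
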
@ -306,11 +317,11 @@ void layer_to_geojson(mvt_layer const &layer, unsigned z, unsigned x, unsigned y
for (size_t t = 0; t + 1 < feat.tags.size(); t += 2) {
if (feat.tags[t] >= layer.keys.size()) {
fprintf(stderr, "Error: out of bounds feature key (%u in %zu)\n", feat.tags[t], layer.keys.size());
exit(EXIT_FAILURE);
exit(EXIT_IMPOSSIBLE);
}
if (feat.tags[t + 1] >= layer.values.size()) {
fprintf(stderr, "Error: out of bounds feature value (%u in %zu)\n", feat.tags[t + 1], layer.values.size());
exit(EXIT_FAILURE);
exit(EXIT_IMPOSSIBLE);
}

const char *key = layer.keys[feat.tags[t]].c_str();
@ -342,7 +353,7 @@ void layer_to_geojson(mvt_layer const &layer, unsigned z, unsigned x, unsigned y
state.json_write_null();
} else {
fprintf(stderr, "Internal error: property with unknown type\n");
exit(EXIT_FAILURE);
exit(EXIT_IMPOSSIBLE);
}
}

@ -359,9 +370,9 @@ void layer_to_geojson(mvt_layer const &layer, unsigned z, unsigned x, unsigned y
long long py = feat.geometry[g].y;

if (op == VT_MOVETO || op == VT_LINETO) {
long long scale = 1LL << (32 - z);
long long wx = scale * x + (scale / layer.extent) * px;
long long wy = scale * y + (scale / layer.extent) * py;
long long wscale = 1LL << (32 - z);
long long wx = wscale * x + (wscale / layer.extent) * px;
long long wy = wscale * y + (wscale / layer.extent) * py;

double lat, lon;
projection->unproject(wx, wy, 32, &lon, &lat);
@ -380,8 +391,7 @@ void layer_to_geojson(mvt_layer const &layer, unsigned z, unsigned x, unsigned y
state.json_write_string("coordinates");

state.json_write_array();
state.json_write_float(ops[0].lon);
state.json_write_float(ops[0].lat);
write_coords(state, ops[0], scale);
state.json_end_array();
} else {
state.json_write_string("type");
@ -392,8 +402,7 @@ void layer_to_geojson(mvt_layer const &layer, unsigned z, unsigned x, unsigned y

for (size_t i = 0; i < ops.size(); i++) {
state.json_write_array();
state.json_write_float(ops[i].lon);
state.json_write_float(ops[i].lat);
write_coords(state, ops[i], scale);
state.json_end_array();
}

@ -416,8 +425,7 @@ void layer_to_geojson(mvt_layer const &layer, unsigned z, unsigned x, unsigned y

for (size_t i = 0; i < ops.size(); i++) {
state.json_write_array();
state.json_write_float(ops[i].lon);
state.json_write_float(ops[i].lat);
write_coords(state, ops[i], scale);
state.json_end_array();
}

@ -435,8 +443,7 @@ void layer_to_geojson(mvt_layer const &layer, unsigned z, unsigned x, unsigned y
if (ops[i].op == VT_MOVETO) {
if (sstate == 0) {
state.json_write_array();
state.json_write_float(ops[i].lon);
state.json_write_float(ops[i].lat);
write_coords(state, ops[i], scale);
state.json_end_array();

sstate = 1;
@ -445,16 +452,14 @@ void layer_to_geojson(mvt_layer const &layer, unsigned z, unsigned x, unsigned y
state.json_write_array();

state.json_write_array();
state.json_write_float(ops[i].lon);
state.json_write_float(ops[i].lat);
write_coords(state, ops[i], scale);
state.json_end_array();

sstate = 1;
}
} else {
state.json_write_array();
state.json_write_float(ops[i].lon);
state.json_write_float(ops[i].lat);
write_coords(state, ops[i], scale);
state.json_end_array();
}
}
@ -488,7 +493,7 @@ void layer_to_geojson(mvt_layer const &layer, unsigned z, unsigned x, unsigned y
if (!warned) {
fprintf(stderr, "Ring does not end with closepath (ends with %d)\n", ops[i].op);
if (complain) {
exit(EXIT_FAILURE);
exit(EXIT_IMPOSSIBLE);
}

warned = true;
@ -542,7 +547,7 @@ void layer_to_geojson(mvt_layer const &layer, unsigned z, unsigned x, unsigned y
if (!warned) {
fprintf(stderr, "Polygon begins with an inner ring\n");
if (complain) {
exit(EXIT_FAILURE);
exit(EXIT_IMPOSSIBLE);
}

warned = true;
@ -570,13 +575,11 @@ void layer_to_geojson(mvt_layer const &layer, unsigned z, unsigned x, unsigned y
for (size_t j = 0; j < rings[i].size(); j++) {
if (rings[i][j].op != VT_CLOSEPATH) {
state.json_write_array();
state.json_write_float(rings[i][j].lon);
state.json_write_float(rings[i][j].lat);
write_coords(state, rings[i][j], scale);
state.json_end_array();
} else {
state.json_write_array();
state.json_write_float(rings[i][0].lon);
state.json_write_float(rings[i][0].lat);
write_coords(state, rings[i][j], scale);
state.json_end_array();
}
}

@ -60,7 +60,7 @@ struct json_writer {
void adds(std::string const &s);
};

void layer_to_geojson(mvt_layer const &layer, unsigned z, unsigned x, unsigned y, bool comma, bool name, bool zoom, bool dropped, unsigned long long index, long long sequence, long long extent, bool complain, json_writer &state);
void layer_to_geojson(mvt_layer const &layer, unsigned z, unsigned x, unsigned y, bool comma, bool name, bool zoom, bool dropped, unsigned long long index, long long sequence, long long extent, bool complain, json_writer &state, double scale);
void fprintq(FILE *f, const char *s);

#endif