Merge pull request #492 from mapbox/rate-clarify

Clarify documentation; add --no-progress-indicator option; fix many compiler warnings
Eric Fischer 2017-11-17 14:11:22 -08:00 committed by GitHub
commit 4754084130
28 changed files with 523 additions and 468 deletions


@ -1,3 +1,8 @@
## 1.26.7
* Add an option to quiet the progress indicator but not warnings
* Enable more compiler warnings and fix related problems
## 1.26.6
* Be more careful about checking for overflow when parsing numbers


@ -9,7 +9,7 @@ CXX := $(CXX)
CFLAGS := $(CFLAGS)
CXXFLAGS := $(CXXFLAGS) -std=c++11
LDFLAGS := $(LDFLAGS)
WARNING_FLAGS := -Wall -Wshadow -Wsign-compare
WARNING_FLAGS := -Wall -Wshadow -Wsign-compare -Wextra -Wunreachable-code -Wuninitialized -Wshadow
RELEASE_FLAGS := -O3 -DNDEBUG
DEBUG_FLAGS := -O0 -DDEBUG -fno-inline-functions -fno-omit-frame-pointer


@ -96,13 +96,15 @@ delete the file that already exists with that name.
If you aren't sure what the right maxzoom is for your data, `-zg` will guess one for you
based on the density of features.
If you are mapping point features, you will often want to use `-Bg` to automatically choose
a base zoom level for dot dropping. If that doesn't work out for you, try
`-r1 --drop-fraction-as-needed` to turn off the normal dot dropping and instead
only drop features if the tiles get too big.
Tippecanoe will normally drop a fraction of point features at zooms below the maxzoom,
to keep the low-zoom tiles from getting too big. If you have a smaller data set where
all the points would fit without dropping any of them, use `-r1` to keep them all.
If you do want point dropping, but you still want the tiles to be denser than `-zg`
thinks they should be, use `-B` to set a basezoom lower than the maxzoom.
If you are mapping points or polygons, you will often want to use `--drop-densest-as-needed`
to drop some of them if necessary to make the low zoom levels work.
If some of your tiles are coming out too big in spite of the settings above, you will
often want to use `--drop-densest-as-needed` to drop whatever fraction of the features
is necessary at each zoom level to make that zoom level's tiles work.
If your features have a lot of attributes, use `-y` to keep only the ones you really need.
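For example, a reasonable starting point combining the options above might look like the following, where the file names and the NAME and POP_EST attribute names are placeholders for your own data:
tippecanoe -zg -o out.mbtiles --drop-densest-as-needed -y NAME -y POP_EST in.geojson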
@ -262,7 +264,8 @@ tippecanoe -z5 -o filtered.mbtiles -j '{ "ne_10m_admin_0_countries": [ "all", [
### Progress indicator
* `-q` or `--quiet`: Work quietly instead of reporting progress
* `-q` or `--quiet`: Work quietly instead of reporting progress or warning messages
* `-Q` or `--no-progress-indicator`: Don't report progress, but still give warnings
* `-v` or `--version`: Report Tippecanoe's version number
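For example, to silence only the progress percentage while keeping warnings visible, an invocation along these lines (file names are placeholders) should work:
tippecanoe -Q -zg -o out.mbtiles in.geojson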
### Filters


@ -120,7 +120,7 @@ void handle(std::string message, int z, unsigned x, unsigned y, int describe, st
}
// X and Y are unsigned, so no need to check <0
if (x > (1 << z) || y > (1 << z)) {
if (x > (1ULL << z) || y > (1ULL << z)) {
fprintf(stderr, "Impossible tile %d/%u/%u\n", z, x, y);
exit(EXIT_FAILURE);
}


@ -22,13 +22,13 @@
#define MULTIPOLYGON 5
struct queued_feature {
protozero::pbf_reader pbf;
size_t dim;
double e;
std::vector<std::string> *keys;
struct serialization_state *sst;
int layer;
std::string layername;
protozero::pbf_reader pbf{};
size_t dim = 0;
double e = 0;
std::vector<std::string> *keys = NULL;
std::vector<struct serialization_state> *sst = NULL;
int layer = 0;
std::string layername = "";
};
static std::vector<queued_feature> feature_queue;
@ -40,7 +40,7 @@ void ensureDim(size_t dim) {
}
}
serial_val readValue(protozero::pbf_reader &pbf, std::vector<std::string> &keys) {
serial_val readValue(protozero::pbf_reader &pbf) {
serial_val sv;
sv.type = mvt_null;
sv.s = "null";
@ -94,7 +94,7 @@ serial_val readValue(protozero::pbf_reader &pbf, std::vector<std::string> &keys)
return sv;
}
drawvec readPoint(std::vector<long long> &coords, std::vector<int> &lengths, size_t dim, double e) {
drawvec readPoint(std::vector<long long> &coords, size_t dim, double e) {
ensureDim(dim);
long long x, y;
@ -104,7 +104,7 @@ drawvec readPoint(std::vector<long long> &coords, std::vector<int> &lengths, siz
return dv;
}
drawvec readLinePart(std::vector<long long> &coords, std::vector<int> &lengths, size_t dim, double e, size_t start, size_t end, bool closed) {
drawvec readLinePart(std::vector<long long> &coords, size_t dim, double e, size_t start, size_t end, bool closed) {
ensureDim(dim);
drawvec dv;
@ -141,19 +141,19 @@ drawvec readLinePart(std::vector<long long> &coords, std::vector<int> &lengths,
return dv;
}
drawvec readLine(std::vector<long long> &coords, std::vector<int> &lengths, size_t dim, double e, bool closed) {
return readLinePart(coords, lengths, dim, e, 0, coords.size(), closed);
drawvec readLine(std::vector<long long> &coords, size_t dim, double e, bool closed) {
return readLinePart(coords, dim, e, 0, coords.size(), closed);
}
drawvec readMultiLine(std::vector<long long> &coords, std::vector<int> &lengths, size_t dim, double e, bool closed) {
if (lengths.size() == 0) {
return readLinePart(coords, lengths, dim, e, 0, coords.size(), closed);
return readLinePart(coords, dim, e, 0, coords.size(), closed);
}
drawvec dv;
size_t here = 0;
for (size_t i = 0; i < lengths.size(); i++) {
drawvec dv2 = readLinePart(coords, lengths, dim, e, here, here + lengths[i] * dim, closed);
drawvec dv2 = readLinePart(coords, dim, e, here, here + lengths[i] * dim, closed);
here += lengths[i] * dim;
for (size_t j = 0; j < dv2.size(); j++) {
@ -168,7 +168,7 @@ drawvec readMultiPolygon(std::vector<long long> &coords, std::vector<int> &lengt
ensureDim(dim);
if (lengths.size() == 0) {
return readLinePart(coords, lengths, dim, e, 0, coords.size(), true);
return readLinePart(coords, dim, e, 0, coords.size(), true);
}
size_t polys = lengths[0];
@ -180,7 +180,7 @@ drawvec readMultiPolygon(std::vector<long long> &coords, std::vector<int> &lengt
size_t rings = lengths[n++];
for (size_t j = 0; j < rings; j++) {
drawvec dv2 = readLinePart(coords, lengths, dim, e, here, here + lengths[n] * dim, true);
drawvec dv2 = readLinePart(coords, dim, e, here, here + lengths[n] * dim, true);
here += lengths[n] * dim;
n++;
@ -196,8 +196,8 @@ drawvec readMultiPolygon(std::vector<long long> &coords, std::vector<int> &lengt
}
struct drawvec_type {
drawvec dv;
int type;
drawvec dv{};
int type = 0;
};
std::vector<drawvec_type> readGeometry(protozero::pbf_reader &pbf, size_t dim, double e, std::vector<std::string> &keys) {
@ -246,11 +246,11 @@ std::vector<drawvec_type> readGeometry(protozero::pbf_reader &pbf, size_t dim, d
drawvec_type dv;
if (type == POINT) {
dv.dv = readPoint(coords, lengths, dim, e);
dv.dv = readPoint(coords, dim, e);
} else if (type == MULTIPOINT) {
dv.dv = readLine(coords, lengths, dim, e, false);
dv.dv = readLine(coords, dim, e, false);
} else if (type == LINESTRING) {
dv.dv = readLine(coords, lengths, dim, e, false);
dv.dv = readLine(coords, dim, e, false);
} else if (type == POLYGON) {
dv.dv = readMultiLine(coords, lengths, dim, e, true);
} else if (type == MULTIPOLYGON) {
@ -311,7 +311,7 @@ void readFeature(protozero::pbf_reader &pbf, size_t dim, double e, std::vector<s
case 13: {
protozero::pbf_reader value_reader(pbf.get_message());
values.push_back(readValue(value_reader, keys));
values.push_back(readValue(value_reader));
break;
}
@ -425,6 +425,10 @@ struct queue_run_arg {
size_t start;
size_t end;
size_t segment;
queue_run_arg(size_t start1, size_t end1, size_t segment1)
: start(start1), end(end1), segment(segment1) {
}
};
void *run_parse_feature(void *v) {
@ -432,7 +436,7 @@ void *run_parse_feature(void *v) {
for (size_t i = qra->start; i < qra->end; i++) {
struct queued_feature &qf = feature_queue[i];
readFeature(qf.pbf, qf.dim, qf.e, *qf.keys, &qf.sst[qra->segment], qf.layer, qf.layername);
readFeature(qf.pbf, qf.dim, qf.e, *qf.keys, &(*qf.sst)[qra->segment], qf.layer, qf.layername);
}
return NULL;
@ -443,16 +447,21 @@ void runQueue() {
return;
}
struct queue_run_arg qra[CPUS];
pthread_t pthreads[CPUS];
std::vector<struct queue_run_arg> qra;
std::vector<pthread_t> pthreads;
pthreads.resize(CPUS);
for (size_t i = 0; i < CPUS; i++) {
*(feature_queue[0].sst[i].layer_seq) = *(feature_queue[0].sst[0].layer_seq) + feature_queue.size() * i / CPUS;
*((*(feature_queue[0].sst))[i].layer_seq) = *((*(feature_queue[0].sst))[0].layer_seq) + feature_queue.size() * i / CPUS;
qra[i].start = feature_queue.size() * i / CPUS;
qra[i].end = feature_queue.size() * (i + 1) / CPUS;
qra[i].segment = i;
qra.push_back(queue_run_arg(
feature_queue.size() * i / CPUS,
feature_queue.size() * (i + 1) / CPUS,
i));
}
for (size_t i = 0; i < CPUS; i++) {
if (pthread_create(&pthreads[i], NULL, run_parse_feature, &qra[i]) != 0) {
perror("pthread_create");
exit(EXIT_FAILURE);
@ -467,11 +476,11 @@ void runQueue() {
}
}
*(feature_queue[0].sst[0].layer_seq) = *(feature_queue[0].sst[CPUS - 1].layer_seq);
*((*(feature_queue[0].sst))[0].layer_seq) = *((*(feature_queue[0].sst))[CPUS - 1].layer_seq);
feature_queue.clear();
}
void queueFeature(protozero::pbf_reader &pbf, size_t dim, double e, std::vector<std::string> &keys, struct serialization_state *sst, int layer, std::string layername) {
void queueFeature(protozero::pbf_reader &pbf, size_t dim, double e, std::vector<std::string> &keys, std::vector<struct serialization_state> *sst, int layer, std::string layername) {
struct queued_feature qf;
qf.pbf = pbf;
qf.dim = dim;
@ -488,7 +497,7 @@ void queueFeature(protozero::pbf_reader &pbf, size_t dim, double e, std::vector<
}
}
void outBareGeometry(drawvec const &dv, int type, size_t dim, double e, std::vector<std::string> &keys, struct serialization_state *sst, int layer, std::string layername) {
void outBareGeometry(drawvec const &dv, int type, struct serialization_state *sst, int layer, std::string layername) {
serial_feature sf;
sf.layer = layer;
@ -506,7 +515,7 @@ void outBareGeometry(drawvec const &dv, int type, size_t dim, double e, std::vec
serialize_feature(sst, sf);
}
void readFeatureCollection(protozero::pbf_reader &pbf, size_t dim, double e, std::vector<std::string> &keys, struct serialization_state *sst, int layer, std::string layername) {
void readFeatureCollection(protozero::pbf_reader &pbf, size_t dim, double e, std::vector<std::string> &keys, std::vector<struct serialization_state> *sst, int layer, std::string layername) {
while (pbf.next()) {
switch (pbf.tag()) {
case 1: {
@ -521,7 +530,7 @@ void readFeatureCollection(protozero::pbf_reader &pbf, size_t dim, double e, std
}
}
void parse_geobuf(struct serialization_state *sst, const char *src, size_t len, int layer, std::string layername) {
void parse_geobuf(std::vector<struct serialization_state> *sst, const char *src, size_t len, int layer, std::string layername) {
protozero::pbf_reader pbf(src, len);
size_t dim = 2;
@ -558,7 +567,8 @@ void parse_geobuf(struct serialization_state *sst, const char *src, size_t len,
protozero::pbf_reader geometry_reader(pbf.get_message());
std::vector<drawvec_type> dv = readGeometry(geometry_reader, dim, e, keys);
for (size_t i = 0; i < dv.size(); i++) {
outBareGeometry(dv[i].dv, dv[i].type, dim, e, keys, sst, layer, layername);
// Always on thread 0
outBareGeometry(dv[i].dv, dv[i].type, &(*sst)[0], layer, layername);
}
break;
}


@ -8,6 +8,6 @@
#include "mbtiles.hpp"
#include "serial.hpp"
void parse_geobuf(struct serialization_state *sst, const char *s, size_t len, int layer, std::string layername);
void parse_geobuf(std::vector<struct serialization_state> *sst, const char *s, size_t len, int layer, std::string layername);
#endif


@ -146,10 +146,15 @@ int serialize_geojson_feature(struct serialization_state *sst, json_object *geom
nprop = properties->length;
}
char *metakey[nprop];
std::vector<char *> metakey;
metakey.resize(nprop);
std::vector<std::string> metaval;
metaval.resize(nprop);
int metatype[nprop];
std::vector<int> metatype;
metatype.resize(nprop);
size_t m = 0;
for (size_t i = 0; i < nprop; i++) {
@ -158,7 +163,7 @@ int serialize_geojson_feature(struct serialization_state *sst, json_object *geom
int type = -1;
std::string val;
stringify_value(properties->values[i], type, val, sst->fname, sst->line, feature, properties->keys[i]->string);
stringify_value(properties->values[i], type, val, sst->fname, sst->line, feature);
if (type >= 0) {
metakey[m] = properties->keys[i]->string;


@ -13,10 +13,12 @@ struct parse_json_args {
json_pull *jp;
int layer;
std::string *layername;
std::map<std::string, int> const *attribute_types;
bool want_dist;
struct serialization_state *sst;
parse_json_args(json_pull *jp1, int layer1, std::string *layername1, struct serialization_state *sst1)
: jp(jp1), layer(layer1), layername(layername1), sst(sst1) {
}
};
struct json_pull *json_begin_map(char *map, long long len);


@ -219,7 +219,7 @@ static void decode_clipped(mapbox::geometry::multi_polygon<long long> &t, drawve
}
}
drawvec clean_or_clip_poly(drawvec &geom, int z, int detail, int buffer, bool clip) {
drawvec clean_or_clip_poly(drawvec &geom, int z, int buffer, bool clip) {
mapbox::geometry::wagyu::wagyu<long long> wagyu;
geom = remove_noop(geom, VT_POLYGON, 0);
@ -345,7 +345,7 @@ static int pnpoly(drawvec &vert, size_t start, size_t nvert, long long testx, lo
return c;
}
void check_polygon(drawvec &geom, drawvec &before) {
void check_polygon(drawvec &geom) {
geom = remove_noop(geom, VT_POLYGON, 0);
mapbox::geometry::multi_polygon<long long> mp;
@ -635,7 +635,7 @@ int quick_check(long long *bbox, int z, long long buffer) {
return 2;
}
bool point_within_tile(long long x, long long y, int z, long long buffer) {
bool point_within_tile(long long x, long long y, int z) {
// No adjustment for buffer, because the point must be
// strictly within the tile to appear exactly once


@ -22,18 +22,18 @@ struct draw {
long long y : 40;
signed char necessary;
draw(int nop, long long nx, long long ny) {
this->op = nop;
this->x = nx;
this->y = ny;
this->necessary = 0;
draw(int nop, long long nx, long long ny)
: x(nx),
op(nop),
y(ny),
necessary(0) {
}
draw() {
this->op = 0;
this->x = 0;
this->y = 0;
this->necessary = 0;
draw()
: x(0),
op(0),
y(0),
necessary(0) {
}
bool operator<(draw const &s) const {
@ -59,19 +59,19 @@ drawvec decode_geometry(FILE *meta, long long *geompos, int z, unsigned tx, unsi
void to_tile_scale(drawvec &geom, int z, int detail);
drawvec remove_noop(drawvec geom, int type, int shift);
drawvec clip_point(drawvec &geom, int z, long long buffer);
drawvec clean_or_clip_poly(drawvec &geom, int z, int detail, int buffer, bool clip);
drawvec clean_or_clip_poly(drawvec &geom, int z, int buffer, bool clip);
drawvec simple_clip_poly(drawvec &geom, int z, int buffer);
drawvec close_poly(drawvec &geom);
drawvec reduce_tiny_poly(drawvec &geom, int z, int detail, bool *reduced, double *accum_area);
drawvec clip_lines(drawvec &geom, int z, long long buffer);
drawvec stairstep(drawvec &geom, int z, int detail);
bool point_within_tile(long long x, long long y, int z, long long buffer);
bool point_within_tile(long long x, long long y, int z);
int quick_check(long long *bbox, int z, long long buffer);
drawvec simplify_lines(drawvec &geom, int z, int detail, bool mark_tile_bounds, double simplification, size_t retain);
drawvec reorder_lines(drawvec &geom);
drawvec fix_polygon(drawvec &geom);
std::vector<drawvec> chop_polygon(std::vector<drawvec> &geoms);
void check_polygon(drawvec &geom, drawvec &before);
void check_polygon(drawvec &geom);
double get_area(drawvec &geom, size_t i, size_t j);
double get_mp_area(drawvec &geom);

main.cpp (201 changed lines)

@ -63,6 +63,7 @@ static int full_detail = -1;
static int min_detail = 7;
int quiet = 0;
int quiet_progress = 0;
int geometry_scale = 0;
double simplification = 1;
size_t max_tile_size = 500000;
@ -71,8 +72,8 @@ int prevent[256];
int additional[256];
struct source {
std::string layer;
std::string file;
std::string layer = "";
std::string file = "";
};
size_t CPUS;
@ -80,13 +81,12 @@ size_t TEMP_FILES;
long long MAX_FILES;
static long long diskfree;
void checkdisk(struct reader *r, int nreader) {
void checkdisk(std::vector<struct reader> *r) {
long long used = 0;
int i;
for (i = 0; i < nreader; i++) {
for (size_t i = 0; i < r->size(); i++) {
// Meta, pool, and tree are used once.
// Geometry and index will be duplicated during sorting and tiling.
used += r[i].metapos + 2 * r[i].geompos + 2 * r[i].indexpos + r[i].poolfile->len + r[i].treefile->len;
used += (*r)[i].metapos + 2 * (*r)[i].geompos + 2 * (*r)[i].indexpos + (*r)[i].poolfile->len + (*r)[i].treefile->len;
}
static int warned = 0;
@ -167,9 +167,9 @@ int indexcmp(const void *v1, const void *v2) {
const struct index *i1 = (const struct index *) v1;
const struct index *i2 = (const struct index *) v2;
if (i1->index < i2->index) {
if (i1->ix < i2->ix) {
return -1;
} else if (i1->index > i2->index) {
} else if (i1->ix > i2->ix) {
return 1;
}
@ -209,10 +209,10 @@ struct drop_state {
unsigned y;
};
int calc_feature_minzoom(struct index *ix, struct drop_state *ds, int maxzoom, int basezoom, double droprate, double gamma) {
int calc_feature_minzoom(struct index *ix, struct drop_state *ds, int maxzoom, double gamma) {
int feature_minzoom = 0;
unsigned xx, yy;
decode(ix->index, &xx, &yy);
decode(ix->ix, &xx, &yy);
if (gamma >= 0 && (ix->t == VT_POINT ||
(additional[A_LINE_DROP] && ix->t == VT_LINE) ||
@ -255,7 +255,7 @@ int calc_feature_minzoom(struct index *ix, struct drop_state *ds, int maxzoom, i
return feature_minzoom;
}
static void merge(struct mergelist *merges, size_t nmerges, unsigned char *map, FILE *indexfile, int bytes, long long nrec, char *geom_map, FILE *geom_out, long long *geompos, long long *progress, long long *progress_max, long long *progress_reported, int maxzoom, int basezoom, double droprate, double gamma, struct drop_state *ds) {
static void merge(struct mergelist *merges, size_t nmerges, unsigned char *map, FILE *indexfile, int bytes, char *geom_map, FILE *geom_out, long long *geompos, long long *progress, long long *progress_max, long long *progress_reported, int maxzoom, double gamma, struct drop_state *ds) {
struct mergelist *head = NULL;
for (size_t i = 0; i < nmerges; i++) {
@ -269,12 +269,12 @@ static void merge(struct mergelist *merges, size_t nmerges, unsigned char *map,
long long pos = *geompos;
fwrite_check(geom_map + ix.start, 1, ix.end - ix.start, geom_out, "merge geometry");
*geompos += ix.end - ix.start;
int feature_minzoom = calc_feature_minzoom(&ix, ds, maxzoom, basezoom, droprate, gamma);
int feature_minzoom = calc_feature_minzoom(&ix, ds, maxzoom, gamma);
serialize_byte(geom_out, feature_minzoom, geompos, "merge geometry");
// Count this as a 75%-accomplishment, since we already 25%-counted it
*progress += (ix.end - ix.start) * 3 / 4;
if (!quiet && 100 * *progress / *progress_max != *progress_reported) {
if (!quiet && !quiet_progress && 100 * *progress / *progress_max != *progress_reported) {
fprintf(stderr, "Reordering geometry: %lld%% \r", 100 * *progress / *progress_max);
*progress_reported = 100 * *progress / *progress_max;
}
@ -303,6 +303,10 @@ struct sort_arg {
size_t nmerges;
long long unit;
int bytes;
sort_arg(int task1, int cpus1, long long indexpos1, struct mergelist *merges1, int indexfd1, size_t nmerges1, long long unit1, int bytes1)
: task(task1), cpus(cpus1), indexpos(indexpos1), merges(merges1), indexfd(indexfd1), nmerges(nmerges1), unit(unit1), bytes(bytes1) {
}
};
void *run_sort(void *v) {
@ -350,7 +354,7 @@ void *run_sort(void *v) {
return NULL;
}
void do_read_parallel(char *map, long long len, long long initial_offset, const char *reading, struct reader *readers, volatile long long *progress_seq, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, json_object *filter, char *fname, int basezoom, int source, int nlayers, std::vector<std::map<std::string, layermap_entry> > *layermaps, double droprate, int *initialized, unsigned *initial_x, unsigned *initial_y, int maxzoom, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, int separator, double *dist_sum, size_t *dist_count, bool want_dist, bool filters) {
void do_read_parallel(char *map, long long len, long long initial_offset, const char *reading, std::vector<struct reader> *readers, volatile long long *progress_seq, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, json_object *filter, int basezoom, int source, std::vector<std::map<std::string, layermap_entry> > *layermaps, int *initialized, unsigned *initial_x, unsigned *initial_y, int maxzoom, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, int separator, double *dist_sum, size_t *dist_count, bool want_dist, bool filters) {
long long segs[CPUS + 1];
segs[0] = 0;
segs[CPUS] = len;
@ -374,8 +378,11 @@ void do_read_parallel(char *map, long long len, long long initial_offset, const
dist_sums[i] = dist_counts[i] = 0;
}
struct parse_json_args pja[CPUS];
struct serialization_state sst[CPUS];
std::vector<parse_json_args> pja;
std::vector<serialization_state> sst;
sst.resize(CPUS);
pthread_t pthreads[CPUS];
std::vector<std::set<type_and_string> > file_subkeys;
@ -407,12 +414,14 @@ void do_read_parallel(char *map, long long len, long long initial_offset, const
sst[i].basezoom = basezoom;
sst[i].attribute_types = attribute_types;
pja[i].jp = json_begin_map(map + segs[i], segs[i + 1] - segs[i]);
pja[i].layer = source;
pja[i].layername = &layername;
pja[i].sst = &sst[i];
pja.push_back(parse_json_args(
json_begin_map(map + segs[i], segs[i + 1] - segs[i]),
source,
&layername,
&sst[i]));
}
for (size_t i = 0; i < CPUS; i++) {
if (pthread_create(&pthreads[i], NULL, run_parse_json, &pja[i]) != 0) {
perror("pthread_create");
exit(EXIT_FAILURE);
@ -434,37 +443,34 @@ void do_read_parallel(char *map, long long len, long long initial_offset, const
}
struct read_parallel_arg {
int fd;
FILE *fp;
long long offset;
long long len;
volatile int *is_parsing;
int separator;
int fd = 0;
FILE *fp = NULL;
long long offset = 0;
long long len = 0;
volatile int *is_parsing = NULL;
int separator = 0;
const char *reading;
struct reader *readers;
volatile long long *progress_seq;
std::set<std::string> *exclude;
std::set<std::string> *include;
int exclude_all;
json_object *filter;
char *fname;
int maxzoom;
int basezoom;
int source;
int nlayers;
std::vector<std::map<std::string, layermap_entry> > *layermaps;
double droprate;
int *initialized;
unsigned *initial_x;
unsigned *initial_y;
std::string layername;
bool uses_gamma;
std::map<std::string, int> const *attribute_types;
double *dist_sum;
size_t *dist_count;
bool want_dist;
bool filters;
const char *reading = NULL;
std::vector<struct reader> *readers = NULL;
volatile long long *progress_seq = NULL;
std::set<std::string> *exclude = NULL;
std::set<std::string> *include = NULL;
int exclude_all = 0;
json_object *filter = NULL;
int maxzoom = 0;
int basezoom = 0;
int source = 0;
std::vector<std::map<std::string, layermap_entry> > *layermaps = NULL;
int *initialized = NULL;
unsigned *initial_x = NULL;
unsigned *initial_y = NULL;
std::string layername = "";
bool uses_gamma = false;
std::map<std::string, int> const *attribute_types = NULL;
double *dist_sum = NULL;
size_t *dist_count = NULL;
bool want_dist = false;
bool filters = false;
};
void *run_read_parallel(void *v) {
@ -486,7 +492,7 @@ void *run_read_parallel(void *v) {
}
madvise(map, rpa->len, MADV_RANDOM); // sequential, but from several pointers at once
do_read_parallel(map, rpa->len, rpa->offset, rpa->reading, rpa->readers, rpa->progress_seq, rpa->exclude, rpa->include, rpa->exclude_all, rpa->filter, rpa->fname, rpa->basezoom, rpa->source, rpa->nlayers, rpa->layermaps, rpa->droprate, rpa->initialized, rpa->initial_x, rpa->initial_y, rpa->maxzoom, rpa->layername, rpa->uses_gamma, rpa->attribute_types, rpa->separator, rpa->dist_sum, rpa->dist_count, rpa->want_dist, rpa->filters);
do_read_parallel(map, rpa->len, rpa->offset, rpa->reading, rpa->readers, rpa->progress_seq, rpa->exclude, rpa->include, rpa->exclude_all, rpa->filter, rpa->basezoom, rpa->source, rpa->layermaps, rpa->initialized, rpa->initial_x, rpa->initial_y, rpa->maxzoom, rpa->layername, rpa->uses_gamma, rpa->attribute_types, rpa->separator, rpa->dist_sum, rpa->dist_count, rpa->want_dist, rpa->filters);
madvise(map, rpa->len, MADV_DONTNEED);
if (munmap(map, rpa->len) != 0) {
@ -503,7 +509,7 @@ void *run_read_parallel(void *v) {
return NULL;
}
void start_parsing(int fd, FILE *fp, long long offset, long long len, volatile int *is_parsing, pthread_t *parallel_parser, bool &parser_created, const char *reading, struct reader *readers, volatile long long *progress_seq, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, json_object *filter, char *fname, int basezoom, int source, int nlayers, std::vector<std::map<std::string, layermap_entry> > &layermaps, double droprate, int *initialized, unsigned *initial_x, unsigned *initial_y, int maxzoom, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, int separator, double *dist_sum, size_t *dist_count, bool want_dist, bool filters) {
void start_parsing(int fd, FILE *fp, long long offset, long long len, volatile int *is_parsing, pthread_t *parallel_parser, bool &parser_created, const char *reading, std::vector<struct reader> *readers, volatile long long *progress_seq, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, json_object *filter, int basezoom, int source, std::vector<std::map<std::string, layermap_entry> > &layermaps, int *initialized, unsigned *initial_x, unsigned *initial_y, int maxzoom, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, int separator, double *dist_sum, size_t *dist_count, bool want_dist, bool filters) {
// This has to kick off an intermediate thread to start the parser threads,
// so the main thread can get back to reading the next input stage while
// the intermediate thread waits for the completion of the parser threads.
@ -530,12 +536,9 @@ void start_parsing(int fd, FILE *fp, long long offset, long long len, volatile i
rpa->include = include;
rpa->exclude_all = exclude_all;
rpa->filter = filter;
rpa->fname = fname;
rpa->basezoom = basezoom;
rpa->source = source;
rpa->nlayers = nlayers;
rpa->layermaps = &layermaps;
rpa->droprate = droprate;
rpa->initialized = initialized;
rpa->initial_x = initial_x;
rpa->initial_y = initial_y;
@ -633,7 +636,7 @@ void radix1(int *geomfds_in, int *indexfds_in, int inputs, int prefix, int split
for (size_t a = 0; a < indexst.st_size / sizeof(struct index); a++) {
struct index ix = indexmap[a];
unsigned long long which = (ix.index << prefix) >> (64 - splitbits);
unsigned long long which = (ix.ix << prefix) >> (64 - splitbits);
long long pos = sub_geompos[which];
fwrite_check(geommap + ix.start, ix.end - ix.start, 1, geomfiles[which], "geom");
@ -641,7 +644,7 @@ void radix1(int *geomfds_in, int *indexfds_in, int inputs, int prefix, int split
// Count this as a 25%-accomplishment, since we will copy again
*progress += (ix.end - ix.start) / 4;
if (!quiet && 100 * *progress / *progress_max != *progress_reported) {
if (!quiet && !quiet_progress && 100 * *progress / *progress_max != *progress_reported) {
fprintf(stderr, "Reordering geometry: %lld%% \r", 100 * *progress / *progress_max);
*progress_reported = 100 * *progress / *progress_max;
}
@ -728,18 +731,21 @@ void radix1(int *geomfds_in, int *indexfds_in, int inputs, int prefix, int split
}
pthread_t pthreads[CPUS];
struct sort_arg args[CPUS];
std::vector<sort_arg> args;
for (size_t a = 0; a < CPUS; a++) {
args[a].task = a;
args[a].cpus = CPUS;
args[a].indexpos = indexpos;
args[a].merges = merges;
args[a].indexfd = indexfds[i];
args[a].nmerges = nmerges;
args[a].unit = unit;
args[a].bytes = bytes;
args.push_back(sort_arg(
a,
CPUS,
indexpos,
merges,
indexfds[i],
nmerges,
unit,
bytes));
}
for (size_t a = 0; a < CPUS; a++) {
if (pthread_create(&pthreads[a], NULL, run_sort, &args[a]) != 0) {
perror("pthread_create");
exit(EXIT_FAILURE);
@ -770,7 +776,7 @@ void radix1(int *geomfds_in, int *indexfds_in, int inputs, int prefix, int split
madvise(geommap, geomst.st_size, MADV_RANDOM);
madvise(geommap, geomst.st_size, MADV_WILLNEED);
merge(merges, nmerges, (unsigned char *) indexmap, indexfile, bytes, indexpos / bytes, geommap, geomfile, geompos_out, progress, progress_max, progress_reported, maxzoom, basezoom, droprate, gamma, ds);
merge(merges, nmerges, (unsigned char *) indexmap, indexfile, bytes, geommap, geomfile, geompos_out, progress, progress_max, progress_reported, maxzoom, gamma, ds);
madvise(indexmap, indexst.st_size, MADV_DONTNEED);
if (munmap(indexmap, indexst.st_size) < 0) {
@ -805,12 +811,12 @@ void radix1(int *geomfds_in, int *indexfds_in, int inputs, int prefix, int split
fwrite_check(geommap + ix.start, ix.end - ix.start, 1, geomfile, "geom");
*geompos_out += ix.end - ix.start;
int feature_minzoom = calc_feature_minzoom(&ix, ds, maxzoom, basezoom, droprate, gamma);
int feature_minzoom = calc_feature_minzoom(&ix, ds, maxzoom, gamma);
serialize_byte(geomfile, feature_minzoom, geompos_out, "merge geometry");
// Count this as a 75%-accomplishment, since we already 25%-counted it
*progress += (ix.end - ix.start) * 3 / 4;
if (!quiet && 100 * *progress / *progress_max != *progress_reported) {
if (!quiet && !quiet_progress && 100 * *progress / *progress_max != *progress_reported) {
fprintf(stderr, "Reordering geometry: %lld%% \r", 100 * *progress / *progress_max);
*progress_reported = 100 * *progress / *progress_max;
}
@ -877,7 +883,7 @@ void prep_drop_states(struct drop_state *ds, int maxzoom, int basezoom, double d
}
}
void radix(struct reader *readers, int nreaders, FILE *geomfile, int geomfd, FILE *indexfile, int indexfd, const char *tmpdir, long long *geompos, int maxzoom, int basezoom, double droprate, double gamma) {
void radix(std::vector<struct reader> &readers, int nreaders, FILE *geomfile, FILE *indexfile, const char *tmpdir, long long *geompos, int maxzoom, int basezoom, double droprate, double gamma) {
// Run through the index and geometry for each reader,
// splitting the contents out by index into as many
// sub-files as we can write to simultaneously.
@ -954,7 +960,7 @@ void radix(struct reader *readers, int nreaders, FILE *geomfile, int geomfd, FIL
}
}
void choose_first_zoom(long long *file_bbox, struct reader *readers, unsigned *iz, unsigned *ix, unsigned *iy, int minzoom, int buffer) {
void choose_first_zoom(long long *file_bbox, std::vector<struct reader> &readers, unsigned *iz, unsigned *ix, unsigned *iy, int minzoom, int buffer) {
for (size_t i = 0; i < CPUS; i++) {
if (readers[i].file_bbox[0] < file_bbox[0]) {
file_bbox[0] = readers[i].file_bbox[0];
@ -1008,7 +1014,8 @@ void choose_first_zoom(long long *file_bbox, struct reader *readers, unsigned *i
int read_input(std::vector<source> &sources, char *fname, int maxzoom, int minzoom, int basezoom, double basezoom_marker_width, sqlite3 *outdb, const char *outdir, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, json_object *filter, double droprate, int buffer, const char *tmpdir, double gamma, int read_parallel, int forcetable, const char *attribution, bool uses_gamma, long long *file_bbox, const char *prefilter, const char *postfilter, const char *description, bool guess_maxzoom, std::map<std::string, int> const *attribute_types, const char *pgm) {
int ret = EXIT_SUCCESS;
struct reader readers[CPUS];
std::vector<struct reader> readers;
readers.resize(CPUS);
for (size_t i = 0; i < CPUS; i++) {
struct reader *r = &readers[i];
@ -1215,7 +1222,8 @@ int read_input(std::vector<source> &sources, char *fname, int maxzoom, int minzo
long long layer_seq[CPUS];
double dist_sums[CPUS];
size_t dist_counts[CPUS];
struct serialization_state sst[CPUS];
std::vector<struct serialization_state> sst;
sst.resize(CPUS);
for (size_t i = 0; i < CPUS; i++) {
layer_seq[i] = overall_offset;
@ -1226,7 +1234,7 @@ int read_input(std::vector<source> &sources, char *fname, int maxzoom, int minzo
sst[i].line = 0;
sst[i].layer_seq = &layer_seq[i];
sst[i].progress_seq = &progress_seq;
sst[i].readers = readers;
sst[i].readers = &readers;
sst[i].segment = i;
sst[i].initial_x = &initial_x[i];
sst[i].initial_y = &initial_y[i];
@ -1246,7 +1254,7 @@ int read_input(std::vector<source> &sources, char *fname, int maxzoom, int minzo
sst[i].attribute_types = attribute_types;
}
parse_geobuf(sst, map, st.st_size, layer, sources[layer].layer);
parse_geobuf(&sst, map, st.st_size, layer, sources[layer].layer);
for (size_t i = 0; i < CPUS; i++) {
dist_sum += dist_sums[i];
@ -1263,7 +1271,7 @@ int read_input(std::vector<source> &sources, char *fname, int maxzoom, int minzo
}
overall_offset = layer_seq[0];
checkdisk(readers, CPUS);
checkdisk(&readers);
continue;
}
@ -1304,9 +1312,9 @@ int read_input(std::vector<source> &sources, char *fname, int maxzoom, int minzo
}
if (map != NULL && map != MAP_FAILED && read_parallel_this) {
do_read_parallel(map, st.st_size - off, overall_offset, reading.c_str(), readers, &progress_seq, exclude, include, exclude_all, filter, fname, basezoom, layer, nlayers, &layermaps, droprate, initialized, initial_x, initial_y, maxzoom, sources[layer].layer, uses_gamma, attribute_types, read_parallel_this, &dist_sum, &dist_count, guess_maxzoom, prefilter != NULL || postfilter != NULL);
do_read_parallel(map, st.st_size - off, overall_offset, reading.c_str(), &readers, &progress_seq, exclude, include, exclude_all, filter, basezoom, layer, &layermaps, initialized, initial_x, initial_y, maxzoom, sources[layer].layer, uses_gamma, attribute_types, read_parallel_this, &dist_sum, &dist_count, guess_maxzoom, prefilter != NULL || postfilter != NULL);
overall_offset += st.st_size - off;
checkdisk(readers, CPUS);
checkdisk(&readers);
if (munmap(map, st.st_size - off) != 0) {
perror("munmap source file");
@ -1380,11 +1388,11 @@ int read_input(std::vector<source> &sources, char *fname, int maxzoom, int minzo
}
fflush(readfp);
start_parsing(readfd, readfp, initial_offset, ahead, &is_parsing, &parallel_parser, parser_created, reading.c_str(), readers, &progress_seq, exclude, include, exclude_all, filter, fname, basezoom, layer, nlayers, layermaps, droprate, initialized, initial_x, initial_y, maxzoom, sources[layer].layer, gamma != 0, attribute_types, read_parallel_this, &dist_sum, &dist_count, guess_maxzoom, prefilter != NULL || postfilter != NULL);
start_parsing(readfd, readfp, initial_offset, ahead, &is_parsing, &parallel_parser, parser_created, reading.c_str(), &readers, &progress_seq, exclude, include, exclude_all, filter, basezoom, layer, layermaps, initialized, initial_x, initial_y, maxzoom, sources[layer].layer, gamma != 0, attribute_types, read_parallel_this, &dist_sum, &dist_count, guess_maxzoom, prefilter != NULL || postfilter != NULL);
initial_offset += ahead;
overall_offset += ahead;
checkdisk(readers, CPUS);
checkdisk(&readers);
ahead = 0;
sprintf(readname, "%s%s", tmpdir, "/read.XXXXXXXX");
@ -1417,7 +1425,7 @@ int read_input(std::vector<source> &sources, char *fname, int maxzoom, int minzo
fflush(readfp);
if (ahead > 0) {
start_parsing(readfd, readfp, initial_offset, ahead, &is_parsing, &parallel_parser, parser_created, reading.c_str(), readers, &progress_seq, exclude, include, exclude_all, filter, fname, basezoom, layer, nlayers, layermaps, droprate, initialized, initial_x, initial_y, maxzoom, sources[layer].layer, gamma != 0, attribute_types, read_parallel_this, &dist_sum, &dist_count, guess_maxzoom, prefilter != NULL || postfilter != NULL);
start_parsing(readfd, readfp, initial_offset, ahead, &is_parsing, &parallel_parser, parser_created, reading.c_str(), &readers, &progress_seq, exclude, include, exclude_all, filter, basezoom, layer, layermaps, initialized, initial_x, initial_y, maxzoom, sources[layer].layer, gamma != 0, attribute_types, read_parallel_this, &dist_sum, &dist_count, guess_maxzoom, prefilter != NULL || postfilter != NULL);
if (parser_created) {
if (pthread_join(parallel_parser, NULL) != 0) {
@ -1427,7 +1435,7 @@ int read_input(std::vector<source> &sources, char *fname, int maxzoom, int minzo
}
overall_offset += ahead;
checkdisk(readers, CPUS);
checkdisk(&readers);
}
} else {
// Plain serial reading
@ -1440,7 +1448,7 @@ int read_input(std::vector<source> &sources, char *fname, int maxzoom, int minzo
sst.line = 0;
sst.layer_seq = &layer_seq;
sst.progress_seq = &progress_seq;
sst.readers = readers;
sst.readers = &readers;
sst.segment = 0;
sst.initial_x = &initial_x[0];
sst.initial_y = &initial_y[0];
@ -1462,7 +1470,7 @@ int read_input(std::vector<source> &sources, char *fname, int maxzoom, int minzo
parse_json(&sst, jp, layer, sources[layer].layer);
json_end(jp);
overall_offset = layer_seq;
checkdisk(readers, CPUS);
checkdisk(&readers);
}
if (fclose(fp) != 0) {
@ -1654,7 +1662,7 @@ int read_input(std::vector<source> &sources, char *fname, int maxzoom, int minzo
serialize_uint(geomfile, ix, &geompos, fname);
serialize_uint(geomfile, iy, &geompos, fname);
radix(readers, CPUS, geomfile, geomfd, indexfile, indexfd, tmpdir, &geompos, maxzoom, basezoom, droprate, gamma);
radix(readers, CPUS, geomfile, indexfile, tmpdir, &geompos, maxzoom, basezoom, droprate, gamma);
/* end of tile */
serialize_byte(geomfile, -2, &geompos, fname);
@ -1705,15 +1713,15 @@ int read_input(std::vector<source> &sources, char *fname, int maxzoom, int minzo
long long progress = -1;
long long ip;
for (ip = 1; ip < indices; ip++) {
if (map[ip].index != map[ip - 1].index) {
if (map[ip].ix != map[ip - 1].ix) {
count++;
sum += log(map[ip].index - map[ip - 1].index);
sum += log(map[ip].ix - map[ip - 1].ix);
}
long long nprogress = 100 * ip / indices;
if (nprogress != progress) {
progress = nprogress;
if (!quiet) {
if (!quiet && !quiet_progress) {
fprintf(stderr, "Maxzoom: %lld%% \r", progress);
}
}
@ -1804,12 +1812,12 @@ int read_input(std::vector<source> &sources, char *fname, int maxzoom, int minzo
long long ip;
for (ip = 0; ip < indices; ip++) {
unsigned xx, yy;
decode(map[ip].index, &xx, &yy);
decode(map[ip].ix, &xx, &yy);
long long nprogress = 100 * ip / indices;
if (nprogress != progress) {
progress = nprogress;
if (!quiet) {
if (!quiet && !quiet_progress) {
fprintf(stderr, "Base zoom/drop rate: %lld%% \r", progress);
}
}
@ -1839,7 +1847,7 @@ int read_input(std::vector<source> &sources, char *fname, int maxzoom, int minzo
tile[z].fullcount++;
if (manage_gap(map[ip].index, &tile[z].previndex, scale, gamma, &tile[z].gap)) {
if (manage_gap(map[ip].ix, &tile[z].previndex, scale, gamma, &tile[z].gap)) {
continue;
}
@ -1968,7 +1976,7 @@ int read_input(std::vector<source> &sources, char *fname, int maxzoom, int minzo
if (ip > 0 && map[ip].start != map[ip - 1].end) {
fprintf(stderr, "Mismatched index at %lld: %lld vs %lld\n", ip, map[ip].start, map[ip].end);
}
int feature_minzoom = calc_feature_minzoom(&map[ip], ds, maxzoom, basezoom, droprate, gamma);
int feature_minzoom = calc_feature_minzoom(&map[ip], ds, maxzoom, gamma);
geom[map[ip].end - 1] = feature_minzoom;
}
@ -2002,7 +2010,7 @@ int read_input(std::vector<source> &sources, char *fname, int maxzoom, int minzo
}
unsigned midx = 0, midy = 0;
int written = traverse_zooms(fd, size, meta, stringpool, &midx, &midy, maxzoom, minzoom, basezoom, outdb, outdir, droprate, buffer, fname, tmpdir, gamma, full_detail, low_detail, min_detail, meta_off, pool_off, initial_x, initial_y, simplification, layermaps, prefilter, postfilter);
int written = traverse_zooms(fd, size, meta, stringpool, &midx, &midy, maxzoom, minzoom, outdb, outdir, buffer, fname, tmpdir, gamma, full_detail, low_detail, min_detail, meta_off, pool_off, initial_x, initial_y, simplification, layermaps, prefilter, postfilter);
if (maxzoom != written) {
fprintf(stderr, "\n\n\n*** NOTE TILES ONLY COMPLETE THROUGH ZOOM %d ***\n\n\n", written);
@ -2256,6 +2264,7 @@ int main(int argc, char **argv) {
{"Progress indicator", 0, 0, 0},
{"quiet", no_argument, 0, 'q'},
{"no-progress-indicator", no_argument, 0, 'Q'},
{"version", no_argument, 0, 'v'},
{"", 0, 0, 0},
@ -2479,6 +2488,10 @@ int main(int argc, char **argv) {
quiet = 1;
break;
case 'Q':
quiet_progress = 1;
break;
case 'p': {
char *cp;
for (cp = optarg; *cp != '\0'; cp++) {


@ -4,18 +4,24 @@
#include <stddef.h>
struct index {
long long start;
long long end;
unsigned long long index;
short segment;
long long start = 0;
long long end = 0;
unsigned long long ix = 0;
short segment = 0;
unsigned short t : 2;
unsigned long long seq : (64 - 18); // pack with segment and t to stay in 32 bytes
index()
: t(0),
seq(0) {
}
};
void checkdisk(struct reader *r, int nreader);
void checkdisk(std::vector<struct reader> *r);
extern int geometry_scale;
extern int quiet;
extern int quiet_progress;
extern size_t CPUS;
extern size_t TEMP_FILES;


@ -93,13 +93,15 @@ delete the file that already exists with that name.
If you aren't sure what the right maxzoom is for your data, \fB\fC\-zg\fR will guess one for you
based on the density of features.
.PP
If you are mapping point features, you will often want to use \fB\fC\-Bg\fR to automatically choose
a base zoom level for dot dropping. If that doesn't work out for you, try
\fB\fC\-r1 \-\-drop\-fraction\-as\-needed\fR to turn off the normal dot dropping and instead
only drop features if the tiles get too big.
Tippecanoe will normally drop a fraction of point features at zooms below the maxzoom,
to keep the low\-zoom tiles from getting too big. If you have a smaller data set where
all the points would fit without dropping any of them, use \fB\fC\-r1\fR to keep them all.
If you do want point dropping, but you still want the tiles to be denser than \fB\fC\-zg\fR
thinks they should be, use \fB\fC\-B\fR to set a basezoom lower than the maxzoom.
.PP
If you are mapping points or polygons, you will often want to use \fB\fC\-\-drop\-densest\-as\-needed\fR
to drop some of them if necessary to make the low zoom levels work.
If some of your tiles are coming out too big in spite of the settings above, you will
often want to use \fB\fC\-\-drop\-densest\-as\-needed\fR to drop whatever fraction of the features
is necessary at each zoom level to make that zoom level's tiles work.
.PP
If your features have a lot of attributes, use \fB\fC\-y\fR to keep only the ones you really need.
.PP
@ -319,7 +321,9 @@ If you don't specify, it will use \fB\fC/tmp\fR\&.
.SS Progress indicator
.RS
.IP \(bu 2
\fB\fC\-q\fR or \fB\fC\-\-quiet\fR: Work quietly instead of reporting progress
\fB\fC\-q\fR or \fB\fC\-\-quiet\fR: Work quietly instead of reporting progress or warning messages
.IP \(bu 2
\fB\fC\-Q\fR or \fB\fC\-\-no\-progress\-indicator\fR: Don't report progress, but still give warnings
.IP \(bu 2
\fB\fC\-v\fR or \fB\fC\-\-version\fR: Report Tippecanoe's version number
.RE


@ -6,25 +6,25 @@
#include "mvt.hpp"
struct type_and_string {
int type;
std::string string;
int type = 0;
std::string string = "";
bool operator<(const type_and_string &o) const;
bool operator!=(const type_and_string &o) const;
};
struct type_and_string_stats {
std::vector<type_and_string> sample_values; // sorted
std::vector<type_and_string> sample_values = std::vector<type_and_string>(); // sorted
double min = INFINITY;
double max = -INFINITY;
int type = 0;
};
struct layermap_entry {
size_t id;
std::map<std::string, type_and_string_stats> file_keys;
int minzoom;
int maxzoom;
size_t id = 0;
std::map<std::string, type_and_string_stats> file_keys{};
int minzoom = 0;
int maxzoom = 0;
size_t points = 0;
size_t lines = 0;


@ -2,11 +2,11 @@
#define MEMFILE_HPP
struct memfile {
int fd;
char *map;
long long len;
long long off;
unsigned long tree;
int fd = 0;
char *map = NULL;
long long len = 0;
long long off = 0;
unsigned long tree = 0;
};
struct memfile *memfile_open(int fd);

mvt.hpp (40 changed lines)

@ -17,9 +17,9 @@ enum mvt_operation {
};
struct mvt_geometry {
long long x;
long long y;
int /* mvt_operation */ op;
long long x = 0;
long long y = 0;
int /* mvt_operation */ op = 0;
mvt_geometry(int op, long long x, long long y);
@ -43,11 +43,11 @@ enum mvt_geometry_type {
};
struct mvt_feature {
std::vector<unsigned> tags;
std::vector<mvt_geometry> geometry;
int /* mvt_geometry_type */ type;
unsigned long long id;
bool has_id;
std::vector<unsigned> tags{};
std::vector<mvt_geometry> geometry{};
int /* mvt_geometry_type */ type = 0;
unsigned long long id = 0;
bool has_id = false;
mvt_feature() {
has_id = false;
@ -80,26 +80,32 @@ struct mvt_value {
bool operator<(const mvt_value &o) const;
std::string toString();
mvt_value() {
this->type = mvt_double;
this->string_value = "";
this->numeric_value.double_value = 0;
}
};
struct mvt_layer {
int version;
std::string name;
std::vector<mvt_feature> features;
std::vector<std::string> keys;
std::vector<mvt_value> values;
long long extent;
int version = 0;
std::string name = "";
std::vector<mvt_feature> features{};
std::vector<std::string> keys{};
std::vector<mvt_value> values{};
long long extent = 0;
// Add a key-value pair to a feature, using this layer's constant pool
void tag(mvt_feature &feature, std::string key, mvt_value value);
// For tracking the key-value constants already used in this layer
std::map<std::string, size_t> key_map;
std::map<mvt_value, size_t> value_map;
std::map<std::string, size_t> key_map{};
std::map<mvt_value, size_t> value_map{};
};
struct mvt_tile {
std::vector<mvt_layer> layers;
std::vector<mvt_layer> layers{};
std::string encode();
bool decode(std::string &message, bool &was_compressed);


@ -198,7 +198,7 @@ std::vector<mvt_layer> parse_layers(int fd, int z, unsigned x, unsigned y, std::
}
if (mb_geometry[t] == VT_POLYGON) {
dv = clean_or_clip_poly(dv, 0, 0, 0, false);
dv = clean_or_clip_poly(dv, 0, 0, false);
if (dv.size() < 3) {
dv.clear();
}
@ -257,7 +257,7 @@ std::vector<mvt_layer> parse_layers(int fd, int z, unsigned x, unsigned y, std::
int tp = -1;
std::string s;
stringify_value(properties->values[i], tp, s, "Filter output", jp->line, j, "");
stringify_value(properties->values[i], tp, s, "Filter output", jp->line, j);
if (tp >= 0) {
mvt_value v = stringified_to_mvt_value(tp, s.c_str());
l->second.tag(feature, std::string(properties->keys[i]->string), v);
@ -486,7 +486,7 @@ serial_feature parse_feature(json_pull *jp, int z, unsigned x, unsigned y, std::
serial_val v;
v.type = -1;
stringify_value(properties->values[i], v.type, v.s, "Filter output", jp->line, j, "");
stringify_value(properties->values[i], v.type, v.s, "Filter output", jp->line, j);
if (v.type >= 0) {
sf.full_keys.push_back(std::string(properties->keys[i]->string));


@ -2,9 +2,9 @@
#define POOL_HPP
struct stringpool {
unsigned long left;
unsigned long right;
unsigned long off;
unsigned long left = 0;
unsigned long right = 0;
unsigned long off = 0;
};
long long addpool(struct memfile *poolfile, struct memfile *treefile, const char *s, char type);


@ -7,7 +7,7 @@
struct projection projections[] = {
{"EPSG:4326", lonlat2tile, tile2lonlat, "urn:ogc:def:crs:OGC:1.3:CRS84"},
{"EPSG:3857", epsg3857totile, tiletoepsg3857, "urn:ogc:def:crs:EPSG::3857"},
{NULL, NULL},
{NULL, NULL, NULL, NULL},
};
struct projection *projection = &projections[0];


@ -129,7 +129,7 @@ void canonicalize(json_object *o) {
}
}
void stringify_value(json_object *value, int &type, std::string &stringified, const char *reading, int line, json_object *feature, std::string const &key) {
void stringify_value(json_object *value, int &type, std::string &stringified, const char *reading, int line, json_object *feature) {
if (value != NULL) {
int vt = value->type;
std::string val;


@ -13,4 +13,4 @@ extern int mb_geometry[GEOM_TYPES];
void json_context(json_object *j);
void parse_geometry(int t, json_object *j, drawvec &out, int op, const char *fname, int line, json_object *feature);
void stringify_value(json_object *value, int &type, std::string &stringified, const char *reading, int line, json_object *feature, std::string const &key);
void stringify_value(json_object *value, int &type, std::string &stringified, const char *reading, int line, json_object *feature);


@ -379,7 +379,7 @@ static long long scale_geometry(struct serialization_state *sst, long long *bbox
}
int serialize_feature(struct serialization_state *sst, serial_feature &sf) {
struct reader *r = &(sst->readers[sst->segment]);
struct reader *r = &(*sst->readers)[sst->segment];
sf.bbox[0] = LLONG_MAX;
sf.bbox[1] = LLONG_MAX;
@ -611,7 +611,7 @@ int serialize_feature(struct serialization_state *sst, serial_feature &sf) {
index.segment = sst->segment;
index.seq = *(sst->layer_seq);
index.t = sf.t;
index.index = bbox_index;
index.ix = bbox_index;
fwrite_check(&index, sizeof(struct index), 1, r->indexfile, sst->fname);
r->indexpos += sizeof(struct index);
@ -628,8 +628,8 @@ int serialize_feature(struct serialization_state *sst, serial_feature &sf) {
}
if (*(sst->progress_seq) % 10000 == 0) {
checkdisk(sst->readers, CPUS);
if (!quiet) {
checkdisk(sst->readers);
if (!quiet && !quiet_progress) {
fprintf(stderr, "Read %.2f million features\r", *sst->progress_seq / 1000000.0);
}
}


@ -31,102 +31,102 @@ int deserialize_uint_io(FILE *f, unsigned *n, long long *geompos);
int deserialize_byte_io(FILE *f, signed char *n, long long *geompos);
struct serial_val {
int type;
std::string s;
int type = 0;
std::string s = "";
};
struct serial_feature {
long long layer;
int segment;
long long seq;
long long layer = 0;
int segment = 0;
long long seq = 0;
signed char t;
signed char feature_minzoom;
signed char t = 0;
signed char feature_minzoom = 0;
bool has_id;
unsigned long long id;
bool has_id = false;
unsigned long long id = 0;
bool has_tippecanoe_minzoom;
int tippecanoe_minzoom;
bool has_tippecanoe_minzoom = false;
int tippecanoe_minzoom = 0;
bool has_tippecanoe_maxzoom;
int tippecanoe_maxzoom;
bool has_tippecanoe_maxzoom = false;
int tippecanoe_maxzoom = 0;
drawvec geometry;
unsigned long long index;
long long extent;
drawvec geometry = drawvec();
unsigned long long index = 0;
long long extent = 0;
size_t m;
std::vector<long long> keys;
std::vector<long long> values;
long long metapos;
size_t m = 0;
std::vector<long long> keys{};
std::vector<long long> values{};
long long metapos = 0;
// XXX This isn't serialized. Should it be here?
long long bbox[4];
std::vector<std::string> full_keys;
std::vector<serial_val> full_values;
std::string layername;
long long bbox[4] = {0, 0, 0, 0};
std::vector<std::string> full_keys{};
std::vector<serial_val> full_values{};
std::string layername = "";
};
void serialize_feature(FILE *geomfile, serial_feature *sf, long long *geompos, const char *fname, long long wx, long long wy, bool include_minzoom);
serial_feature deserialize_feature(FILE *geoms, long long *geompos_in, char *metabase, long long *meta_off, unsigned z, unsigned tx, unsigned ty, unsigned *initial_x, unsigned *initial_y);
struct reader {
int metafd;
int poolfd;
int treefd;
int geomfd;
int indexfd;
int metafd = -1;
int poolfd = -1;
int treefd = -1;
int geomfd = -1;
int indexfd = -1;
FILE *metafile;
struct memfile *poolfile;
struct memfile *treefile;
FILE *geomfile;
FILE *indexfile;
FILE *metafile = NULL;
struct memfile *poolfile = NULL;
struct memfile *treefile = NULL;
FILE *geomfile = NULL;
FILE *indexfile = NULL;
long long metapos;
long long geompos;
long long indexpos;
long long metapos = 0;
long long geompos = 0;
long long indexpos = 0;
long long file_bbox[4];
long long file_bbox[4] = {0, 0, 0, 0};
struct stat geomst;
struct stat metast;
struct stat geomst {};
struct stat metast {};
char *geom_map;
char *geom_map = NULL;
};
struct serialization_state {
const char *fname; // source file name
int line; // user-oriented location within source for error reports
const char *fname = NULL; // source file name
int line = 0; // user-oriented location within source for error reports
volatile long long *layer_seq; // sequence within current layer
volatile long long *progress_seq; // overall sequence for progress indicator
volatile long long *layer_seq = NULL; // sequence within current layer
volatile long long *progress_seq = NULL; // overall sequence for progress indicator
struct reader *readers; // array of data for each input thread
int segment; // the current input thread
std::vector<struct reader> *readers = NULL; // array of data for each input thread
int segment = 0; // the current input thread
unsigned *initial_x; // relative offset of all geometries
unsigned *initial_y;
int *initialized;
unsigned *initial_x = NULL; // relative offset of all geometries
unsigned *initial_y = NULL;
int *initialized = NULL;
double *dist_sum; // running tally for calculation of resolution within features
size_t *dist_count;
bool want_dist;
double *dist_sum = NULL; // running tally for calculation of resolution within features
size_t *dist_count = NULL;
bool want_dist = false;
int maxzoom;
int basezoom;
int maxzoom = 0;
int basezoom = 0;
bool filters;
bool uses_gamma;
bool filters = false;
bool uses_gamma = false;
std::map<std::string, layermap_entry> *layermap;
std::map<std::string, layermap_entry> *layermap = NULL;
std::map<std::string, int> const *attribute_types;
std::set<std::string> *exclude;
std::set<std::string> *include;
int exclude_all;
json_object *filter;
std::map<std::string, int> const *attribute_types = NULL;
std::set<std::string> *exclude = NULL;
std::set<std::string> *include = NULL;
int exclude_all = 0;
json_object *filter = NULL;
};
int serialize_feature(struct serialization_state *sst, serial_feature &sf);


@ -322,20 +322,20 @@ double max(double a, double b) {
}
struct reader {
long long zoom;
long long x;
long long sorty;
long long y;
int pbf_count;
int z_flag;
long long zoom = 0;
long long x = 0;
long long sorty = 0;
long long y = 0;
int pbf_count = 0;
int z_flag = 0;
std::string data;
std::vector<std::string> pbf_path;
std::vector<std::string> large_zoom;
std::string data = "";
std::vector<std::string> pbf_path{};
std::vector<std::string> large_zoom{};
sqlite3 *db;
sqlite3_stmt *stmt;
struct reader *next;
sqlite3 *db = NULL;
sqlite3_stmt *stmt = NULL;
struct reader *next = NULL;
bool operator<(const struct reader &r) const {
if (zoom < r.zoom) {
@ -569,10 +569,10 @@ struct zxy {
long long x;
long long y;
zxy(long long _z, long long _x, long long _y) {
z = _z;
x = _x;
y = _y;
zxy(long long _z, long long _x, long long _y)
: z(_z),
x(_x),
y(_y) {
}
bool operator<(zxy const &other) const {
@ -599,18 +599,18 @@ struct zxy {
};
struct arg {
std::map<zxy, std::vector<std::string>> inputs;
std::map<zxy, std::string> outputs;
std::map<zxy, std::vector<std::string>> inputs{};
std::map<zxy, std::string> outputs{};
std::map<std::string, layermap_entry> *layermap;
std::map<std::string, layermap_entry> *layermap = NULL;
std::vector<std::string> *header;
std::map<std::string, std::vector<std::string>> *mapping;
std::set<std::string> *exclude;
std::set<std::string> *keep_layers;
std::set<std::string> *remove_layers;
int ifmatched;
json_object *filter;
std::vector<std::string> *header = NULL;
std::map<std::string, std::vector<std::string>> *mapping = NULL;
std::set<std::string> *exclude = NULL;
std::set<std::string> *keep_layers = NULL;
std::set<std::string> *remove_layers = NULL;
int ifmatched = 0;
json_object *filter = NULL;
};
void *join_worker(void *v) {
@ -711,7 +711,7 @@ void handle_tasks(std::map<zxy, std::vector<std::string>> &tasks, std::vector<st
}
}
void decode(struct reader *readers, char *map, std::map<std::string, layermap_entry> &layermap, sqlite3 *outdb, const char *outdir, struct stats *st, std::vector<std::string> &header, std::map<std::string, std::vector<std::string>> &mapping, std::set<std::string> &exclude, int ifmatched, std::string &attribution, std::string &description, std::set<std::string> &keep_layers, std::set<std::string> &remove_layers, std::string &name, json_object *filter) {
void decode(struct reader *readers, std::map<std::string, layermap_entry> &layermap, sqlite3 *outdb, const char *outdir, struct stats *st, std::vector<std::string> &header, std::map<std::string, std::vector<std::string>> &mapping, std::set<std::string> &exclude, int ifmatched, std::string &attribution, std::string &description, std::set<std::string> &keep_layers, std::set<std::string> &remove_layers, std::string &name, json_object *filter) {
std::vector<std::map<std::string, layermap_entry>> layermaps;
for (size_t i = 0; i < CPUS; i++) {
layermaps.push_back(std::map<std::string, layermap_entry>());
@ -1205,7 +1205,7 @@ int main(int argc, char **argv) {
*rr = r;
}
decode(readers, csv, layermap, outdb, out_dir, &st, header, mapping, exclude, ifmatched, attribution, description, keep_layers, remove_layers, name, filter);
decode(readers, layermap, outdb, out_dir, &st, header, mapping, exclude, ifmatched, attribution, description, keep_layers, remove_layers, name, filter);
if (set_attribution.size() != 0) {
attribution = set_attribution;

tile.cpp
View File

@ -76,21 +76,21 @@ int metacmp(int m1, const std::vector<long long> &keys1, const std::vector<long
int coalindexcmp(const struct coalesce *c1, const struct coalesce *c2);
struct coalesce {
char *stringpool;
std::vector<long long> keys;
std::vector<long long> values;
std::vector<std::string> full_keys;
std::vector<serial_val> full_values;
drawvec geom;
unsigned long long index;
unsigned long long index2;
long long original_seq;
int type;
int m;
bool coalesced;
double spacing;
bool has_id;
unsigned long long id;
char *stringpool = NULL;
std::vector<long long> keys = std::vector<long long>();
std::vector<long long> values = std::vector<long long>();
std::vector<std::string> full_keys = std::vector<std::string>();
std::vector<serial_val> full_values = std::vector<serial_val>();
drawvec geom = drawvec();
unsigned long long index = 0;
unsigned long long index2 = 0;
long long original_seq = 0;
int type = 0;
int m = 0;
bool coalesced = false;
double spacing = 0;
bool has_id = false;
unsigned long long id = 0;
bool operator<(const coalesce &o) const {
int cmp = coalindexcmp(this, &o);
@ -200,7 +200,7 @@ int metacmp(int m1, const std::vector<long long> &keys1, const std::vector<long
}
}
void rewrite(drawvec &geom, int z, int nextzoom, int maxzoom, long long *bbox, unsigned tx, unsigned ty, int buffer, int line_detail, int *within, long long *geompos, FILE **geomfile, const char *fname, signed char t, int layer, long long metastart, signed char feature_minzoom, int child_shards, int max_zoom_increment, long long seq, int tippecanoe_minzoom, int tippecanoe_maxzoom, int segment, unsigned *initial_x, unsigned *initial_y, int m, std::vector<long long> &metakeys, std::vector<long long> &metavals, bool has_id, unsigned long long id, unsigned long long index, long long extent) {
void rewrite(drawvec &geom, int z, int nextzoom, int maxzoom, long long *bbox, unsigned tx, unsigned ty, int buffer, int *within, long long *geompos, FILE **geomfile, const char *fname, signed char t, int layer, long long metastart, signed char feature_minzoom, int child_shards, int max_zoom_increment, long long seq, int tippecanoe_minzoom, int tippecanoe_maxzoom, int segment, unsigned *initial_x, unsigned *initial_y, int m, std::vector<long long> &metakeys, std::vector<long long> &metavals, bool has_id, unsigned long long id, unsigned long long index, long long extent) {
if (geom.size() > 0 && (nextzoom <= maxzoom || additional[A_EXTEND_ZOOMS])) {
int xo, yo;
int span = 1 << (nextzoom - z);
@ -309,34 +309,34 @@ void rewrite(drawvec &geom, int z, int nextzoom, int maxzoom, long long *bbox, u
}
struct partial {
std::vector<drawvec> geoms;
std::vector<long long> keys;
std::vector<long long> values;
std::vector<std::string> full_keys;
std::vector<serial_val> full_values;
std::vector<ssize_t> arc_polygon;
long long layer;
long long original_seq;
unsigned long long index;
unsigned long long index2;
int m;
int segment;
bool reduced;
int z;
int line_detail;
int maxzoom;
double spacing;
double simplification;
signed char t;
unsigned long long id;
bool has_id;
ssize_t renamed;
std::vector<drawvec> geoms = std::vector<drawvec>();
std::vector<long long> keys = std::vector<long long>();
std::vector<long long> values = std::vector<long long>();
std::vector<std::string> full_keys = std::vector<std::string>();
std::vector<serial_val> full_values = std::vector<serial_val>();
std::vector<ssize_t> arc_polygon = std::vector<ssize_t>();
long long layer = 0;
long long original_seq = 0;
unsigned long long index = 0;
unsigned long long index2 = 0;
int m = 0;
int segment = 0;
bool reduced = false;
int z = 0;
int line_detail = 0;
int maxzoom = 0;
double spacing = 0;
double simplification = 0;
signed char t = 0;
unsigned long long id = 0;
bool has_id = false;
ssize_t renamed = 0;
};
struct partial_arg {
std::vector<struct partial> *partials;
int task;
int tasks;
std::vector<struct partial> *partials = NULL;
int task = 0;
int tasks = 0;
};
drawvec revive_polygon(drawvec &geom, double area, int z, int detail) {
@ -442,9 +442,9 @@ void *partial_feature_worker(void *v) {
// Give Clipper a chance to try to fix it.
for (size_t g = 0; g < geoms.size(); g++) {
drawvec before = geoms[g];
geoms[g] = clean_or_clip_poly(geoms[g], 0, 0, 0, false);
geoms[g] = clean_or_clip_poly(geoms[g], 0, 0, false);
if (additional[A_DEBUG_POLYGON]) {
check_polygon(geoms[g], before);
check_polygon(geoms[g]);
}
if (geoms[g].size() < 3) {
@ -523,11 +523,11 @@ static drawvec reverse_subring(drawvec const &dv) {
}
struct edge {
unsigned x1;
unsigned y1;
unsigned x2;
unsigned y2;
unsigned ring;
unsigned x1 = 0;
unsigned y1 = 0;
unsigned x2 = 0;
unsigned y2 = 0;
unsigned ring = 0;
edge(unsigned _x1, unsigned _y1, unsigned _x2, unsigned _y2, unsigned _ring) {
x1 = _x1;
@ -872,10 +872,10 @@ bool find_common_edges(std::vector<partial> &partials, int z, int line_detail, d
// If necessary, merge some adjacent polygons into some other polygons
struct merge_order {
ssize_t edge;
unsigned long long gap;
size_t p1;
size_t p2;
ssize_t edge = 0;
unsigned long long gap = 0;
size_t p1 = 0;
size_t p2 = 0;
bool operator<(const merge_order &m) const {
return gap < m.gap;
@ -1127,53 +1127,51 @@ long long choose_minextent(std::vector<long long> &extents, double f) {
}
struct write_tile_args {
struct task *tasks;
char *metabase;
char *stringpool;
int min_detail;
int basezoom;
sqlite3 *outdb;
const char *outdir;
double droprate;
int buffer;
const char *fname;
FILE **geomfile;
double todo;
volatile long long *along;
double gamma;
double gamma_out;
int child_shards;
int *geomfd;
off_t *geom_size;
volatile unsigned *midx;
volatile unsigned *midy;
int maxzoom;
int minzoom;
int full_detail;
int low_detail;
double simplification;
volatile long long *most;
long long *meta_off;
long long *pool_off;
unsigned *initial_x;
unsigned *initial_y;
volatile int *running;
int err;
std::vector<std::map<std::string, layermap_entry>> *layermaps;
std::vector<std::vector<std::string>> *layer_unmaps;
size_t pass;
size_t passes;
unsigned long long mingap;
unsigned long long mingap_out;
long long minextent;
long long minextent_out;
double fraction;
double fraction_out;
const char *prefilter;
const char *postfilter;
bool still_dropping;
int wrote_zoom;
size_t tiling_seg;
struct task *tasks = NULL;
char *metabase = NULL;
char *stringpool = NULL;
int min_detail = 0;
sqlite3 *outdb = NULL;
const char *outdir = NULL;
int buffer = 0;
const char *fname = NULL;
FILE **geomfile = NULL;
double todo = 0;
volatile long long *along = NULL;
double gamma = 0;
double gamma_out = 0;
int child_shards = 0;
int *geomfd = NULL;
off_t *geom_size = NULL;
volatile unsigned *midx = NULL;
volatile unsigned *midy = NULL;
int maxzoom = 0;
int minzoom = 0;
int full_detail = 0;
int low_detail = 0;
double simplification = 0;
volatile long long *most = NULL;
long long *meta_off = NULL;
long long *pool_off = NULL;
unsigned *initial_x = NULL;
unsigned *initial_y = NULL;
volatile int *running = NULL;
int err = 0;
std::vector<std::map<std::string, layermap_entry>> *layermaps = NULL;
std::vector<std::vector<std::string>> *layer_unmaps = NULL;
size_t pass = 0;
size_t passes = 0;
unsigned long long mingap = 0;
unsigned long long mingap_out = 0;
long long minextent = 0;
long long minextent_out = 0;
double fraction = 0;
double fraction_out = 0;
const char *prefilter = NULL;
const char *postfilter = NULL;
bool still_dropping = false;
int wrote_zoom = 0;
size_t tiling_seg = 0;
};
bool clip_to_tile(serial_feature &sf, int z, long long buffer) {
@ -1233,7 +1231,7 @@ bool clip_to_tile(serial_feature &sf, int z, long long buffer) {
// that are duplicated across the date line
if (prevent[P_DUPLICATION] && z != 0) {
if (point_within_tile((sf.bbox[0] + sf.bbox[2]) / 2, (sf.bbox[1] + sf.bbox[3]) / 2, z, buffer)) {
if (point_within_tile((sf.bbox[0] + sf.bbox[2]) / 2, (sf.bbox[1] + sf.bbox[3]) / 2, z)) {
// sf.geometry is unchanged
} else {
sf.geometry.clear();
@ -1252,7 +1250,7 @@ bool clip_to_tile(serial_feature &sf, int z, long long buffer) {
return false;
}
serial_feature next_feature(FILE *geoms, long long *geompos_in, char *metabase, long long *meta_off, int z, unsigned tx, unsigned ty, unsigned *initial_x, unsigned *initial_y, long long *original_features, long long *unclipped_features, int nextzoom, int maxzoom, int minzoom, int max_zoom_increment, size_t pass, size_t passes, volatile long long *along, long long alongminus, int buffer, int *within, bool *first_time, int line_detail, FILE **geomfile, long long *geompos, volatile double *oprogress, double todo, const char *fname, int child_shards) {
serial_feature next_feature(FILE *geoms, long long *geompos_in, char *metabase, long long *meta_off, int z, unsigned tx, unsigned ty, unsigned *initial_x, unsigned *initial_y, long long *original_features, long long *unclipped_features, int nextzoom, int maxzoom, int minzoom, int max_zoom_increment, size_t pass, size_t passes, volatile long long *along, long long alongminus, int buffer, int *within, bool *first_time, FILE **geomfile, long long *geompos, volatile double *oprogress, double todo, const char *fname, int child_shards) {
while (1) {
serial_feature sf = deserialize_feature(geoms, geompos_in, metabase, meta_off, z, tx, ty, initial_x, initial_y);
if (sf.t < 0) {
@ -1261,7 +1259,7 @@ serial_feature next_feature(FILE *geoms, long long *geompos_in, char *metabase,
double progress = floor(((((*geompos_in + *along - alongminus) / (double) todo) + (pass - (2 - passes))) / passes + z) / (maxzoom + 1) * 1000) / 10;
if (progress >= *oprogress + 0.1) {
if (!quiet) {
if (!quiet && !quiet_progress) {
fprintf(stderr, " %3.1f%% %d/%u/%u \r", progress, z, tx, ty);
}
*oprogress = progress;
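The progress printout is now gated on two flags: -q (quiet) silences progress and warnings, while the new -Q (--no-progress-indicator) sets a separate quiet_progress flag that silences only the percentage line. A hedged sketch of that gating, with the flags as plain globals rather than the real option parsing:

#include <cstdio>

// Stand-ins for the real globals set by -q and -Q during option parsing.
static bool quiet = false;
static bool quiet_progress = false;

static void report_progress(double pct) {
	// Either -q or -Q hides the progress line...
	if (!quiet && !quiet_progress) {
		fprintf(stderr, "  %3.1f%%  \r", pct);
	}
}

static void warn(const char *msg) {
	// ...but only -q hides warnings.
	if (!quiet) {
		fprintf(stderr, "warning: %s\n", msg);
	}
}

int main() {
	quiet_progress = true;  // as if -Q were passed
	report_progress(42.0);  // suppressed
	warn("this still prints");
	return 0;
}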
@ -1279,7 +1277,7 @@ serial_feature next_feature(FILE *geoms, long long *geompos_in, char *metabase,
if (*first_time && pass == 1) { /* only write out the next zoom once, even if we retry */
if (sf.tippecanoe_maxzoom == -1 || sf.tippecanoe_maxzoom >= nextzoom) {
rewrite(sf.geometry, z, nextzoom, maxzoom, sf.bbox, tx, ty, buffer, line_detail, within, geompos, geomfile, fname, sf.t, sf.layer, sf.metapos, sf.feature_minzoom, child_shards, max_zoom_increment, sf.seq, sf.tippecanoe_minzoom, sf.tippecanoe_maxzoom, sf.segment, initial_x, initial_y, sf.m, sf.keys, sf.values, sf.has_id, sf.id, sf.index, sf.extent);
rewrite(sf.geometry, z, nextzoom, maxzoom, sf.bbox, tx, ty, buffer, within, geompos, geomfile, fname, sf.t, sf.layer, sf.metapos, sf.feature_minzoom, child_shards, max_zoom_increment, sf.seq, sf.tippecanoe_minzoom, sf.tippecanoe_maxzoom, sf.segment, initial_x, initial_y, sf.m, sf.keys, sf.values, sf.has_id, sf.id, sf.index, sf.extent);
}
}
@ -1302,46 +1300,45 @@ serial_feature next_feature(FILE *geoms, long long *geompos_in, char *metabase,
}
struct run_prefilter_args {
FILE *geoms;
long long *geompos_in;
char *metabase;
long long *meta_off;
int z;
unsigned tx;
unsigned ty;
unsigned *initial_x;
unsigned *initial_y;
long long *original_features;
long long *unclipped_features;
int nextzoom;
int maxzoom;
int minzoom;
int max_zoom_increment;
size_t pass;
size_t passes;
volatile long long *along;
long long alongminus;
int buffer;
int *within;
bool *first_time;
int line_detail;
FILE **geomfile;
long long *geompos;
volatile double *oprogress;
double todo;
const char *fname;
int child_shards;
std::vector<std::vector<std::string>> *layer_unmaps;
char *stringpool;
long long *pool_off;
FILE *prefilter_fp;
FILE *geoms = NULL;
long long *geompos_in = NULL;
char *metabase = NULL;
long long *meta_off = NULL;
int z = 0;
unsigned tx = 0;
unsigned ty = 0;
unsigned *initial_x = NULL;
unsigned *initial_y = NULL;
long long *original_features = NULL;
long long *unclipped_features = NULL;
int nextzoom = 0;
int maxzoom = 0;
int minzoom = 0;
int max_zoom_increment = 0;
size_t pass = 0;
size_t passes = 0;
volatile long long *along = NULL;
long long alongminus = 0;
int buffer = 0;
int *within = NULL;
bool *first_time = NULL;
FILE **geomfile = NULL;
long long *geompos = NULL;
volatile double *oprogress = NULL;
double todo = 0;
const char *fname = NULL;
int child_shards = 0;
std::vector<std::vector<std::string>> *layer_unmaps = NULL;
char *stringpool = NULL;
long long *pool_off = NULL;
FILE *prefilter_fp = NULL;
};
void *run_prefilter(void *v) {
run_prefilter_args *rpa = (run_prefilter_args *) v;
while (1) {
serial_feature sf = next_feature(rpa->geoms, rpa->geompos_in, rpa->metabase, rpa->meta_off, rpa->z, rpa->tx, rpa->ty, rpa->initial_x, rpa->initial_y, rpa->original_features, rpa->unclipped_features, rpa->nextzoom, rpa->maxzoom, rpa->minzoom, rpa->max_zoom_increment, rpa->pass, rpa->passes, rpa->along, rpa->alongminus, rpa->buffer, rpa->within, rpa->first_time, rpa->line_detail, rpa->geomfile, rpa->geompos, rpa->oprogress, rpa->todo, rpa->fname, rpa->child_shards);
serial_feature sf = next_feature(rpa->geoms, rpa->geompos_in, rpa->metabase, rpa->meta_off, rpa->z, rpa->tx, rpa->ty, rpa->initial_x, rpa->initial_y, rpa->original_features, rpa->unclipped_features, rpa->nextzoom, rpa->maxzoom, rpa->minzoom, rpa->max_zoom_increment, rpa->pass, rpa->passes, rpa->along, rpa->alongminus, rpa->buffer, rpa->within, rpa->first_time, rpa->geomfile, rpa->geompos, rpa->oprogress, rpa->todo, rpa->fname, rpa->child_shards);
if (sf.t < 0) {
break;
}
@ -1392,7 +1389,7 @@ void *run_prefilter(void *v) {
return NULL;
}
long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *stringpool, int z, unsigned tx, unsigned ty, int detail, int min_detail, int basezoom, sqlite3 *outdb, const char *outdir, double droprate, int buffer, const char *fname, FILE **geomfile, int minzoom, int maxzoom, double todo, volatile long long *along, long long alongminus, double gamma, int child_shards, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, volatile int *running, double simplification, std::vector<std::map<std::string, layermap_entry>> *layermaps, std::vector<std::vector<std::string>> *layer_unmaps, size_t tiling_seg, size_t pass, size_t passes, unsigned long long mingap, long long minextent, double fraction, const char *prefilter, const char *postfilter, write_tile_args *arg) {
long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *stringpool, int z, unsigned tx, unsigned ty, int detail, int min_detail, sqlite3 *outdb, const char *outdir, int buffer, const char *fname, FILE **geomfile, int minzoom, int maxzoom, double todo, volatile long long *along, long long alongminus, double gamma, int child_shards, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, volatile int *running, double simplification, std::vector<std::map<std::string, layermap_entry>> *layermaps, std::vector<std::vector<std::string>> *layer_unmaps, size_t tiling_seg, size_t pass, size_t passes, unsigned long long mingap, long long minextent, double fraction, const char *prefilter, const char *postfilter, write_tile_args *arg) {
int line_detail;
double merge_fraction = 1;
double mingap_fraction = 1;
@ -1497,7 +1494,6 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
rpa.buffer = buffer;
rpa.within = within;
rpa.first_time = &first_time;
rpa.line_detail = line_detail;
rpa.geomfile = geomfile;
rpa.geompos = geompos;
rpa.oprogress = &oprogress;
@ -1526,7 +1522,7 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
serial_feature sf;
if (prefilter == NULL) {
sf = next_feature(geoms, geompos_in, metabase, meta_off, z, tx, ty, initial_x, initial_y, &original_features, &unclipped_features, nextzoom, maxzoom, minzoom, max_zoom_increment, pass, passes, along, alongminus, buffer, within, &first_time, line_detail, geomfile, geompos, &oprogress, todo, fname, child_shards);
sf = next_feature(geoms, geompos_in, metabase, meta_off, z, tx, ty, initial_x, initial_y, &original_features, &unclipped_features, nextzoom, maxzoom, minzoom, max_zoom_increment, pass, passes, along, alongminus, buffer, within, &first_time, geomfile, geompos, &oprogress, todo, fname, child_shards);
} else {
sf = parse_feature(prefilter_jp, z, tx, ty, layermaps, tiling_seg, layer_unmaps, postfilter != NULL);
}
@ -1684,7 +1680,8 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
}
pthread_t pthreads[tasks];
partial_arg args[tasks];
std::vector<partial_arg> args;
args.resize(tasks);
for (int i = 0; i < tasks; i++) {
args[i].task = i;
args[i].tasks = tasks;
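partial_arg args[tasks] was a variable-length array, a C99 feature that C++ compilers accept only as an extension and that stricter warning settings flag; resizing a std::vector to the task count provides the same indexed storage portably. The tasks and dispatches arrays further down get the same treatment. A small sketch of the replacement pattern (worker_arg and the loop are illustrative):

#include <cstdio>
#include <vector>

struct worker_arg {
	int task = 0;
	int tasks = 0;
};

int main() {
	int tasks = 4;  // runtime value, so a plain array here would be a VLA

	// std::vector gives per-task slots without relying on the VLA extension.
	std::vector<worker_arg> args;
	args.resize(tasks);

	for (int i = 0; i < tasks; i++) {
		args[i].task = i;
		args[i].tasks = tasks;
	}

	printf("%d workers prepared\n", (int) args.size());
	return 0;
}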
@ -1809,7 +1806,7 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
if (layer_features[x].type == VT_POLYGON) {
if (layer_features[x].coalesced) {
layer_features[x].geom = clean_or_clip_poly(layer_features[x].geom, 0, 0, 0, false);
layer_features[x].geom = clean_or_clip_poly(layer_features[x].geom, 0, 0, false);
}
layer_features[x].geom = close_poly(layer_features[x].geom);
@ -1901,7 +1898,7 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
double progress = floor(((((*geompos_in + *along - alongminus) / (double) todo) + (pass - (2 - passes))) / passes + z) / (maxzoom + 1) * 1000) / 10;
if (progress >= oprogress + 0.1) {
if (!quiet) {
if (!quiet && !quiet_progress) {
fprintf(stderr, " %3.1f%% %d/%u/%u \r", progress, z, tx, ty);
}
oprogress = progress;
@ -2094,8 +2091,8 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
}
struct task {
int fileno;
struct task *next;
int fileno = 0;
struct task *next = NULL;
};
void *run_thread(void *vargs) {
@ -2138,7 +2135,7 @@ void *run_thread(void *vargs) {
// fprintf(stderr, "%d/%u/%u\n", z, x, y);
long long len = write_tile(geom, &geompos, arg->metabase, arg->stringpool, z, x, y, z == arg->maxzoom ? arg->full_detail : arg->low_detail, arg->min_detail, arg->basezoom, arg->outdb, arg->outdir, arg->droprate, arg->buffer, arg->fname, arg->geomfile, arg->minzoom, arg->maxzoom, arg->todo, arg->along, geompos, arg->gamma, arg->child_shards, arg->meta_off, arg->pool_off, arg->initial_x, arg->initial_y, arg->running, arg->simplification, arg->layermaps, arg->layer_unmaps, arg->tiling_seg, arg->pass, arg->passes, arg->mingap, arg->minextent, arg->fraction, arg->prefilter, arg->postfilter, arg);
long long len = write_tile(geom, &geompos, arg->metabase, arg->stringpool, z, x, y, z == arg->maxzoom ? arg->full_detail : arg->low_detail, arg->min_detail, arg->outdb, arg->outdir, arg->buffer, arg->fname, arg->geomfile, arg->minzoom, arg->maxzoom, arg->todo, arg->along, geompos, arg->gamma, arg->child_shards, arg->meta_off, arg->pool_off, arg->initial_x, arg->initial_y, arg->running, arg->simplification, arg->layermaps, arg->layer_unmaps, arg->tiling_seg, arg->pass, arg->passes, arg->mingap, arg->minextent, arg->fraction, arg->prefilter, arg->postfilter, arg);
if (len < 0) {
int *err = &arg->err;
@ -2203,7 +2200,7 @@ void *run_thread(void *vargs) {
return NULL;
}
int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpool, unsigned *midx, unsigned *midy, int &maxzoom, int minzoom, int basezoom, sqlite3 *outdb, const char *outdir, double droprate, int buffer, const char *fname, const char *tmpdir, double gamma, int full_detail, int low_detail, int min_detail, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, double simplification, std::vector<std::map<std::string, layermap_entry>> &layermaps, const char *prefilter, const char *postfilter) {
int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpool, unsigned *midx, unsigned *midy, int &maxzoom, int minzoom, sqlite3 *outdb, const char *outdir, int buffer, const char *fname, const char *tmpdir, double gamma, int full_detail, int low_detail, int min_detail, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, double simplification, std::vector<std::map<std::string, layermap_entry>> &layermaps, const char *prefilter, const char *postfilter) {
// The existing layermaps are one table per input thread.
// We need to add another one per *tiling* thread so that it can be
// safely changed during tiling.
@ -2281,12 +2278,17 @@ int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpo
// Assign temporary files to threads
struct task tasks[TEMP_FILES];
std::vector<struct task> tasks;
tasks.resize(TEMP_FILES);
struct dispatch {
struct task *tasks;
long long todo;
struct dispatch *next;
} dispatches[threads];
struct task *tasks = NULL;
long long todo = 0;
struct dispatch *next = NULL;
};
std::vector<struct dispatch> dispatches;
dispatches.resize(threads);
struct dispatch *dispatch_head = &dispatches[0];
for (size_t j = 0; j < threads; j++) {
dispatches[j].tasks = NULL;
@ -2336,7 +2338,8 @@ int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpo
for (size_t pass = start; pass < 2; pass++) {
pthread_t pthreads[threads];
write_tile_args args[threads];
std::vector<write_tile_args> args;
args.resize(threads);
int running = threads;
long long along = 0;
@ -2344,10 +2347,8 @@ int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpo
args[thread].metabase = metabase;
args[thread].stringpool = stringpool;
args[thread].min_detail = min_detail;
args[thread].basezoom = basezoom;
args[thread].outdb = outdb; // locked with db_lock
args[thread].outdir = outdir;
args[thread].droprate = droprate;
args[thread].buffer = buffer;
args[thread].fname = fname;
args[thread].geomfile = sub + thread * (TEMP_FILES / threads);

View File

@ -9,7 +9,7 @@
long long write_tile(char **geom, char *metabase, char *stringpool, unsigned *file_bbox, int z, unsigned x, unsigned y, int detail, int min_detail, int basezoom, sqlite3 *outdb, const char *outdir, double droprate, int buffer, const char *fname, FILE **geomfile, int file_minzoom, int file_maxzoom, double todo, char *geomstart, long long along, double gamma, int nlayers);
int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpool, unsigned *midx, unsigned *midy, int &maxzoom, int minzoom, int basezoom, sqlite3 *outdb, const char *outdir, double droprate, int buffer, const char *fname, const char *tmpdir, double gamma, int full_detail, int low_detail, int min_detail, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, double simplification, std::vector<std::map<std::string, layermap_entry> > &layermap, const char *prefilter, const char *postfilter);
int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpool, unsigned *midx, unsigned *midy, int &maxzoom, int minzoom, sqlite3 *outdb, const char *outdir, int buffer, const char *fname, const char *tmpdir, double gamma, int full_detail, int low_detail, int min_detail, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, double simplification, std::vector<std::map<std::string, layermap_entry> > &layermap, const char *prefilter, const char *postfilter);
int manage_gap(unsigned long long index, unsigned long long *previndex, double scale, double gamma, double *gap);

View File

@ -1,6 +1,6 @@
#ifndef VERSION_HPP
#define VERSION_HPP
#define VERSION "tippecanoe v1.26.6\n"
#define VERSION "tippecanoe v1.26.7\n"
#endif

View File

@ -16,12 +16,12 @@ struct lonlat {
long long x;
long long y;
lonlat(int nop, double nlon, double nlat, long long nx, long long ny) {
this->op = nop;
this->lon = nlon;
this->lat = nlat;
this->x = nx;
this->y = ny;
lonlat(int nop, double nlon, double nlat, long long nx, long long ny)
: op(nop),
lon(nlon),
lat(nlat),
x(nx),
y(ny) {
}
};