mapbox/tippecanoe (https://github.com/mapbox/tippecanoe.git)

Add an option to discover the minimum workable gap between features

commit 10fc9254d1, parent db859e8801
README.md

@@ -127,8 +127,9 @@ resolution is obtained than by using a smaller _maxzoom_ or _detail_.
 * -ap or --drop-polygons: Let "dot" dropping at lower zooms apply to polygons too
 * -ag or --calculate-feature-density: Add a new attribute, `tippecanoe_feature_density`, to each feature, to record how densely features are spaced in that area of the tile. You can use this attribute in the style to produce a glowing effect where points are densely packed. It can range from 0 in the sparsest areas to 255 in the densest.
 * -ab or --detect-shared-borders: In the manner of [TopoJSON](https://github.com/mbostock/topojson/wiki/Introduction), detect borders that are shared between multiple polygons and simplify them identically in each polygon. This takes more time and memory than considering each polygon individually.
-* -aG or --increase-gamma-as-needed: If a tile is too large, try to reduce it to under 500K by increasing the `-g` gamma
+* -aG or --increase-gamma-as-needed: If a tile is too large, try to reduce it to under 500K by increasing the `-g` gamma. The discovered gamma applies to the entire zoom level.
 * -am or --merge-polygons-as-needed: If a tile is too large, try to reduce it to under 500K by merging adjacent polygons together
+* -as or --increase-spacing-as-needed: If a tile is too large, try to reduce it to under 500K by increasing the minimum spacing between features. The discovered spacing applies to the entire zoom level.

 ### Doing less

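A note for readers of the option description above (illustrative, not part of the commit): features are visited in order of a space-filling-curve index, and "increasing the minimum spacing" means dropping any feature whose index is closer than the current minimum gap to the index of the feature just before it. A minimal sketch of that rule, with hypothetical names:

// Illustrative sketch only: drop features whose space-filling-curve index is
// closer than `mingap` to the index of the previous feature.
#include <cstdio>
#include <vector>

static std::vector<unsigned long long> keep_sparse(const std::vector<unsigned long long> &sorted_indices, unsigned long long mingap) {
	std::vector<unsigned long long> kept;
	unsigned long long prev = 0;  // index of the previous feature (starts at 0 here)
	for (unsigned long long ix : sorted_indices) {
		if (ix - prev >= mingap) {
			kept.push_back(ix);
		}
		prev = ix;  // the gap is measured from the previous feature, kept or not
	}
	return kept;
}

int main() {
	std::vector<unsigned long long> ix = {3, 5, 6, 20, 21, 22, 40};
	for (unsigned long long v : keep_sparse(ix, 10)) {
		printf("%llu\n", v);  // with mingap 10, only 20 and 40 survive
	}
}

The tile.cpp hunks below both apply this kind of test and record the observed gaps, so that a workable minimum can be discovered when a tile comes out too large.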
main.cpp

@@ -1915,6 +1915,7 @@ int main(int argc, char **argv) {
 {"detect-shared-borders", no_argument, &additional[A_DETECT_SHARED_BORDERS], 1},
 {"increase-gamma-as-needed", no_argument, &additional[A_INCREASE_GAMMA_AS_NEEDED], 1},
 {"merge-polygons-as-needed", no_argument, &additional[A_MERGE_POLYGONS_AS_NEEDED], 1},
+{"increase-spacing-as-needed", no_argument, &additional[A_INCREASE_SPACING_AS_NEEDED], 1},

 {"no-line-simplification", no_argument, &prevent[P_SIMPLIFY], 1},
 {"simplify-only-low-zooms", no_argument, &prevent[P_SIMPLIFY_LOW], 1},
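For context on the wiring above (standard getopt_long behavior, nothing specific to this commit): an option entry with a non-null flag pointer makes getopt_long store the entry's val through that pointer and return 0, so --increase-spacing-as-needed simply sets additional[A_INCREASE_SPACING_AS_NEEDED] to 1 without any extra case in the option switch. A self-contained sketch of the pattern, with a stand-in additional[] array:

// Minimal sketch of the flag-pointer pattern used above; the array size and
// option handling here are illustrative, not tippecanoe's.
#include <getopt.h>
#include <stdio.h>

static int additional[256];  // indexed by the option letter, as with the A_* defines

int main(int argc, char **argv) {
	static struct option long_options[] = {
		{"increase-spacing-as-needed", no_argument, &additional[(int) 's'], 1},
		{NULL, 0, NULL, 0},
	};

	int idx;
	while (getopt_long(argc, argv, "", long_options, &idx) != -1) {
		// entries with a flag pointer make getopt_long return 0; nothing else to do
	}

	printf("increase-spacing-as-needed: %d\n", additional[(int) 's']);
	return 0;
}

Each A_* define in the header hunk further down is just the character code of its option letter, which is what lets the long form share an additional[] slot with the short form -as documented in the README hunk above.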
man/tippecanoe.1

@@ -153,9 +153,11 @@ which may not be what you want.
 .IP \(bu 2
 \-ab or \-\-detect\-shared\-borders: In the manner of TopoJSON \[la]https://github.com/mbostock/topojson/wiki/Introduction\[ra], detect borders that are shared between multiple polygons and simplify them identically in each polygon. This takes more time and memory than considering each polygon individually.
 .IP \(bu 2
-\-aG or \-\-increase\-gamma\-as\-needed: If a tile is too large, try to reduce it to under 500K by increasing the \fB\fC\-g\fR gamma
+\-aG or \-\-increase\-gamma\-as\-needed: If a tile is too large, try to reduce it to under 500K by increasing the \fB\fC\-g\fR gamma. The discovered gamma applies to the entire zoom level.
 .IP \(bu 2
 \-am or \-\-merge\-polygons\-as\-needed: If a tile is too large, try to reduce it to under 500K by merging adjacent polygons together
+.IP \(bu 2
+\-as or \-\-increase\-spacing\-as\-needed: If a tile is too large, try to reduce it to under 500K by increasing the minimum spacing between features. The discovered spacing applies to the entire zoom level.
 .RE
 .SS Doing less
 .RS
@@ -9,6 +9,7 @@
 #define A_CALCULATE_FEATURE_DENSITY ((int) 'g')
 #define A_INCREASE_GAMMA_AS_NEEDED ((int) 'G')
 #define A_MERGE_POLYGONS_AS_NEEDED ((int) 'm')
+#define A_INCREASE_SPACING_AS_NEEDED ((int) 's')

 #define P_SIMPLIFY ((int) 's')
 #define P_SIMPLIFY_LOW ((int) 'S')
tile.cpp

@@ -1175,12 +1175,14 @@ struct write_tile_args {
 std::vector<std::map<std::string, layermap_entry>> *layermaps;
 std::vector<std::vector<std::string>> *layer_unmaps;
 size_t pass;
+unsigned long long mingap;
 };

-long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *stringpool, int z, unsigned tx, unsigned ty, int detail, int min_detail, int basezoom, sqlite3 *outdb, double droprate, int buffer, const char *fname, FILE **geomfile, int minzoom, int maxzoom, double todo, volatile long long *along, long long alongminus, double gamma, int child_shards, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, volatile int *running, double simplification, std::vector<std::map<std::string, layermap_entry>> *layermaps, std::vector<std::vector<std::string>> *layer_unmaps, size_t pass, write_tile_args *arg) {
+long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *stringpool, int z, unsigned tx, unsigned ty, int detail, int min_detail, int basezoom, sqlite3 *outdb, double droprate, int buffer, const char *fname, FILE **geomfile, int minzoom, int maxzoom, double todo, volatile long long *along, long long alongminus, double gamma, int child_shards, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, volatile int *running, double simplification, std::vector<std::map<std::string, layermap_entry>> *layermaps, std::vector<std::vector<std::string>> *layer_unmaps, size_t pass, unsigned long long mingap, write_tile_args *arg) {
 int line_detail;
 double fraction = 1;
 double merge_fraction = 1;
+double mingap_fraction = 1;

 long long og = *geompos_in;

@@ -1231,6 +1233,7 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s

 std::vector<struct partial> partials;
 std::map<std::string, std::vector<coalesce>> layers;
+std::vector<unsigned long long> indices;

 int within[child_shards];
 long long geompos[child_shards];
@@ -1415,7 +1418,7 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
 }

 unsigned long long index = 0;
-if (additional[A_CALCULATE_FEATURE_DENSITY] || gamma > 0 || additional[A_MERGE_POLYGONS_AS_NEEDED]) {
+if (additional[A_CALCULATE_FEATURE_DENSITY] || gamma > 0 || additional[A_MERGE_POLYGONS_AS_NEEDED] || additional[A_INCREASE_SPACING_AS_NEEDED]) {
 index = encode(bbox[0] / 2 + bbox[2] / 2, bbox[1] / 2 + bbox[3] / 2);
 }

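Why index gaps can stand in for spacing: the encode() call above packs the midpoint of the feature's bounding box into a single index on a space-filling curve, so features that are close together in the tile usually receive nearby index values. A rough sketch of that kind of bit interleaving (not tippecanoe's exact encode()):

// Illustrative Morton-style interleave of two 32-bit coordinates into one
// 64-bit index; nearby (x, y) pairs tend to produce nearby indices.
#include <cstdio>

static unsigned long long interleave(unsigned x, unsigned y) {
	unsigned long long index = 0;
	for (int i = 0; i < 32; i++) {
		index |= (unsigned long long) ((x >> i) & 1) << (2 * i + 1);
		index |= (unsigned long long) ((y >> i) & 1) << (2 * i);
	}
	return index;
}

int main() {
	// Two nearby midpoints get nearby indices; a distant one does not.
	printf("%llx %llx %llx\n", interleave(100, 200), interleave(101, 200), interleave(60000, 1));
}

Given such an ordering, the difference between the indices of consecutive features is the per-feature gap that the new option records and thresholds on.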
@@ -1425,6 +1428,15 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
 }
 }

+if (additional[A_INCREASE_SPACING_AS_NEEDED]) {
+if (index > merge_previndex) {
+indices.push_back(index - merge_previndex);
+}
+if (index - merge_previndex < mingap) {
+continue;
+}
+}
+
 if (additional[A_CALCULATE_FEATURE_DENSITY]) {
 // Gamma is always 1 for this calculation so there is a reasonable
 // interpretation when no features are being dropped.
@@ -1722,6 +1734,19 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
 }
 line_detail++; // to keep it the same when the loop decrements it
 continue;
+} else if (additional[A_INCREASE_SPACING_AS_NEEDED]) {
+std::sort(indices.begin(), indices.end());
+mingap_fraction = mingap_fraction * 200000.0 / totalsize * 0.95;
+size_t n = (indices.size() - 1) * (1 - mingap_fraction);
+if (n < indices.size() && mingap < indices[n]) {
+mingap = indices[n];
+arg->mingap = mingap;
+if (!quiet) {
+fprintf(stderr, "Going to try keeping the sparsest %0.2f%% of the features to make it fit\n", mingap_fraction * 100.0);
+}
+line_detail++;
+continue;
+}
 } else if (prevent[P_DYNAMIC_DROP]) {
 fraction = fraction * 200000 / totalsize * 0.95;
 if (!quiet) {
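How the retry above chooses the next minimum gap: the recorded gaps are sorted, and the value at position (indices.size() - 1) * (1 - mingap_fraction) is the gap exceeded by roughly the sparsest mingap_fraction of the features, so adopting it as mingap should keep about that share on the next attempt. The line_detail++ before continue cancels the decrement of the enclosing detail loop, so the retry runs at the same detail, as the existing comment notes. A small self-contained sketch of the quantile step, with illustrative names:

// Illustrative only: choose a minimum gap so that roughly `keep_fraction`
// of the features (the sparsest ones) survive the next attempt.
#include <algorithm>
#include <cstdio>
#include <vector>

static unsigned long long gap_for_fraction(std::vector<unsigned long long> gaps, double keep_fraction) {
	if (gaps.empty()) {
		return 0;
	}
	std::sort(gaps.begin(), gaps.end());
	size_t n = (gaps.size() - 1) * (1 - keep_fraction);
	return gaps[n];
}

int main() {
	std::vector<unsigned long long> gaps = {1, 2, 2, 3, 5, 8, 13, 21, 34, 55};
	// Keep roughly the sparsest 30%: the threshold lands near the 70th-percentile gap.
	printf("%llu\n", gap_for_fraction(gaps, 0.30));  // prints 13
}

The same escalation appears again in the next hunk for the case where the compressed tile exceeds max_tile_size, scaled by max_tile_size / compressed.size() instead of 200000.0 / totalsize.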
@@ -1761,6 +1786,18 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
 fprintf(stderr, "Going to try gamma of %0.3f to make it fit\n", gamma);
 }
 line_detail++; // to keep it the same when the loop decrements it
+} else if (additional[A_INCREASE_SPACING_AS_NEEDED]) {
+std::sort(indices.begin(), indices.end());
+mingap_fraction = mingap_fraction * max_tile_size / compressed.size() * 0.95;
+size_t n = (indices.size() - 1) * (1 - mingap_fraction);
+if (n < indices.size() && mingap < indices[n]) {
+mingap = indices[n];
+arg->mingap = mingap;
+if (!quiet) {
+fprintf(stderr, "Going to try keeping the sparsest %0.2f%% of the features to make it fit\n", mingap_fraction * 100.0);
+}
+line_detail++;
+}
 } else if (prevent[P_DYNAMIC_DROP]) {
 // The 95% is a guess to avoid too many retries
 // and probably actually varies based on how much duplicated metadata there is
@@ -1840,7 +1877,7 @@ void *run_thread(void *vargs) {

 // fprintf(stderr, "%d/%u/%u\n", z, x, y);

-long long len = write_tile(geom, &geompos, arg->metabase, arg->stringpool, z, x, y, z == arg->maxzoom ? arg->full_detail : arg->low_detail, arg->min_detail, arg->basezoom, arg->outdb, arg->droprate, arg->buffer, arg->fname, arg->geomfile, arg->minzoom, arg->maxzoom, arg->todo, arg->along, geompos, arg->gamma, arg->child_shards, arg->meta_off, arg->pool_off, arg->initial_x, arg->initial_y, arg->running, arg->simplification, arg->layermaps, arg->layer_unmaps, arg->pass, arg);
+long long len = write_tile(geom, &geompos, arg->metabase, arg->stringpool, z, x, y, z == arg->maxzoom ? arg->full_detail : arg->low_detail, arg->min_detail, arg->basezoom, arg->outdb, arg->droprate, arg->buffer, arg->fname, arg->geomfile, arg->minzoom, arg->maxzoom, arg->todo, arg->along, geompos, arg->gamma, arg->child_shards, arg->meta_off, arg->pool_off, arg->initial_x, arg->initial_y, arg->running, arg->simplification, arg->layermaps, arg->layer_unmaps, arg->pass, arg->mingap, arg);

 if (len < 0) {
 int *err = &arg->err;
@@ -2020,11 +2057,12 @@ int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpo
 int err = INT_MAX;

 size_t start = 1;
-if (additional[A_INCREASE_GAMMA_AS_NEEDED]) {
+if (additional[A_INCREASE_GAMMA_AS_NEEDED] || additional[A_INCREASE_SPACING_AS_NEEDED]) {
 start = 0;
 }

 double zoom_gamma = gamma;
+double zoom_mingap = 0;

 for (size_t pass = start; pass < 2; pass++) {
 pthread_t pthreads[threads];
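The pass machinery above is what makes the discovered gap apply to the whole zoom level: when gamma or spacing may need to grow, the zoom is run twice, with pass 0 used only to learn how large zoom_gamma and zoom_mingap must become (each thread reports back through its write_tile_args, and the largest value wins, as the later hunk shows), and pass 1 then writes the real tiles with those zoom-wide values. A toy sketch of that discover-then-commit pattern, with hypothetical names:

// Illustrative two-pass sketch: pass 0 only discovers the parameter each worker
// needed; pass 1 re-runs with the zoom-wide maximum so every tile uses the same value.
#include <algorithm>
#include <cstdio>
#include <vector>

struct worker_args {
	unsigned long long mingap;  // in: zoom-wide starting value; out: value this worker needed
};

// Hypothetical worker: pretend some of its tiles only fit once the gap reaches 40.
static void write_tiles(worker_args *a, bool commit) {
	if (!commit) {
		if (a->mingap < 40) {
			a->mingap = 40;  // discovery pass: report the requirement, emit nothing
		}
		return;
	}
	// commit pass: would actually write tiles using the zoom-wide a->mingap
}

int main() {
	unsigned long long zoom_mingap = 0;
	std::vector<worker_args> workers(4);

	for (int pass = 0; pass < 2; pass++) {
		for (worker_args &w : workers) {
			w.mingap = zoom_mingap;
			write_tiles(&w, pass == 1);
			zoom_mingap = std::max(zoom_mingap, w.mingap);  // the largest requirement wins
		}
	}
	printf("zoom-wide mingap: %llu\n", zoom_mingap);  // 40 in this toy run
}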
@@ -2044,6 +2082,7 @@ int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpo
 args[thread].todo = todo;
 args[thread].along = &along; // locked with var_lock
 args[thread].gamma = zoom_gamma;
+args[thread].mingap = zoom_mingap;
 args[thread].child_shards = TEMP_FILES / threads;
 args[thread].simplification = simplification;

@@ -2087,6 +2126,9 @@ int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpo
 if (args[thread].gamma > zoom_gamma) {
 zoom_gamma = args[thread].gamma;
 }
+if (args[thread].mingap > zoom_mingap) {
+zoom_mingap = args[thread].mingap;
+}
 }
 }
