Add an option to merge adjacent polygons together to reduce tile size

Eric Fischer 2016-10-24 16:27:00 -07:00
parent f32916c472
commit 2798bf7b6f
5 changed files with 41 additions and 8 deletions
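This commit threads the new option through five files: the README, the man page, the option tables in main.cpp and the options header, and the tiling loop in tile.cpp. For orientation, a hypothetical invocation (the file names are placeholders, not from this commit):

	tippecanoe -o counties.mbtiles -am counties.geojson

The long form --merge-polygons-as-needed is equivalent to -am, per the README entry below.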


@ -128,6 +128,7 @@ resolution is obtained than by using a smaller _maxzoom_ or _detail_.
* -ag or --calculate-feature-density: Add a new attribute, `tippecanoe_feature_density`, to each feature, to record how densely features are spaced in that area of the tile. You can use this attribute in the style to produce a glowing effect where points are densely packed. It can range from 0 in the sparsest areas to 255 in the densest.
* -ab or --detect-shared-borders: In the manner of [TopoJSON](https://github.com/mbostock/topojson/wiki/Introduction), detect borders that are shared between multiple polygons and simplify them identically in each polygon. This takes more time and memory than considering each polygon individually.
* -aG or --increase-gamma-as-needed: If a tile is too large, try to reduce it to under 500K by increasing the `-g` gamma
+* -am or --merge-polygons-as-needed: If a tile is too large, try to reduce it to under 500K by merging adjacent polygons together
### Doing less


@ -1914,6 +1914,7 @@ int main(int argc, char **argv) {
{"calculate-feature-density", no_argument, &additional[A_CALCULATE_FEATURE_DENSITY], 1},
{"detect-shared-borders", no_argument, &additional[A_DETECT_SHARED_BORDERS], 1},
{"increase-gamma-as-needed", no_argument, &additional[A_INCREASE_GAMMA_AS_NEEDED], 1},
{"merge-polygons-as-needed", no_argument, &additional[A_MERGE_POLYGONS_AS_NEEDED], 1},
{"no-line-simplification", no_argument, &prevent[P_SIMPLIFY], 1},
{"simplify-only-low-zooms", no_argument, &prevent[P_SIMPLIFY_LOW], 1},


@ -154,6 +154,8 @@ which may not be what you want.
\-ab or \-\-detect\-shared\-borders: In the manner of TopoJSON \[la]https://github.com/mbostock/topojson/wiki/Introduction\[ra], detect borders that are shared between multiple polygons and simplify them identically in each polygon. This takes more time and memory than considering each polygon individually.
.IP \(bu 2
\-aG or \-\-increase\-gamma\-as\-needed: If a tile is too large, try to reduce it to under 500K by increasing the \fB\fC\-g\fR gamma
+.IP \(bu 2
+\-am or \-\-merge\-polygons\-as\-needed: If a tile is too large, try to reduce it to under 500K by merging adjacent polygons together
.RE
.SS Doing less
.RS


@ -8,6 +8,7 @@
#define A_PREFER_RADIX_SORT ((int) 'R')
#define A_CALCULATE_FEATURE_DENSITY ((int) 'g')
#define A_INCREASE_GAMMA_AS_NEEDED ((int) 'G')
+#define A_MERGE_POLYGONS_AS_NEEDED ((int) 'm')
#define P_SIMPLIFY ((int) 's')
#define P_SIMPLIFY_LOW ((int) 'S')
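The A_* macros name each "additional" suboption by its short letter, so the long option in main.cpp above can store 1 directly into additional[] at that letter's slot using getopt_long()'s flag-pointer convention (when the flag field is non-null, getopt_long() writes val into *flag and returns 0). A minimal self-contained sketch of the pattern; the array size and everything else here is assumed for illustration, not copied from tippecanoe:

	// sketch.cpp: how a flag-style long option sets additional['m']
	#include <getopt.h>
	#include <cstdio>

	static int additional[256];	// assumed size; one slot per option letter
	#define A_MERGE_POLYGONS_AS_NEEDED ((int) 'm')

	int main(int argc, char **argv) {
		static struct option opts[] = {
			{"merge-polygons-as-needed", no_argument, &additional[A_MERGE_POLYGONS_AS_NEEDED], 1},
			{0, 0, 0, 0},
		};
		while (getopt_long(argc, argv, "", opts, NULL) != -1) {
			// flag-style options return 0; nothing more to do here
		}
		if (additional[A_MERGE_POLYGONS_AS_NEEDED]) {
			printf("polygon merging enabled\n");
		}
		return 0;
	}

Running it as ./sketch --merge-polygons-as-needed prints the message.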


@ -647,7 +647,9 @@ bool edges_same(std::pair<std::vector<edge>::iterator, std::vector<edge>::iterat
return true;
}
-void find_common_edges(std::vector<partial> &partials, int z, int line_detail, double simplification, int maxzoom) {
+bool find_common_edges(std::vector<partial> &partials, int z, int line_detail, double simplification, int maxzoom, double merge_fraction) {
+size_t merge_count = ceil((1 - merge_fraction) * partials.size());
for (size_t i = 0; i < partials.size(); i++) {
if (partials[i].t == VT_POLYGON) {
for (size_t j = 0; j < partials[i].geoms.size(); j++) {
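In find_common_edges(), merge_fraction is the fraction of polygons to keep distinct, so merge_count is how many polygons must be merged away before the pass can report success. A worked example of the arithmetic above, with invented numbers:

	// merge_count.cpp: the merge_count computation from the hunk above
	#include <cmath>
	#include <cstdio>

	int main() {
		size_t partials = 10000;	// polygons in this tile (invented)
		double merge_fraction = 0.75;	// keep 75% of them distinct
		size_t merge_count = std::ceil((1 - merge_fraction) * partials);
		printf("%zu polygons to merge away\n", merge_count);	// prints 2500
		return 0;
	}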
@ -928,9 +930,17 @@ void find_common_edges(std::vector<partial> &partials, int z, int line_detail, d
// If necessary, merge some adjacent polygons into some other polygons
-#if 0
size_t merged = 0;
+std::map<unsigned long long, size_t> merge_order;
for (size_t i = 0; i < partials.size(); i++) {
-for (size_t j = 0; j < partials[i].arc_polygon.size(); j++) {
+merge_order.insert(std::pair<unsigned long long, size_t>(partials[i].index - partials[i].index2, i));
+}
+
+for (auto mo = merge_order.begin(); mo != merge_order.end(); ++mo) {
+if (merged >= merge_count) {
+break;
+}
+size_t i = mo->second;
+for (size_t j = 0; j < partials[i].arc_polygon.size() && merged < merge_count; j++) {
if (merge_candidates.count(-partials[i].arc_polygon[j]) > 0) {
auto r = merge_candidates.equal_range(-partials[i].arc_polygon[j]);
for (auto a = r.first; a != r.second; ++a) {
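merge_order is keyed by index - index2, the gap in spatial-index order between a feature and the feature that preceded it, so iteration visits the most densely packed polygons first and they are the first to be merged. One quirk worth noting: std::map::insert() silently discards an entry whose key already exists, so two features with identical gaps compete for a single slot; a std::multimap would keep both. A small sketch of the ordering behavior, with invented gap values:

	// merge_order.cpp: ascending-gap iteration, duplicate keys dropped
	#include <cstdio>
	#include <map>
	#include <utility>

	int main() {
		std::map<unsigned long long, size_t> merge_order;
		unsigned long long gaps[] = {900, 12, 500, 12};	// note the duplicate 12
		for (size_t i = 0; i < 4; i++) {
			merge_order.insert(std::pair<unsigned long long, size_t>(gaps[i], i));
		}
		// Visits gap 12, then 500, then 900; the second feature with
		// gap 12 was dropped by insert(), which a multimap would keep.
		for (auto mo = merge_order.begin(); mo != merge_order.end(); ++mo) {
			printf("gap %llu -> feature %zu\n", mo->first, mo->second);
		}
		return 0;
	}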
@ -1011,6 +1021,7 @@ void find_common_edges(std::vector<partial> &partials, int z, int line_detail, d
}
partials[a->second].arc_polygon.clear();
+merged++;
for (size_t k = 0; k < additions.size(); k++) {
partials[i].arc_polygon.push_back(additions[k]);
@ -1038,7 +1049,6 @@ void find_common_edges(std::vector<partial> &partials, int z, int line_detail, d
}
}
}
-#endif
// Turn the arc representations of the polygons back into standard polygon geometries
@ -1082,11 +1092,18 @@ void find_common_edges(std::vector<partial> &partials, int z, int line_detail, d
}
}
}
+if (merged >= merge_count) {
+return true;
+} else {
+return false;
+}
}
long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *stringpool, int z, unsigned tx, unsigned ty, int detail, int min_detail, int basezoom, sqlite3 *outdb, double droprate, int buffer, const char *fname, FILE **geomfile, int minzoom, int maxzoom, double todo, volatile long long *along, long long alongminus, double gamma, int child_shards, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, volatile int *running, double simplification, std::vector<std::map<std::string, layermap_entry>> *layermaps, std::vector<std::vector<std::string>> *layer_unmaps) {
int line_detail;
double fraction = 1;
+double merge_fraction = 1;
long long og = *geompos_in;
@ -1126,7 +1143,7 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
double fraction_accum = 0;
-unsigned long long previndex = 0, density_previndex = 0;
+unsigned long long previndex = 0, density_previndex = 0, merge_previndex = 0;
double scale = (double) (1LL << (64 - 2 * (z + 8)));
double gap = 0, density_gap = 0;
double spacing = 0;
@ -1320,7 +1337,7 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
}
unsigned long long index = 0;
-if (additional[A_CALCULATE_FEATURE_DENSITY] || gamma > 0) {
+if (additional[A_CALCULATE_FEATURE_DENSITY] || gamma > 0 || additional[A_MERGE_POLYGONS_AS_NEEDED]) {
index = encode(bbox[0] / 2 + bbox[2] / 2, bbox[1] / 2 + bbox[3] / 2);
}
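The index comes from encoding the midpoint of the feature's bounding box, giving each feature a one-dimensional spatial sort key; merge_previndex (below) records the key of the previous feature so that the gap between the two can rank merge candidates. As a hedged illustration of the general technique such a key is assumed to use, here is a Z-order (Morton) encoding that interleaves the bits of x and y so nearby features usually get nearby keys; this is not tippecanoe's actual encode(), just the idea:

	// morton.cpp: bit-interleaved spatial key (illustration only)
	#include <cstdio>

	unsigned long long interleave(unsigned x, unsigned y) {
		unsigned long long out = 0;
		for (int b = 31; b >= 0; b--) {
			out = (out << 1) | ((x >> b) & 1);	// one bit of x...
			out = (out << 1) | ((y >> b) & 1);	// ...then one bit of y
		}
		return out;
	}

	int main() {
		// Two nearby points yield nearby keys:
		printf("%llx\n", interleave(1000, 2000));
		printf("%llx\n", interleave(1001, 2000));
		return 0;
	}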
@ -1372,14 +1389,19 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
p.simplification = simplification;
p.id = id;
p.has_id = has_id;
+p.index2 = merge_previndex;
p.index = index;
partials.push_back(p);
}
+merge_previndex = index;
}
first_time = false;
+bool merge_successful = true;
-if (additional[A_DETECT_SHARED_BORDERS]) {
-find_common_edges(partials, z, line_detail, simplification, maxzoom);
+if (additional[A_DETECT_SHARED_BORDERS] || (additional[A_MERGE_POLYGONS_AS_NEEDED] && merge_fraction < 1)) {
+merge_successful = find_common_edges(partials, z, line_detail, simplification, maxzoom, merge_fraction);
}
int tasks = ceil((double) CPUS / *running);
@ -1651,6 +1673,12 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
fprintf(stderr, "Going to try gamma of %0.3f to make it fit\n", gamma);
}
line_detail++; // to keep it the same when the loop decrements it
+} else if (additional[A_MERGE_POLYGONS_AS_NEEDED] && merge_fraction > .05 && merge_successful) {
+merge_fraction = merge_fraction * max_tile_size / compressed.size() * 0.95;
+if (!quiet) {
+fprintf(stderr, "Going to try merging %0.2f%% of the polygons to make it fit\n", 100 - merge_fraction * 100);
+}
+line_detail++; // to keep it the same when the loop decrements it
}
} else {
if (pthread_mutex_lock(&db_lock) != 0) {
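When the compressed tile is still too big, the new branch scales merge_fraction by the ratio of the size limit to the achieved size, with a further 5% margin, and the outer loop retries; it keeps going until the tile fits, merge_fraction drops below .05, or a pass fails to merge enough polygons (merge_successful false). A worked example of one retry step, with invented sizes and assuming the 500K limit the README mentions:

	// retry.cpp: one merge_fraction adjustment step
	#include <cstdio>

	int main() {
		double merge_fraction = 1;
		size_t max_tile_size = 500000;	// assumed limit (the README's 500K)
		size_t compressed_size = 800000;	// hypothetical oversized tile

		merge_fraction = merge_fraction * max_tile_size / compressed_size * 0.95;
		fprintf(stderr, "Going to try merging %0.2f%% of the polygons to make it fit\n",
			100 - merge_fraction * 100);	// about 40.6%
		return 0;
	}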