Tests of the three current strategies for reducing tile size

Eric Fischer 2016-10-24 15:33:14 -07:00
parent 7cb7fc4913
commit f32916c472
8 changed files with 46772 additions and 6 deletions


@@ -91,6 +91,7 @@ Options
* -m _detail_ or --minimum-detail=_detail_: Minimum detail that it will try if tiles are too big at regular detail (default 7)
* -b _pixels_ or --buffer=_pixels_: Buffer size where features are duplicated from adjacent tiles. Units are "screen pixels"--1/256th of the tile width or height. (default 5)
* -s _projection_ or --projection=_projection_: Specify the projection of the input data. Currently supported are EPSG:4326 (WGS84, the default) and EPSG:3857 (Web Mercator).
* -k _bytes_ or --maximum-tile-bytes=_bytes_: Use the specified number of _bytes_ as the maximum compressed tile size instead of 500K.
All internal math is done in terms of a 32-bit tile coordinate system, so 1/(2^32) of the size of Earth,
or about 1cm, is the smallest distinguishable distance. If _maxzoom_ + _detail_ > 32, no additional
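
The two unit conventions in the option list above are worth pinning down: the buffer is measured in "screen pixels" of 1/256th of the tile extent, and the smallest representable distance is 1/(2^32) of the world, which is where "about 1cm" comes from. A small standalone check of both claims, not taken from tippecanoe itself (the helper name and the 40,075 km equatorial circumference are my own):

#include <cstdint>
#include <cstdio>

// One "screen pixel" is 1/256th of the tile extent, so a tile rendered at
// detail (2^detail coordinate units across) turns a pixel buffer into
// this many units.
int64_t buffer_in_tile_units(int buffer_pixels, int detail) {
	return (int64_t(1) << detail) * buffer_pixels / 256;
}

int main() {
	// The default --buffer=5 at detail 12 (4096-unit tiles) -> 80 units.
	printf("buffer: %lld units\n", (long long) buffer_in_tile_units(5, 12));

	// 1/(2^32) of the ~40,075 km equatorial circumference is about 9 mm,
	// i.e. roughly the 1cm quoted above.
	printf("smallest distance: %.4f m\n", 40075016.686 / 4294967296.0);
}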


@@ -58,6 +58,7 @@ static int min_detail = 7;
int quiet = 0;
int geometry_scale = 0;
double simplification = 1;
size_t max_tile_size = 500000;
int prevent[256];
int additional[256];
@@ -1894,6 +1895,7 @@ int main(int argc, char **argv) {
{"additional", required_argument, 0, 'a'},
{"projection", required_argument, 0, 's'},
{"simplification", required_argument, 0, 'S'},
{"maximum-tile-bytes", required_argument, 0, 'k'},
{"exclude-all", no_argument, 0, 'X'},
{"force", no_argument, 0, 'f'},
@@ -1944,7 +1946,7 @@ int main(int argc, char **argv) {
}
}
while ((i = getopt_long(argc, argv, "n:l:z:Z:B:d:D:m:o:x:y:r:b:t:g:p:a:XfFqvPL:A:s:S:", long_options, NULL)) != -1) {
while ((i = getopt_long(argc, argv, "n:l:z:Z:B:d:D:m:o:x:y:r:b:t:g:p:a:XfFqvPL:A:s:S:k:", long_options, NULL)) != -1) {
switch (i) {
case 0:
break;
@@ -2123,6 +2125,10 @@ int main(int argc, char **argv) {
}
break;
case 'k':
max_tile_size = atoll(optarg);
break;
default: {
int width = 7 + strlen(argv[0]);
fprintf(stderr, "Usage: %s", argv[0]);


@@ -14,3 +14,5 @@ extern int quiet;
extern size_t CPUS;
extern size_t TEMP_FILES;
extern size_t max_tile_size;


@@ -102,6 +102,8 @@ compensate for the larger marker, or \-Bf\fInumber\fP to allow at most \fInumber
\-b \fIpixels\fP or \-\-buffer=\fIpixels\fP: Buffer size where features are duplicated from adjacent tiles. Units are "screen pixels"\-\-1/256th of the tile width or height. (default 5)
.IP \(bu 2
\-s \fIprojection\fP or \-\-projection=\fIprojection\fP: Specify the projection of the input data. Currently supported are EPSG:4326 (WGS84, the default) and EPSG:3857 (Web Mercator).
.IP \(bu 2
\-k \fIbytes\fP or \-\-maximum\-tile\-bytes=\fIbytes\fP: Use the specified number of \fIbytes\fP as the maximum compressed tile size instead of 500K.
.RE
.PP
All internal math is done in terms of a 32\-bit tile coordinate system, so 1/(2^32) of the size of Earth,

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -900,6 +900,8 @@ void find_common_edges(std::vector<partial> &partials, int z, int line_detail, d
}
}
// Simplify each arc
std::vector<drawvec> simplified_arcs;
size_t count = 0;
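
For context on the "Simplify each arc" comment: in find_common_edges(), borders shared between adjacent polygons are pulled out into arcs, each arc is simplified exactly once, and every polygon that references the arc reuses the same simplified result, so neighboring rings cannot drift apart. A rough sketch of that one-pass-per-arc step with stand-in types (the real drawvec and simplification logic are more involved):

#include <cstddef>
#include <vector>

struct point { long long x, y; };
using drawvec = std::vector<point>;	// stand-in for tippecanoe's drawvec

// Placeholder simplification: keep the endpoints and every other interior
// point; the real code runs a proper line-simplification pass.
static drawvec simplify(const drawvec &arc) {
	drawvec out;
	for (size_t i = 0; i < arc.size(); i++) {
		if (i == 0 || i + 1 == arc.size() || i % 2 == 0) {
			out.push_back(arc[i]);
		}
	}
	return out;
}

// Each shared edge is simplified once; all polygons that use it get the
// same simplified geometry back by index.
static std::vector<drawvec> simplify_arcs_once(const std::vector<drawvec> &arcs) {
	std::vector<drawvec> simplified;
	simplified.reserve(arcs.size());
	for (const drawvec &arc : arcs) {
		simplified.push_back(simplify(arc));
	}
	return simplified;
}
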
@@ -924,6 +926,9 @@ void find_common_edges(std::vector<partial> &partials, int z, int line_detail, d
count++;
}
// If necessary, merge some adjacent polygons into some other polygons
#if 0
for (size_t i = 0; i < partials.size(); i++) {
for (size_t j = 0; j < partials[i].arc_polygon.size(); j++) {
if (merge_candidates.count(-partials[i].arc_polygon[j]) > 0) {
@@ -1033,7 +1038,9 @@ void find_common_edges(std::vector<partial> &partials, int z, int line_detail, d
}
}
}
#endif
// Turn the arc representations of the polygons back into standard polygon geometries
for (size_t i = 0; i < partials.size(); i++) {
if (partials[i].t == VT_POLYGON) {
@@ -1599,7 +1606,7 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
}
line_detail++; // to keep it the same when the loop decrements it
continue;
} else if (additional[A_INCREASE_GAMMA_AS_NEEDED]) {
} else if (additional[A_INCREASE_GAMMA_AS_NEEDED] && gamma < 10) {
if (gamma < 1) {
gamma = 1;
} else {
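
The new "gamma < 10" guard added above puts a ceiling on the gamma-escalation strategy so the loop cannot raise gamma indefinitely. The branch body is truncated here; a minimal sketch of the retry pattern, assuming the elided step doubles gamma (the exact growth step is not shown in this hunk):

#include <cstdio>

double next_gamma(double gamma) {
	if (gamma < 1) {
		return 1;	// first step up from the default of 0 (no gamma)
	}
	return gamma * 2;	// assumed growth step
}

int main() {
	double gamma = 0;
	// With the new cap, escalation runs 0 -> 1 -> 2 -> 4 -> 8 -> 16,
	// after which gamma >= 10 and this branch is no longer taken.
	while (gamma < 10) {
		gamma = next_gamma(gamma);
		printf("retrying with gamma %.1f\n", gamma);
	}
}
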
@@ -1619,21 +1626,21 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
std::string compressed = tile.encode();
if (compressed.size() > 500000 && !prevent[P_KILOBYTE_LIMIT]) {
if (compressed.size() > max_tile_size && !prevent[P_KILOBYTE_LIMIT]) {
if (!quiet) {
fprintf(stderr, "tile %d/%u/%u size is %lld with detail %d, >500000 \n", z, tx, ty, (long long) compressed.size(), line_detail);
fprintf(stderr, "tile %d/%u/%u size is %lld with detail %d, >%zu \n", z, tx, ty, (long long) compressed.size(), line_detail, max_tile_size);
}
if (prevent[P_DYNAMIC_DROP]) {
// The 95% is a guess to avoid too many retries
// and probably actually varies based on how much duplicated metadata there is
fraction = fraction * 500000 / compressed.size() * 0.95;
fraction = fraction * max_tile_size / compressed.size() * 0.95;
if (!quiet) {
fprintf(stderr, "Going to try keeping %0.2f%% of the features to make it fit\n", fraction * 100);
}
line_detail++; // to keep it the same when the loop decrements it
} else if (additional[A_INCREASE_GAMMA_AS_NEEDED]) {
} else if (additional[A_INCREASE_GAMMA_AS_NEEDED] && gamma < 10) {
if (gamma < 1) {
gamma = 1;
} else {
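
To make the dynamic-drop arithmetic above concrete: the fraction of features kept is rescaled by the ratio of the byte limit to the actual compressed size, then multiplied by 0.95 as the safety margin the comment describes. A standalone example with made-up sizes (an 800000-byte tile against the default 500000-byte limit):

#include <cstdio>

int main() {
	double fraction = 1.0;			// currently keeping every feature
	double compressed_size = 800000;	// example oversized tile
	double max_tile_size = 500000;		// default limit, overridable with -k

	// Same formula as in write_tile(): scale by target/actual, then shave
	// 5% because duplicated metadata makes the ratio optimistic.
	fraction = fraction * max_tile_size / compressed_size * 0.95;
	printf("Going to try keeping %0.2f%% of the features\n", fraction * 100);
	// Prints: Going to try keeping 59.38% of the features
}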